From ef41af5b06f6e6819b75ae695dc6c31438a46895 Mon Sep 17 00:00:00 2001 From: Natasha Dada Date: Fri, 13 Sep 2024 09:27:48 -0400 Subject: [PATCH 01/23] Propagate agent host tags for mysql (#18400) --- mysql/assets/configuration/spec.yaml | 17 +++++- mysql/changelog.d/18400.added | 1 + mysql/datadog_checks/mysql/config.py | 43 ++++++++++++-- .../mysql/config_models/defaults.py | 8 +++ .../mysql/config_models/instance.py | 1 + .../mysql/config_models/shared.py | 5 +- .../mysql/data/conf.yaml.example | 12 ++++ mysql/datadog_checks/mysql/mysql.py | 2 +- mysql/pyproject.toml | 2 +- mysql/tests/test_mysql.py | 56 +++++++++++++++++++ 10 files changed, 139 insertions(+), 8 deletions(-) create mode 100644 mysql/changelog.d/18400.added diff --git a/mysql/assets/configuration/spec.yaml b/mysql/assets/configuration/spec.yaml index de6bc8f34de09..ce50bc273edf2 100644 --- a/mysql/assets/configuration/spec.yaml +++ b/mysql/assets/configuration/spec.yaml @@ -4,6 +4,13 @@ files: options: - template: init_config options: + - name: propagate_agent_tags + description: | + Set to `true` to propagate the tags from `datadog.yaml` to the check. + When set to `true`, the tags from `datadog.yaml` are added to the check's tags for all instances. + value: + example: false + type: boolean - template: init_config/db overrides: global_custom_queries.value.example: @@ -49,6 +56,14 @@ files: type: number example: 3306 + - name: propagate_agent_tags + description: | + Set to `true` to propagate the tags from `datadog.yaml` to the check. + When set to `true`, the tags from `datadog.yaml` are added to the check's tags for all instances. + value: + example: false + type: boolean + - name: reported_hostname description: | Set the reported hostname for this instance. This value overrides the hostname detected by the Agent @@ -390,7 +405,7 @@ files: Capped by `schemas_collection.collection_interval` value: type: number - example: 60 + example: 60 - name: query_metrics description: Configure collection of query metrics options: diff --git a/mysql/changelog.d/18400.added b/mysql/changelog.d/18400.added new file mode 100644 index 0000000000000..b6a149a4552ac --- /dev/null +++ b/mysql/changelog.d/18400.added @@ -0,0 +1 @@ +Update the propagate_agent_tags setting. When set to `true`, the tags from the agent host are now added to the check's tags for all instances. 
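
The option is exposed both under `init_config` and per instance, and the `config.py` hunk below resolves the two levels with a simple precedence rule: an explicit instance value wins, otherwise the `init_config` value applies, otherwise it defaults to `False`. The following is a minimal stand-alone sketch of that rule (a hypothetical helper mirroring `MySQLConfig._should_propagate_agent_tags`, not code taken from the patch):

    # Hypothetical mirror of the precedence implemented by
    # MySQLConfig._should_propagate_agent_tags in the config.py hunk below.
    def should_propagate_agent_tags(instance, init_config):
        for scope in (instance, init_config):
            value = scope.get('propagate_agent_tags')
            if value is not None:  # an explicit True or False is honored
                return value
        return False  # neither level sets the option: default to False

    # Instance-level settings take precedence over init_config.
    assert should_propagate_agent_tags({'propagate_agent_tags': False}, {'propagate_agent_tags': True}) is False
    assert should_propagate_agent_tags({}, {'propagate_agent_tags': True}) is True
    assert should_propagate_agent_tags({}, {}) is False
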
diff --git a/mysql/datadog_checks/mysql/config.py b/mysql/datadog_checks/mysql/config.py index be482cc63fc63..1d4c69f35c8ef 100644 --- a/mysql/datadog_checks/mysql/config.py +++ b/mysql/datadog_checks/mysql/config.py @@ -4,12 +4,13 @@ from datadog_checks.base import ConfigurationError, is_affirmative from datadog_checks.base.log import get_check_logger from datadog_checks.base.utils.aws import rds_parse_tags_from_endpoint +from datadog_checks.base.utils.db.utils import get_agent_host_tags DEFAULT_MAX_CUSTOM_QUERIES = 20 class MySQLConfig(object): - def __init__(self, instance): + def __init__(self, instance, init_config): self.log = get_check_logger() self.host = instance.get('host', instance.get('server', '')) self.port = int(instance.get('port', 0)) @@ -18,7 +19,10 @@ def __init__(self, instance): self.defaults_file = instance.get('defaults_file', '') self.user = instance.get('username', instance.get('user', '')) self.password = str(instance.get('password', instance.get('pass', ''))) - self.tags = self._build_tags(instance.get('tags', [])) + self.tags = self._build_tags( + custom_tags=instance.get('tags', []), + propagate_agent_tags=self._should_propagate_agent_tags(instance, init_config), + ) self.options = instance.get('options', {}) or {} # options could be None if empty in the YAML replication_channel = self.options.get('replication_channel') if replication_channel: @@ -94,12 +98,26 @@ def __init__(self, instance): self.database_instance_collection_interval = instance.get('database_instance_collection_interval', 300) self.configuration_checks() - def _build_tags(self, custom_tags): - tags = list(set(custom_tags)) or [] + def _build_tags(self, custom_tags, propagate_agent_tags): + # Clean up tags in case there was a None entry in the instance + # e.g. 
if the yaml contains tags: but no actual tags + if custom_tags is None: + tags = [] + else: + tags = list(set(custom_tags)) rds_tags = rds_parse_tags_from_endpoint(self.host) if rds_tags: tags.extend(rds_tags) + + if propagate_agent_tags: + try: + agent_tags = get_agent_host_tags() + tags.extend(agent_tags) + except Exception as e: + raise ConfigurationError( + 'propagate_agent_tags enabled but there was an error fetching agent tags {}'.format(e) + ) return tags def configuration_checks(self): @@ -119,3 +137,20 @@ def configuration_checks(self): if self.mysql_sock and self.host: self.log.warning("Both socket and host have been specified, socket will be used") + + @staticmethod + def _should_propagate_agent_tags(instance, init_config) -> bool: + ''' + return True if the agent tags should be propagated to the check + ''' + instance_propagate_agent_tags = instance.get('propagate_agent_tags') + init_config_propagate_agent_tags = init_config.get('propagate_agent_tags') + + if instance_propagate_agent_tags is not None: + # if the instance has explicitly set the value, return the boolean + return instance_propagate_agent_tags + if init_config_propagate_agent_tags is not None: + # if the init_config has explicitly set the value, return the boolean + return init_config_propagate_agent_tags + # if neither the instance nor the init_config has set the value, return False + return False diff --git a/mysql/datadog_checks/mysql/config_models/defaults.py b/mysql/datadog_checks/mysql/config_models/defaults.py index e4a4c67c81f6a..99ce64738ccf6 100644 --- a/mysql/datadog_checks/mysql/config_models/defaults.py +++ b/mysql/datadog_checks/mysql/config_models/defaults.py @@ -8,6 +8,10 @@ # ddev -x validate models -s +def shared_propagate_agent_tags(): + return False + + def instance_connect_timeout(): return 10 @@ -52,5 +56,9 @@ def instance_port(): return 3306 +def instance_propagate_agent_tags(): + return False + + def instance_use_global_custom_queries(): return 'true' diff --git a/mysql/datadog_checks/mysql/config_models/instance.py b/mysql/datadog_checks/mysql/config_models/instance.py index fa93f7f03f463..9df8ed457cf07 100644 --- a/mysql/datadog_checks/mysql/config_models/instance.py +++ b/mysql/datadog_checks/mysql/config_models/instance.py @@ -205,6 +205,7 @@ class InstanceConfig(BaseModel): options: Optional[Options] = None password: Optional[str] = None port: Optional[float] = None + propagate_agent_tags: Optional[bool] = None queries: Optional[tuple[MappingProxyType[str, Any], ...]] = None query_activity: Optional[QueryActivity] = None query_metrics: Optional[QueryMetrics] = None diff --git a/mysql/datadog_checks/mysql/config_models/shared.py b/mysql/datadog_checks/mysql/config_models/shared.py index 8a1218b18c764..b8010d04de971 100644 --- a/mysql/datadog_checks/mysql/config_models/shared.py +++ b/mysql/datadog_checks/mysql/config_models/shared.py @@ -17,7 +17,7 @@ from datadog_checks.base.utils.functions import identity from datadog_checks.base.utils.models import validation -from . import validators +from . 
import defaults, validators class SharedConfig(BaseModel): @@ -27,6 +27,7 @@ class SharedConfig(BaseModel): frozen=True, ) global_custom_queries: Optional[tuple[MappingProxyType[str, Any], ...]] = None + propagate_agent_tags: Optional[bool] = None service: Optional[str] = None @model_validator(mode='before') @@ -39,6 +40,8 @@ def _validate(cls, value, info): field_name = field.alias or info.field_name if field_name in info.context['configured_fields']: value = getattr(validators, f'shared_{info.field_name}', identity)(value, field=field) + else: + value = getattr(defaults, f'shared_{info.field_name}', lambda: value)() return validation.utils.make_immutable(value) diff --git a/mysql/datadog_checks/mysql/data/conf.yaml.example b/mysql/datadog_checks/mysql/data/conf.yaml.example index 3048034be9e0e..3dbf966831e81 100644 --- a/mysql/datadog_checks/mysql/data/conf.yaml.example +++ b/mysql/datadog_checks/mysql/data/conf.yaml.example @@ -2,6 +2,12 @@ # init_config: + ## @param propagate_agent_tags - boolean - optional - default: false + ## Set to `true` to propagate the tags from `datadog.yaml` to the check. + ## When set to `true`, the tags from `datadog.yaml` are added to the check's tags for all instances. + # + # propagate_agent_tags: false + ## @param global_custom_queries - list of mappings - optional ## See `custom_queries` defined below. ## @@ -46,6 +52,12 @@ instances: # port: 3306 + ## @param propagate_agent_tags - boolean - optional - default: false + ## Set to `true` to propagate the tags from `datadog.yaml` to the check. + ## When set to `true`, the tags from `datadog.yaml` are added to the check's tags for all instances. + # + # propagate_agent_tags: false + ## @param reported_hostname - string - optional ## Set the reported hostname for this instance. This value overrides the hostname detected by the Agent ## and can be useful to set a custom hostname when connecting to a remote database through a proxy. 
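
With the example configuration above, enabling the option means the Agent host tags returned by `get_agent_host_tags()` are appended to each instance's tag list. Below is a minimal sketch of how that merge could be observed once this patch is applied; it assumes the patched `MySQLConfig(instance, init_config)` signature from `config.py` above, the host, credentials, and tag values are made up, and the Agent tag helper is stubbed out the same way the new test further down does:

    # Minimal sketch (not part of the patch); connection details and tag
    # values are illustrative only.
    from unittest import mock

    from datadog_checks.mysql.config import MySQLConfig

    instance = {
        'host': 'localhost',
        'username': 'datadog',
        'password': 'hunter2',
        'tags': ['team:db'],
        'propagate_agent_tags': True,
    }

    # Stub the Agent host tags instead of querying a running Agent,
    # mirroring the mock used by the new test below.
    with mock.patch(
        'datadog_checks.mysql.config.get_agent_host_tags',
        return_value=['env:prod', 'rack:r1'],
    ):
        config = MySQLConfig(instance, {})

    # The custom instance tags and the stubbed agent host tags are merged.
    assert {'team:db', 'env:prod', 'rack:r1'} <= set(config.tags)
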
diff --git a/mysql/datadog_checks/mysql/mysql.py b/mysql/datadog_checks/mysql/mysql.py index 1e809531e9df7..a4c2eeb6a4ec2 100644 --- a/mysql/datadog_checks/mysql/mysql.py +++ b/mysql/datadog_checks/mysql/mysql.py @@ -115,7 +115,7 @@ def __init__(self, name, init_config, instances): self._resolved_hostname = None self._agent_hostname = None self._is_aurora = None - self._config = MySQLConfig(self.instance) + self._config = MySQLConfig(self.instance, init_config) self.tags = self._config.tags self.cloud_metadata = self._config.cloud_metadata diff --git a/mysql/pyproject.toml b/mysql/pyproject.toml index 73383c2f4d533..3b5ca5eb45684 100644 --- a/mysql/pyproject.toml +++ b/mysql/pyproject.toml @@ -28,7 +28,7 @@ classifiers = [ "Private :: Do Not Upload", ] dependencies = [ - "datadog-checks-base>=36.5.0", + "datadog-checks-base>=36.14.0", ] dynamic = [ "version", diff --git a/mysql/tests/test_mysql.py b/mysql/tests/test_mysql.py index 2b099d2b4d3b6..f2baef2e86ec6 100644 --- a/mysql/tests/test_mysql.py +++ b/mysql/tests/test_mysql.py @@ -759,3 +759,59 @@ def test_database_instance_metadata(aggregator, dd_run_check, instance_complex, dbm_metadata = aggregator.get_event_platform_events("dbm-metadata") event = next((e for e in dbm_metadata if e['kind'] == 'database_instance'), None) assert event is None + + +@pytest.mark.parametrize( + 'instance_propagate_agent_tags,init_config_propagate_agent_tags,should_propagate_agent_tags', + [ + pytest.param(True, True, True, id="both true"), + pytest.param(True, False, True, id="instance config true prevails"), + pytest.param(False, True, False, id="instance config false prevails"), + pytest.param(False, False, False, id="both false"), + pytest.param(None, True, True, id="init_config true applies to all instances"), + pytest.param(None, False, False, id="init_config false applies to all instances"), + pytest.param(None, None, False, id="default to false"), + pytest.param(True, None, True, id="instance config true prevails, init_config is None"), + pytest.param(False, None, False, id="instance config false prevails, init_config is None"), + ], +) +@pytest.mark.integration +def test_propagate_agent_tags( + aggregator, + dd_run_check, + instance_basic, + instance_propagate_agent_tags, + init_config_propagate_agent_tags, + should_propagate_agent_tags, +): + instance_basic['propagate_agent_tags'] = instance_propagate_agent_tags + init_config = {} + if init_config_propagate_agent_tags is not None: + init_config['propagate_agent_tags'] = init_config_propagate_agent_tags + + agent_tags = ['my-env:test-env', 'random:tag', 'bar:foo'] + expected_tags = ( + instance_basic.get('tags', []) + + [ + 'server:{}'.format(HOST), + 'port:{}'.format(PORT), + 'dd.internal.resource:database_instance:forced_hostname', + "dd.internal.resource:aws_rds_instance:foo.aws.com", + "dd.internal.resource:azure_mysql_server:my-instance", + 'dd.internal.resource:gcp_sql_database_instance:foo-project:bar', + ] + + agent_tags + ) + + with mock.patch('datadog_checks.mysql.config.get_agent_host_tags', return_value=agent_tags): + check = MySql(common.CHECK_NAME, init_config, [instance_basic]) + assert check._config._should_propagate_agent_tags(instance_basic, init_config) == should_propagate_agent_tags + if should_propagate_agent_tags: + assert all(tag in check.tags for tag in agent_tags) + dd_run_check(check) + aggregator.assert_service_check( + 'mysql.can_connect', + count=1, + status=MySql.OK, + tags=expected_tags, + ) From 3bed02c0b00fa2ca461c5f930c24461d97e87773 Mon Sep 17 00:00:00 2001 From: 
Ilia Kurenkov Date: Fri, 13 Sep 2024 18:11:38 +0200 Subject: [PATCH 02/23] Mark postgres test as flake (#18585) --- postgres/tests/test_logical_replication.py | 1 + 1 file changed, 1 insertion(+) diff --git a/postgres/tests/test_logical_replication.py b/postgres/tests/test_logical_replication.py index ca232d2216cd8..d5dbe86b21600 100644 --- a/postgres/tests/test_logical_replication.py +++ b/postgres/tests/test_logical_replication.py @@ -112,6 +112,7 @@ def test_subscription_stats_sync_errors(aggregator, integration_check, pg_replic @requires_over_10 +@pytest.mark.flaky(max_runs=5) def test_stat_subscription(aggregator, integration_check, pg_replica_logical): check = integration_check(pg_replica_logical) check.check(pg_replica_logical) From 360643fedda1cd1941c824629a6267a2d9565645 Mon Sep 17 00:00:00 2001 From: "agent-platform-auto-pr[bot]" <153269286+agent-platform-auto-pr[bot]@users.noreply.github.com> Date: Fri, 13 Sep 2024 19:13:32 +0200 Subject: [PATCH 03/23] Update dependency resolution (#18582) Co-authored-by: alopezz --- .deps/image_digests.json | 6 ++--- .deps/resolved/linux-aarch64_py3.txt | 32 +++++++++++++-------------- .deps/resolved/linux-x86_64_py3.txt | 16 +++++++------- .deps/resolved/macos-x86_64_py3.txt | 24 ++++++++++---------- .deps/resolved/windows-x86_64_py3.txt | 16 +++++++------- 5 files changed, 47 insertions(+), 47 deletions(-) diff --git a/.deps/image_digests.json b/.deps/image_digests.json index d82e5f11f3dcc..18afec2ebf7af 100644 --- a/.deps/image_digests.json +++ b/.deps/image_digests.json @@ -1,5 +1,5 @@ { - "linux-aarch64": "sha256:bb4ca64678ec1be3fe0c94b6f069912c4e78f52a1918b770e40066cc1c1a7191", - "linux-x86_64": "sha256:cd58dce0bb12645ca22037094a13aab7b2509d17e219e2ec32876fd1aa8b8082", - "windows-x86_64": "sha256:caa8cfa4d307c442edcc0cf42528ba4a5ca80b36371df22ce527a18ea41f6476" + "linux-aarch64": "sha256:63f46c0aaad6c81830b99754b158625e6ff5dbf50cd74e62c098fa46ab6808ec", + "linux-x86_64": "sha256:e854e6d6f8258c2bb80c5f525a9c9f545ce95bd555583e9ae04bbeede9d5666a", + "windows-x86_64": "sha256:ce5ac8bd45f7816a8ea3f55f54d70c8856667ad2cf8d638b4c62728368a65652" } diff --git a/.deps/resolved/linux-aarch64_py3.txt b/.deps/resolved/linux-aarch64_py3.txt index 6bec3776b9004..c26dbcccd23e3 100644 --- a/.deps/resolved/linux-aarch64_py3.txt +++ b/.deps/resolved/linux-aarch64_py3.txt @@ -1,9 +1,9 @@ -aerospike @ https://agent-int-packages.datadoghq.com/built/aerospike/aerospike-7.1.1-20240903133950-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=42ba0630fd0b9556c52020caeefa473393d9a454c9097869656ab2546e202eae +aerospike @ https://agent-int-packages.datadoghq.com/built/aerospike/aerospike-7.1.1-20240906205206-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=ce1eebec5b0de4e3c1b3c93783f894a2ea7f72cee74b49803759b8d61fdb915f annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-types/annotated_types-0.7.0-py3-none-any.whl#sha256=1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 aws-requests-auth @ 
https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 -azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.30.2-py3-none-any.whl#sha256=cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68 beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed @@ -12,14 +12,14 @@ boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 -cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.0-py3-none-any.whl#sha256=043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=daae6e5108e618f3fb64eab8c9dbf74e05fae5ba4b4d785fc7548a1c2573444b clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402155018-py3-none-manylinux2014_aarch64.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 -confluent-kafka @ 
https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240903133950-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=d177a75d998473b5198052cb4e0179054cef510507a9c225b5f9365faf794a7a +confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240906205206-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=c628d09f4367a3cb57bfff2e4dcb31148f0dc9b2e1ebb97534c8212cb65b1874 cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=1f4de09300cd24101e5209a542372e06aff84280cb1036218bfda41e2110f3d3 @@ -28,10 +28,10 @@ deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprec dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add -filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.15.4-py3-none-any.whl#sha256=6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402155019-py3-none-manylinux2014_aarch64.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 -gssapi @ https://agent-int-packages.datadoghq.com/built/gssapi/gssapi-1.8.3-20240903133951-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=9ca19d6518fd0fde917444022cb2660d41eed3f2feb7e66ee8bf41015750a8a7 +gssapi @ https://agent-int-packages.datadoghq.com/built/gssapi/gssapi-1.8.3-20240906205210-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=662fe269460cc32dd2016f000f9b54eaa5a346415782d5cd15bde929f45601b6 hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f idna @ https://agent-int-packages.datadoghq.com/external/idna/idna-3.8-py3-none-any.whl#sha256=050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac importlib-metadata @ https://agent-int-packages.datadoghq.com/external/importlib-metadata/importlib_metadata-8.4.0-py3-none-any.whl#sha256=66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 @@ -42,15 +42,15 @@ 
jmespath @ https://agent-int-packages.datadoghq.com/external/jmespath/jmespath-1 jsonpatch @ https://agent-int-packages.datadoghq.com/external/jsonpatch/jsonpatch-1.33-py2.py3-none-any.whl#sha256=0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade jsonpointer @ https://agent-int-packages.datadoghq.com/external/jsonpointer/jsonpointer-3.0.0-py2.py3-none-any.whl#sha256=13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 keystoneauth1 @ https://agent-int-packages.datadoghq.com/external/keystoneauth1/keystoneauth1-5.8.0-py3-none-any.whl#sha256=e69dff80c509ab64d4de4494658d914e81f26af720828dc584ceee74ecd666d9 -krb5 @ https://agent-int-packages.datadoghq.com/built/krb5/krb5-0.6.0-20240903133951-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=a9c175d3195a80100904b4f62163f52971bf3abd59fb880436b7b23b8da01215 +krb5 @ https://agent-int-packages.datadoghq.com/built/krb5/krb5-0.6.0-20240906205210-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=fb885d4cc3db8b34dacda9132906e2885cdb63bbb2095bb10bb2f916e3435e3f kubernetes @ https://agent-int-packages.datadoghq.com/external/kubernetes/kubernetes-30.1.0-py2.py3-none-any.whl#sha256=e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2.py3-none-any.whl#sha256=5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70 lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl#sha256=4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97 lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=edfd858985c23523f4e5a7526ca6ee65ff930207a7ec8a8f57a01eae506aaee7 mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14 -msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.30.0-py3-none-any.whl#sha256=423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d -netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240903133952-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=558a52f3e6453136784f9e03ca1c08afa46e13177aee61c00f69a3c933558889 +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240906205211-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=7349b734192c7847e0f89a7836ac1157ad94dace2c76727260d3fb4f6f5b88de oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 opentelemetry-api @ 
https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 @@ -60,15 +60,15 @@ packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packagin paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a -platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.2.2-py3-none-any.whl#sha256=2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.2-py3-none-any.whl#sha256=eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl#sha256=b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce -psutil @ https://agent-int-packages.datadoghq.com/built/psutil/psutil-5.9.6-20240904194634-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=d1516a9e2507814c7be4491b0c79925bbd2a55f33d4c1011cce5f3f33373dbed +psutil @ https://agent-int-packages.datadoghq.com/built/psutil/psutil-5.9.6-20240906205211-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=77fb0059abe0517cf2b8f14e3f10cca95200372e60d75fdc884e84bfa60040a2 psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2 pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d -pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.0-py3-none-any.whl#sha256=be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc pycryptodomex @ 
https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623 pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 @@ -77,7 +77,7 @@ pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3- pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=bf821bd3befb993a6db17229a2c60c1550e957de02a6ff4dd0af9476637b2e4d pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 -pyodbc @ https://agent-int-packages.datadoghq.com/built/pyodbc/pyodbc-5.1.0-20240903133952-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=2e1b8a189cd31a704e6ce5f4032dff37f52c2fe8c23b0b051695677f08dad240 +pyodbc @ https://agent-int-packages.datadoghq.com/built/pyodbc/pyodbc-5.1.0-20240906205212-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=28aae58b273b5ebbbfb47da5433e673d1bf674d23a222565355251bcdf3c87db pyopenssl @ https://agent-int-packages.datadoghq.com/external/pyopenssl/pyOpenSSL-24.2.1-py3-none-any.whl#sha256=967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d pysmi @ https://agent-int-packages.datadoghq.com/external/pysmi/pysmi-1.2.1-py3-none-any.whl#sha256=d97c60de9f81d33ab2899124d95a94fa7fefacc86ab6e00cbfec543a073e6d33 pysnmp @ https://agent-int-packages.datadoghq.com/external/pysnmp/pysnmp-5.1.0-py3-none-any.whl#sha256=375a8adfc6820faf24ace6761a6d20544e60580d714ff7266df272850c39b439 @@ -88,7 +88,7 @@ pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0 python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d -pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.1-py2.py3-none-any.whl#sha256=328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319 +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172038-py2.py3-none-manylinux2014_aarch64.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c redis @ 
https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 @@ -118,8 +118,8 @@ tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-a typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 -uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240903133953-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=a6e5b5ed638e72689bbe8e204eea4637df0e34142c1e21d160e17645a5b693eb -urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.2-py3-none-any.whl#sha256=a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240906205212-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=d082be5d82c27fc6165678addae82cb6bec71210004970a6f5442ffd9a916139 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 diff --git a/.deps/resolved/linux-x86_64_py3.txt b/.deps/resolved/linux-x86_64_py3.txt index 7a245a771153b..c8db799a83736 100644 --- a/.deps/resolved/linux-x86_64_py3.txt +++ b/.deps/resolved/linux-x86_64_py3.txt @@ -3,7 +3,7 @@ annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-ty asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 -azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.30.2-py3-none-any.whl#sha256=cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd azure-identity @ 
https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed @@ -12,7 +12,7 @@ boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 -cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.0-py3-none-any.whl#sha256=043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 @@ -28,7 +28,7 @@ deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprec dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add -filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.15.4-py3-none-any.whl#sha256=6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154840-py3-none-manylinux2014_x86_64.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd google-auth @ 
https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 gssapi @ https://agent-int-packages.datadoghq.com/built/gssapi/gssapi-1.8.3-20240905223414-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=7312a7e15b2859f1e40905e7ecbf166e4c2e60c14e1067544b7d97fd4a794d36 @@ -48,7 +48,7 @@ ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2. lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl#sha256=c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=0e9c410b11a31dbdc94c05ac3c480cb4b222460faf9231f12538d0074e56c563 mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f -msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.30.0-py3-none-any.whl#sha256=423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240905223415-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=7bacb0d8874bd8eff36b639f524b3604b6df18285465cf351d4664dbec7798ce oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca @@ -60,7 +60,7 @@ packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packagin paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a -platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.2.2-py3-none-any.whl#sha256=2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.2-py3-none-any.whl#sha256=eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf prometheus-client @ 
https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 @@ -68,7 +68,7 @@ protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5 psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4 psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d -pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.0-py3-none-any.whl#sha256=be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4 pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 @@ -89,7 +89,7 @@ pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0 python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d -pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.1-py2.py3-none-any.whl#sha256=328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319 +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172050-py2.py3-none-manylinux2014_x86_64.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 @@ -120,7 
+120,7 @@ typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-ext tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240905223416-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=d37438ba530fcc6e3af980d66b3167464d3227bcb2e8de62508ab84f231723c5 -urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.2-py3-none-any.whl#sha256=a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 diff --git a/.deps/resolved/macos-x86_64_py3.txt b/.deps/resolved/macos-x86_64_py3.txt index 98b940f0795ab..76a662eb6e418 100644 --- a/.deps/resolved/macos-x86_64_py3.txt +++ b/.deps/resolved/macos-x86_64_py3.txt @@ -2,7 +2,7 @@ annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-ty asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 -azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.30.2-py3-none-any.whl#sha256=cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl#sha256=c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841 beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed @@ -11,14 +11,14 @@ boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py botocore @ 
https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 -cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.0-py3-none-any.whl#sha256=043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl#sha256=a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl#sha256=573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp311-cp311-macosx_10_9_x86_64.whl#sha256=bb3401bb08d0e27166faac7ff277ff3482e096d0455152a6b7c38b3a632a007b clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl#sha256=5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154932-py3-none-macosx_10_12_universal2.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 -confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240906151810-cp311-cp311-macosx_10_12_universal2.whl#sha256=4f0180dcde487a2dd111780456c85b26e910e402c15ea63bfe759257b726316f +confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240913120822-cp311-cp311-macosx_10_12_universal2.whl#sha256=d2357529d6ade99cfb5bf70a3b42ab3bcd96b35c0ae05a8ed44ac343724528d6 cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl#sha256=7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp311-cp311-macosx_12_0_x86_64.whl#sha256=86f209aa4dc544368b593aa56e1f9e302d9329410664f03dbaaf16afb6501b0e @@ -27,7 +27,7 @@ deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprec dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 dogpile-cache @ 
https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add -filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.15.4-py3-none-any.whl#sha256=6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154934-py3-none-macosx_10_12_universal2.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 gssapi @ https://agent-int-packages.datadoghq.com/external/gssapi/gssapi-1.8.3-cp311-cp311-macosx_10_9_x86_64.whl#sha256=b03d6b30f1fcd66d9a688b45a97e302e4dd3f1386d5c333442731aec73cdb409 @@ -47,9 +47,9 @@ ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2. lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl#sha256=359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28 lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp311-cp311-macosx_10_9_x86_64.whl#sha256=30e8c20b8857adef7be045c65f47ab1e2c4fabba86a9fa9a997d7674a31ea6b6 mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896 -msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.30.0-py3-none-any.whl#sha256=423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d -netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240906151811-cp311-cp311-macosx_10_12_universal2.whl#sha256=b9dc702c73506e9b38196f807a2d0484ef79a55a84b77d15cae57e6bef3f1199 +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240913120822-cp311-cp311-macosx_10_12_universal2.whl#sha256=af7088e06f7b24ab704694f9cdacd4582da7ec503771b487ffe5430e936e5284 oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 @@ -59,7 +59,7 @@ packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packagin paramiko @ 
https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a -platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.2.2-py3-none-any.whl#sha256=2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.2-py3-none-any.whl#sha256=eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 @@ -67,14 +67,14 @@ protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5 psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl#sha256=c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl#sha256=ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26 pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d -pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.0-py3-none-any.whl#sha256=be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl#sha256=82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3 pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl#sha256=d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 pymongo @ 
https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68 -pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240906151811-cp311-cp311-macosx_10_12_universal2.whl#sha256=41ec6d3b287a1466f336124485fd3f2d7cd4254d95c4c6712b354099dbdd6592 +pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240913120823-cp311-cp311-macosx_10_12_universal2.whl#sha256=d921f583750fe092c18cbc0a5e0a5439da8b2294c27a8a6f92cb6f9ccac6fd78 pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl#sha256=401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 pyodbc @ https://agent-int-packages.datadoghq.com/external/pyodbc/pyodbc-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=aa6f46377da303bf79bcb4b559899507df4b2559f30dcfdf191358ee4b99f3ab @@ -88,7 +88,7 @@ pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0 python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d -pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.1-py2.py3-none-any.whl#sha256=328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319 +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172113-py2.py3-none-macosx_10_12_universal2.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl#sha256=cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 @@ -118,8 +118,8 @@ tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-a typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 -uptime @ 
https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240906151811-cp311-cp311-macosx_10_12_universal2.whl#sha256=b3ec71796abaa193400c0b02a0ecb4dda1471cc6b3502cfa5acf1d8f8eca382b -urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.2-py3-none-any.whl#sha256=a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240913120824-cp311-cp311-macosx_10_12_universal2.whl#sha256=5022f7754d6ed018d050ec08831d1d0f93b0e48f6a4e4a276e95989b4ec6db9d +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 diff --git a/.deps/resolved/windows-x86_64_py3.txt b/.deps/resolved/windows-x86_64_py3.txt index 5fc6735cc7051..410ca5c6eddd1 100644 --- a/.deps/resolved/windows-x86_64_py3.txt +++ b/.deps/resolved/windows-x86_64_py3.txt @@ -2,7 +2,7 @@ annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-ty asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 -azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.30.2-py3-none-any.whl#sha256=cf019c1ca832e96274ae85abd3d9f752397194d9fea3b41487290562ac8abe4a +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-win_amd64.whl#sha256=61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9 beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed @@ -11,7 +11,7 @@ boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 bytecode @ 
https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 -cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.0-py3-none-any.whl#sha256=043bb8af72596432a7df63abcff0055ac0f198a4d2e95af8db5a936a7074a761 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp311-cp311-win_amd64.whl#sha256=caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl#sha256=663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 @@ -27,7 +27,7 @@ deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprec dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add -filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.15.4-py3-none-any.whl#sha256=6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154628-py3-none-win_amd64.whl#sha256=07e8e97e51dc9248d58d60d33076b82380135c31ab3727a33b885cea17e34bc7 google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f @@ -45,7 +45,7 @@ ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2. 
lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp311-cp311-win_amd64.whl#sha256=c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8 lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp311-cp311-win_amd64.whl#sha256=b14d948e6dce389f9a7afc666d60dd1e35fa2138a8ec5306d30cd2e30d36b40c mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp311-cp311-win_amd64.whl#sha256=0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec -msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.30.0-py3-none-any.whl#sha256=423872177410cb61683566dc3932db7a76f661a5d2f6f52f02a047f101e1c1de +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240402154629-cp311-cp311-win_amd64.whl#sha256=e00ed4567b7dbf9f3a0b0b98535b13f932a1e81ee4efbbeb56218a7bf3aaacd2 oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca @@ -57,7 +57,7 @@ packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packagin paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a -platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.2.2-py3-none-any.whl#sha256=2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.2-py3-none-any.whl#sha256=eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617 ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 @@ -65,7 +65,7 @@ protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5 psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp37-abi3-win_amd64.whl#sha256=6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl#sha256=b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417 pyasn1 @ 
https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d -pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.0-py3-none-any.whl#sha256=be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl#sha256=2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 @@ -84,7 +84,7 @@ pysnmpcrypto @ https://agent-int-packages.datadoghq.com/external/pysnmpcrypto/py pysocks @ https://agent-int-packages.datadoghq.com/external/pysocks/PySocks-1.7.1-py3-none-any.whl#sha256=2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0.11.1-py3-none-any.whl#sha256=129a4294f2c4d681d5875240ef87accc6f1d921e8983737fb0b59642b397951e python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 -pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.1-py2.py3-none-any.whl#sha256=328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319 +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172100-py2.py3-none-win_amd64.whl#sha256=19446fe48dbdd8b64097eff5648cc4b5a19165ede40826507f5e1398e1032e12 pywin32 @ https://agent-int-packages.datadoghq.com/external/pywin32/pywin32-306-cp311-cp311-win_amd64.whl#sha256=a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp311-cp311-win_amd64.whl#sha256=e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 @@ -117,7 +117,7 @@ typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-ext tzdata @ https://agent-int-packages.datadoghq.com/external/tzdata/tzdata-2024.1-py2.py3-none-any.whl#sha256=9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252 tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240402154631-cp311-cp311-win_amd64.whl#sha256=f45d4f913936457ad976b516f39d95b5f13824f29ccabaff24bdfb65c7719cce -urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.2-py3-none-any.whl#sha256=a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 +urllib3 @ 
https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac
 vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b
 websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526
 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-win_amd64.whl#sha256=aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89

From 9a881d43ab66b7f48a22128ea21b9ebd9cf30751 Mon Sep 17 00:00:00 2001
From: Ilia Kurenkov
Date: Fri, 13 Sep 2024 19:47:00 +0200
Subject: [PATCH 04/23] Stop packaging integrations for Python 2 (#18580)

* Stop packaging integrations for Python 2
---
 .ddev/config.toml | 4 -
 LICENSE-3rdparty.csv | 22 ---
 active_directory/changelog.d/18580.removed | 1 +
 active_directory/pyproject.toml | 4 +-
 active_directory/setup.py | 85 ----------
 activemq/changelog.d/18580.removed | 1 +
 activemq/pyproject.toml | 1 -
 activemq/setup.py | 81 ----------
 activemq_xml/changelog.d/18580.removed | 1 +
 activemq_xml/pyproject.toml | 1 -
 activemq_xml/setup.py | 82 ----------
 aerospike/changelog.d/18580.removed | 1 +
 aerospike/pyproject.toml | 4 +-
 aerospike/setup.py | 79 ----------
 agent_requirements.in | 149 +++++++-----------
 amazon_msk/changelog.d/18580.removed | 1 +
 amazon_msk/pyproject.toml | 4 +-
 amazon_msk/setup.py | 81 ----------
 ambari/changelog.d/18580.removed | 1 +
 ambari/pyproject.toml | 1 -
 ambari/setup.py | 79 ----------
 apache/changelog.d/18580.removed | 1 +
 apache/pyproject.toml | 1 -
 apache/setup.py | 90 -----------
 aspdotnet/changelog.d/18580.removed | 1 +
 aspdotnet/pyproject.toml | 4 +-
 aspdotnet/setup.py | 80 ----------
 azure_iot_edge/changelog.d/18580.removed | 1 +
 azure_iot_edge/pyproject.toml | 1 -
 azure_iot_edge/setup.py | 79 ----------
 btrfs/changelog.d/18580.removed | 1 +
 btrfs/pyproject.toml | 1 -
 btrfs/setup.py | 83 ----------
 cacti/changelog.d/18580.removed | 1 +
 cacti/pyproject.toml | 4 +-
 cacti/setup.py | 81 ----------
 calico/changelog.d/18580.removed | 1 +
 calico/pyproject.toml | 1 -
 calico/setup.py | 80 ----------
 cassandra/changelog.d/18580.removed | 1 +
 cassandra/pyproject.toml | 1 -
 cassandra/setup.py | 81 ----------
 cassandra_nodetool/changelog.d/18580.removed | 1 +
 cassandra_nodetool/pyproject.toml | 1 -
 cassandra_nodetool/setup.py | 82 ----------
 ceph/changelog.d/18580.removed | 1 +
 ceph/pyproject.toml | 1 -
 ceph/setup.py | 83 ----------
 cilium/changelog.d/18580.removed | 1 +
 cilium/pyproject.toml | 1 -
 cilium/setup.py | 79 ----------
 cisco_aci/changelog.d/18580.removed | 1 +
 cisco_aci/pyproject.toml | 4 +-
 cisco_aci/setup.py | 76 ---------
 citrix_hypervisor/changelog.d/18580.removed | 1 +
 citrix_hypervisor/pyproject.toml | 1 -
 citrix_hypervisor/setup.py | 79 ----------
 clickhouse/changelog.d/18580.removed | 1 +
 clickhouse/pyproject.toml | 10 +-
 clickhouse/setup.py | 79 ----------
 cloud_foundry_api/changelog.d/18580.removed | 1 +
 cloud_foundry_api/pyproject.toml | 4 +-
 cloud_foundry_api/setup.py | 69 --------
 cockroachdb/changelog.d/18580.removed | 1 +
 cockroachdb/pyproject.toml | 1 -
 cockroachdb/setup.py | 79 ----------
 confluent_platform/changelog.d/18580.removed | 1 +
 confluent_platform/pyproject.toml | 1 -
 confluent_platform/setup.py | 79 ----------
consul/changelog.d/18580.removed | 1 + consul/pyproject.toml | 1 - consul/setup.py | 79 ---------- coredns/changelog.d/18580.removed | 1 + coredns/pyproject.toml | 1 - coredns/setup.py | 78 --------- couch/changelog.d/18580.removed | 1 + couch/pyproject.toml | 1 - couch/setup.py | 83 ---------- couchbase/changelog.d/18580.removed | 1 + couchbase/pyproject.toml | 1 - couchbase/setup.py | 83 ---------- crio/changelog.d/18580.removed | 1 + crio/pyproject.toml | 1 - crio/setup.py | 80 ---------- datadog_checks_base/changelog.d/18580.removed | 1 + datadog_checks_base/pyproject.toml | 72 +++------ datadog_checks_base/setup.py | 85 ---------- .../changelog.d/18580.removed | 1 + .../pyproject.toml | 1 - datadog_checks_dependency_provider/setup.py | 79 ---------- datadog_checks_dev/changelog.d/18580.removed | 1 + datadog_checks_dev/pyproject.toml | 18 +-- datadog_checks_dev/setup.py | 72 --------- .../changelog.d/18580.removed | 1 + datadog_checks_downloader/pyproject.toml | 8 +- .../changelog.d/18580.removed | 1 + datadog_cluster_agent/pyproject.toml | 1 - datadog_cluster_agent/setup.py | 79 ---------- directory/changelog.d/18580.removed | 1 + directory/pyproject.toml | 1 - directory/setup.py | 83 ---------- disk/changelog.d/18580.removed | 1 + disk/pyproject.toml | 1 - disk/setup.py | 81 ---------- dns_check/changelog.d/18580.removed | 1 + dns_check/pyproject.toml | 4 +- dns_check/setup.py | 81 ---------- dotnetclr/changelog.d/18580.removed | 1 + dotnetclr/pyproject.toml | 4 +- dotnetclr/setup.py | 82 ---------- druid/changelog.d/18580.removed | 1 + druid/pyproject.toml | 1 - druid/setup.py | 79 ---------- ecs_fargate/changelog.d/18580.removed | 1 + ecs_fargate/pyproject.toml | 1 - ecs_fargate/setup.py | 80 ---------- eks_fargate/changelog.d/18580.removed | 1 + eks_fargate/pyproject.toml | 1 - eks_fargate/setup.py | 79 ---------- elastic/changelog.d/18580.removed | 1 + elastic/pyproject.toml | 1 - elastic/setup.py | 82 ---------- envoy/changelog.d/18580.removed | 1 + envoy/pyproject.toml | 1 - envoy/setup.py | 80 ---------- esxi/changelog.d/18580.removed | 1 + esxi/pyproject.toml | 2 +- etcd/changelog.d/18580.removed | 1 + etcd/pyproject.toml | 1 - etcd/setup.py | 80 ---------- exchange_server/changelog.d/18580.removed | 1 + exchange_server/pyproject.toml | 4 +- exchange_server/setup.py | 80 ---------- external_dns/changelog.d/18580.removed | 1 + external_dns/pyproject.toml | 1 - external_dns/setup.py | 80 ---------- flink/changelog.d/18580.removed | 1 + flink/pyproject.toml | 1 - flink/setup.py | 79 ---------- fluentd/changelog.d/18580.removed | 1 + fluentd/pyproject.toml | 1 - fluentd/setup.py | 80 ---------- foundationdb/changelog.d/18580.removed | 1 + foundationdb/pyproject.toml | 3 +- foundationdb/setup.py | 80 ---------- gearmand/changelog.d/18580.removed | 1 + gearmand/pyproject.toml | 4 +- gearmand/setup.py | 80 ---------- gitlab/changelog.d/18580.removed | 1 + gitlab/pyproject.toml | 1 - gitlab/setup.py | 80 ---------- gitlab_runner/changelog.d/18580.removed | 1 + gitlab_runner/pyproject.toml | 1 - gitlab_runner/setup.py | 80 ---------- glusterfs/changelog.d/18580.removed | 1 + glusterfs/pyproject.toml | 1 - glusterfs/setup.py | 79 ---------- go_expvar/changelog.d/18580.removed | 1 + go_expvar/pyproject.toml | 1 - go_expvar/setup.py | 80 ---------- gunicorn/changelog.d/18580.removed | 1 + gunicorn/pyproject.toml | 1 - gunicorn/setup.py | 80 ---------- haproxy/changelog.d/18580.removed | 1 + haproxy/pyproject.toml | 1 - haproxy/setup.py | 82 ---------- harbor/changelog.d/18580.removed | 1 
+ harbor/pyproject.toml | 1 - harbor/setup.py | 79 ---------- hazelcast/changelog.d/18580.removed | 1 + hazelcast/pyproject.toml | 3 +- hazelcast/setup.py | 79 ---------- hdfs_datanode/changelog.d/18580.removed | 1 + hdfs_datanode/pyproject.toml | 1 - hdfs_datanode/setup.py | 83 ---------- hdfs_namenode/changelog.d/18580.removed | 1 + hdfs_namenode/pyproject.toml | 1 - hdfs_namenode/setup.py | 83 ---------- http_check/changelog.d/18580.removed | 1 + http_check/pyproject.toml | 7 +- http_check/setup.py | 83 ---------- hudi/changelog.d/18580.removed | 1 + hudi/pyproject.toml | 1 - hudi/setup.py | 79 ---------- hyperv/changelog.d/18580.removed | 1 + hyperv/pyproject.toml | 1 - hyperv/setup.py | 79 ---------- ibm_ace/changelog.d/18580.removed | 1 + ibm_ace/pyproject.toml | 2 +- ibm_db2/changelog.d/18580.removed | 1 + ibm_db2/pyproject.toml | 1 - ibm_db2/setup.py | 79 ---------- ibm_i/changelog.d/18580.removed | 1 + ibm_i/pyproject.toml | 2 +- ibm_mq/changelog.d/18580.removed | 1 + ibm_mq/pyproject.toml | 2 +- ibm_was/changelog.d/18580.removed | 1 + ibm_was/pyproject.toml | 1 - ibm_was/setup.py | 79 ---------- iis/changelog.d/18580.removed | 1 + iis/pyproject.toml | 4 +- iis/setup.py | 80 ---------- istio/changelog.d/18580.removed | 1 + istio/pyproject.toml | 1 - istio/setup.py | 81 ---------- journald/changelog.d/18580.removed | 1 + journald/pyproject.toml | 1 - journald/setup.py | 79 ---------- kafka/changelog.d/18580.removed | 1 + kafka/pyproject.toml | 1 - kafka/setup.py | 81 ---------- kafka_consumer/changelog.d/18580.removed | 1 + kafka_consumer/pyproject.toml | 2 +- kong/changelog.d/18580.removed | 1 + kong/pyproject.toml | 1 - kong/setup.py | 81 ---------- .../changelog.d/18580.removed | 1 + kube_apiserver_metrics/pyproject.toml | 1 - kube_apiserver_metrics/setup.py | 79 ---------- .../changelog.d/18580.removed | 1 + kube_controller_manager/pyproject.toml | 1 - kube_controller_manager/setup.py | 79 ---------- kube_dns/changelog.d/18580.removed | 1 + kube_dns/pyproject.toml | 1 - kube_dns/setup.py | 80 ---------- kube_metrics_server/changelog.d/18580.removed | 1 + kube_metrics_server/pyproject.toml | 1 - kube_metrics_server/setup.py | 79 ---------- kube_proxy/changelog.d/18580.removed | 1 + kube_proxy/pyproject.toml | 1 - kube_proxy/setup.py | 81 ---------- kube_scheduler/changelog.d/18580.removed | 1 + kube_scheduler/pyproject.toml | 1 - kube_scheduler/setup.py | 79 ---------- kubelet/changelog.d/18580.removed | 1 + kubelet/pyproject.toml | 1 - kubelet/setup.py | 80 ---------- kubernetes_state/changelog.d/18580.removed | 1 + kubernetes_state/pyproject.toml | 1 - kubernetes_state/setup.py | 80 ---------- kyototycoon/changelog.d/18580.removed | 1 + kyototycoon/pyproject.toml | 1 - kyototycoon/setup.py | 83 ---------- lighttpd/changelog.d/18580.removed | 1 + lighttpd/pyproject.toml | 1 - lighttpd/setup.py | 76 --------- linkerd/changelog.d/18580.removed | 1 + linkerd/pyproject.toml | 1 - linkerd/setup.py | 80 ---------- linux_proc_extras/changelog.d/18580.removed | 1 + linux_proc_extras/pyproject.toml | 1 - linux_proc_extras/setup.py | 81 ---------- mapr/changelog.d/18580.removed | 1 + mapr/pyproject.toml | 1 - mapr/setup.py | 79 ---------- mapreduce/changelog.d/18580.removed | 1 + mapreduce/pyproject.toml | 1 - mapreduce/setup.py | 83 ---------- marathon/changelog.d/18580.removed | 1 + marathon/pyproject.toml | 1 - marathon/setup.py | 80 ---------- marklogic/changelog.d/18580.removed | 1 + marklogic/pyproject.toml | 1 - marklogic/setup.py | 69 -------- mcache/changelog.d/18580.removed | 1 
+ mcache/pyproject.toml | 4 +- mcache/setup.py | 77 --------- mesos_master/changelog.d/18580.removed | 1 + mesos_master/pyproject.toml | 1 - mesos_master/setup.py | 80 ---------- mesos_slave/changelog.d/18580.removed | 1 + mesos_slave/pyproject.toml | 1 - mesos_slave/setup.py | 80 ---------- mongo/changelog.d/18580.removed | 1 + mongo/pyproject.toml | 3 +- mysql/changelog.d/18580.removed | 1 + mysql/pyproject.toml | 9 +- nagios/changelog.d/18580.removed | 1 + nagios/pyproject.toml | 1 - nagios/setup.py | 81 ---------- network/changelog.d/18580.removed | 1 + network/pyproject.toml | 1 - network/setup.py | 83 ---------- nfsstat/changelog.d/18580.removed | 1 + nfsstat/pyproject.toml | 1 - nfsstat/setup.py | 80 ---------- nginx/changelog.d/18580.removed | 1 + nginx/pyproject.toml | 1 - nginx/setup.py | 80 ---------- .../changelog.d/18580.removed | 1 + nginx_ingress_controller/pyproject.toml | 1 - nginx_ingress_controller/setup.py | 79 ---------- openldap/changelog.d/18580.removed | 1 + openldap/pyproject.toml | 1 - openldap/setup.py | 78 --------- openmetrics/changelog.d/18580.removed | 1 + openmetrics/pyproject.toml | 1 - openmetrics/setup.py | 79 ---------- openstack/changelog.d/18580.removed | 1 + openstack/pyproject.toml | 1 - openstack/setup.py | 83 ---------- .../changelog.d/18580.removed | 1 + openstack_controller/pyproject.toml | 2 +- pan_firewall/changelog.d/18580.removed | 1 + pan_firewall/pyproject.toml | 1 - pan_firewall/setup.py | 79 ---------- pdh_check/changelog.d/18580.removed | 1 + pdh_check/pyproject.toml | 4 +- pdh_check/setup.py | 82 ---------- pgbouncer/changelog.d/18580.removed | 1 + pgbouncer/pyproject.toml | 3 +- pgbouncer/setup.py | 77 --------- php_fpm/changelog.d/18580.removed | 1 + php_fpm/pyproject.toml | 1 - php_fpm/setup.py | 80 ---------- postfix/changelog.d/18580.removed | 1 + postfix/pyproject.toml | 1 - postfix/setup.py | 80 ---------- postgres/changelog.d/18580.removed | 1 + postgres/pyproject.toml | 13 +- powerdns_recursor/changelog.d/18580.removed | 1 + powerdns_recursor/pyproject.toml | 1 - powerdns_recursor/setup.py | 85 ---------- process/changelog.d/18580.removed | 1 + process/pyproject.toml | 1 - process/setup.py | 80 ---------- prometheus/changelog.d/18580.removed | 1 + prometheus/pyproject.toml | 1 - prometheus/setup.py | 80 ---------- proxysql/changelog.d/18580.removed | 1 + proxysql/pyproject.toml | 4 +- proxysql/setup.py | 79 ---------- rabbitmq/changelog.d/18580.removed | 1 + rabbitmq/pyproject.toml | 1 - rabbitmq/setup.py | 80 ---------- redisdb/changelog.d/18580.removed | 1 + redisdb/pyproject.toml | 4 +- redisdb/setup.py | 77 --------- rethinkdb/changelog.d/18580.removed | 1 + rethinkdb/pyproject.toml | 1 - rethinkdb/setup.py | 79 ---------- riak/changelog.d/18580.removed | 1 + riak/pyproject.toml | 1 - riak/setup.py | 81 ---------- riakcs/changelog.d/18580.removed | 1 + riakcs/pyproject.toml | 3 +- riakcs/setup.py | 80 ---------- sap_hana/changelog.d/18580.removed | 1 + sap_hana/pyproject.toml | 1 - sap_hana/setup.py | 79 ---------- scylla/changelog.d/18580.removed | 1 + scylla/pyproject.toml | 1 - scylla/setup.py | 84 ---------- sidekiq/changelog.d/18580.removed | 1 + sidekiq/pyproject.toml | 1 - sidekiq/setup.py | 79 ---------- silk/changelog.d/18580.removed | 1 + silk/pyproject.toml | 1 - silk/setup.py | 78 --------- singlestore/changelog.d/18580.removed | 1 + singlestore/pyproject.toml | 4 +- singlestore/setup.py | 79 ---------- snmp/changelog.d/18580.removed | 1 + snmp/pyproject.toml | 9 +- snmp/setup.py | 80 ---------- 
snowflake/changelog.d/18580.removed | 1 + snowflake/pyproject.toml | 2 +- sonarqube/changelog.d/18580.removed | 1 + sonarqube/pyproject.toml | 1 - sonarqube/setup.py | 79 ---------- spark/changelog.d/18580.removed | 1 + spark/pyproject.toml | 4 +- spark/setup.py | 80 ---------- sqlserver/changelog.d/18580.removed | 1 + sqlserver/pyproject.toml | 7 +- squid/changelog.d/18580.removed | 1 + squid/pyproject.toml | 1 - squid/setup.py | 81 ---------- ssh_check/changelog.d/18580.removed | 1 + ssh_check/pyproject.toml | 4 +- ssh_check/setup.py | 80 ---------- statsd/changelog.d/18580.removed | 1 + statsd/pyproject.toml | 1 - statsd/setup.py | 80 ---------- supervisord/changelog.d/18580.removed | 1 + supervisord/pyproject.toml | 1 - supervisord/setup.py | 80 ---------- system_core/changelog.d/18580.removed | 1 + system_core/pyproject.toml | 1 - system_core/setup.py | 80 ---------- system_swap/changelog.d/18580.removed | 1 + system_swap/pyproject.toml | 1 - system_swap/setup.py | 80 ---------- tcp_check/changelog.d/18580.removed | 1 + tcp_check/pyproject.toml | 1 - tcp_check/setup.py | 80 ---------- teamcity/changelog.d/18580.removed | 1 + teamcity/pyproject.toml | 1 - teamcity/setup.py | 76 --------- tenable/changelog.d/18580.removed | 1 + tenable/pyproject.toml | 1 - tenable/setup.py | 79 ---------- teradata/changelog.d/18580.removed | 1 + teradata/pyproject.toml | 1 - tls/changelog.d/18580.removed | 1 + tls/pyproject.toml | 8 +- tls/setup.py | 79 ---------- tokumx/setup.py | 78 --------- tomcat/changelog.d/18580.removed | 1 + tomcat/pyproject.toml | 1 - tomcat/setup.py | 81 ---------- twemproxy/changelog.d/18580.removed | 1 + twemproxy/pyproject.toml | 1 - twemproxy/setup.py | 80 ---------- twistlock/changelog.d/18580.removed | 1 + twistlock/pyproject.toml | 1 - twistlock/setup.py | 79 ---------- varnish/changelog.d/18580.removed | 1 + varnish/pyproject.toml | 1 - varnish/setup.py | 83 ---------- vault/changelog.d/18580.removed | 1 + vault/pyproject.toml | 1 - vault/setup.py | 80 ---------- vertica/changelog.d/18580.removed | 1 + vertica/pyproject.toml | 4 +- vertica/setup.py | 79 ---------- voltdb/changelog.d/18580.removed | 1 + voltdb/pyproject.toml | 1 - voltdb/setup.py | 81 ---------- vsphere/changelog.d/18580.removed | 1 + vsphere/pyproject.toml | 5 +- vsphere/setup.py | 76 --------- weblogic/changelog.d/18580.removed | 1 + weblogic/pyproject.toml | 1 - weblogic/setup.py | 79 ---------- win32_event_log/changelog.d/18580.removed | 1 + win32_event_log/pyproject.toml | 4 +- win32_event_log/setup.py | 80 ---------- windows_service/changelog.d/18580.removed | 1 + windows_service/pyproject.toml | 4 +- windows_service/setup.py | 80 ---------- wmi_check/changelog.d/18580.removed | 1 + wmi_check/pyproject.toml | 4 +- wmi_check/setup.py | 80 ---------- yarn/changelog.d/18580.removed | 1 + yarn/pyproject.toml | 1 - yarn/setup.py | 83 ---------- zk/changelog.d/18580.removed | 1 + zk/pyproject.toml | 1 - zk/setup.py | 80 ---------- 450 files changed, 294 insertions(+), 11703 deletions(-) create mode 100644 active_directory/changelog.d/18580.removed delete mode 100644 active_directory/setup.py create mode 100644 activemq/changelog.d/18580.removed delete mode 100644 activemq/setup.py create mode 100644 activemq_xml/changelog.d/18580.removed delete mode 100644 activemq_xml/setup.py create mode 100644 aerospike/changelog.d/18580.removed delete mode 100644 aerospike/setup.py create mode 100644 amazon_msk/changelog.d/18580.removed delete mode 100644 amazon_msk/setup.py create mode 100644 
ambari/changelog.d/18580.removed delete mode 100644 ambari/setup.py create mode 100644 apache/changelog.d/18580.removed delete mode 100644 apache/setup.py create mode 100644 aspdotnet/changelog.d/18580.removed delete mode 100644 aspdotnet/setup.py create mode 100644 azure_iot_edge/changelog.d/18580.removed delete mode 100644 azure_iot_edge/setup.py create mode 100644 btrfs/changelog.d/18580.removed delete mode 100644 btrfs/setup.py create mode 100644 cacti/changelog.d/18580.removed delete mode 100644 cacti/setup.py create mode 100644 calico/changelog.d/18580.removed delete mode 100644 calico/setup.py create mode 100644 cassandra/changelog.d/18580.removed delete mode 100644 cassandra/setup.py create mode 100644 cassandra_nodetool/changelog.d/18580.removed delete mode 100644 cassandra_nodetool/setup.py create mode 100644 ceph/changelog.d/18580.removed delete mode 100644 ceph/setup.py create mode 100644 cilium/changelog.d/18580.removed delete mode 100644 cilium/setup.py create mode 100644 cisco_aci/changelog.d/18580.removed delete mode 100644 cisco_aci/setup.py create mode 100644 citrix_hypervisor/changelog.d/18580.removed delete mode 100644 citrix_hypervisor/setup.py create mode 100644 clickhouse/changelog.d/18580.removed delete mode 100644 clickhouse/setup.py create mode 100644 cloud_foundry_api/changelog.d/18580.removed delete mode 100644 cloud_foundry_api/setup.py create mode 100644 cockroachdb/changelog.d/18580.removed delete mode 100644 cockroachdb/setup.py create mode 100644 confluent_platform/changelog.d/18580.removed delete mode 100644 confluent_platform/setup.py create mode 100644 consul/changelog.d/18580.removed delete mode 100644 consul/setup.py create mode 100644 coredns/changelog.d/18580.removed delete mode 100644 coredns/setup.py create mode 100644 couch/changelog.d/18580.removed delete mode 100644 couch/setup.py create mode 100644 couchbase/changelog.d/18580.removed delete mode 100644 couchbase/setup.py create mode 100644 crio/changelog.d/18580.removed delete mode 100644 crio/setup.py create mode 100644 datadog_checks_base/changelog.d/18580.removed delete mode 100644 datadog_checks_base/setup.py create mode 100644 datadog_checks_dependency_provider/changelog.d/18580.removed delete mode 100644 datadog_checks_dependency_provider/setup.py create mode 100644 datadog_checks_dev/changelog.d/18580.removed delete mode 100644 datadog_checks_dev/setup.py create mode 100644 datadog_checks_downloader/changelog.d/18580.removed create mode 100644 datadog_cluster_agent/changelog.d/18580.removed delete mode 100644 datadog_cluster_agent/setup.py create mode 100644 directory/changelog.d/18580.removed delete mode 100644 directory/setup.py create mode 100644 disk/changelog.d/18580.removed delete mode 100644 disk/setup.py create mode 100644 dns_check/changelog.d/18580.removed delete mode 100644 dns_check/setup.py create mode 100644 dotnetclr/changelog.d/18580.removed delete mode 100644 dotnetclr/setup.py create mode 100644 druid/changelog.d/18580.removed delete mode 100644 druid/setup.py create mode 100644 ecs_fargate/changelog.d/18580.removed delete mode 100644 ecs_fargate/setup.py create mode 100644 eks_fargate/changelog.d/18580.removed delete mode 100644 eks_fargate/setup.py create mode 100644 elastic/changelog.d/18580.removed delete mode 100644 elastic/setup.py create mode 100644 envoy/changelog.d/18580.removed delete mode 100644 envoy/setup.py create mode 100644 esxi/changelog.d/18580.removed create mode 100644 etcd/changelog.d/18580.removed delete mode 100644 etcd/setup.py create mode 100644 
exchange_server/changelog.d/18580.removed delete mode 100644 exchange_server/setup.py create mode 100644 external_dns/changelog.d/18580.removed delete mode 100644 external_dns/setup.py create mode 100644 flink/changelog.d/18580.removed delete mode 100644 flink/setup.py create mode 100644 fluentd/changelog.d/18580.removed delete mode 100644 fluentd/setup.py create mode 100644 foundationdb/changelog.d/18580.removed delete mode 100644 foundationdb/setup.py create mode 100644 gearmand/changelog.d/18580.removed delete mode 100644 gearmand/setup.py create mode 100644 gitlab/changelog.d/18580.removed delete mode 100644 gitlab/setup.py create mode 100644 gitlab_runner/changelog.d/18580.removed delete mode 100644 gitlab_runner/setup.py create mode 100644 glusterfs/changelog.d/18580.removed delete mode 100644 glusterfs/setup.py create mode 100644 go_expvar/changelog.d/18580.removed delete mode 100644 go_expvar/setup.py create mode 100644 gunicorn/changelog.d/18580.removed delete mode 100644 gunicorn/setup.py create mode 100644 haproxy/changelog.d/18580.removed delete mode 100644 haproxy/setup.py create mode 100644 harbor/changelog.d/18580.removed delete mode 100644 harbor/setup.py create mode 100644 hazelcast/changelog.d/18580.removed delete mode 100644 hazelcast/setup.py create mode 100644 hdfs_datanode/changelog.d/18580.removed delete mode 100644 hdfs_datanode/setup.py create mode 100644 hdfs_namenode/changelog.d/18580.removed delete mode 100644 hdfs_namenode/setup.py create mode 100644 http_check/changelog.d/18580.removed delete mode 100644 http_check/setup.py create mode 100644 hudi/changelog.d/18580.removed delete mode 100644 hudi/setup.py create mode 100644 hyperv/changelog.d/18580.removed delete mode 100644 hyperv/setup.py create mode 100644 ibm_ace/changelog.d/18580.removed create mode 100644 ibm_db2/changelog.d/18580.removed delete mode 100644 ibm_db2/setup.py create mode 100644 ibm_i/changelog.d/18580.removed create mode 100644 ibm_mq/changelog.d/18580.removed create mode 100644 ibm_was/changelog.d/18580.removed delete mode 100644 ibm_was/setup.py create mode 100644 iis/changelog.d/18580.removed delete mode 100644 iis/setup.py create mode 100644 istio/changelog.d/18580.removed delete mode 100644 istio/setup.py create mode 100644 journald/changelog.d/18580.removed delete mode 100644 journald/setup.py create mode 100644 kafka/changelog.d/18580.removed delete mode 100644 kafka/setup.py create mode 100644 kafka_consumer/changelog.d/18580.removed create mode 100644 kong/changelog.d/18580.removed delete mode 100644 kong/setup.py create mode 100644 kube_apiserver_metrics/changelog.d/18580.removed delete mode 100644 kube_apiserver_metrics/setup.py create mode 100644 kube_controller_manager/changelog.d/18580.removed delete mode 100644 kube_controller_manager/setup.py create mode 100644 kube_dns/changelog.d/18580.removed delete mode 100644 kube_dns/setup.py create mode 100644 kube_metrics_server/changelog.d/18580.removed delete mode 100644 kube_metrics_server/setup.py create mode 100644 kube_proxy/changelog.d/18580.removed delete mode 100644 kube_proxy/setup.py create mode 100644 kube_scheduler/changelog.d/18580.removed delete mode 100644 kube_scheduler/setup.py create mode 100644 kubelet/changelog.d/18580.removed delete mode 100644 kubelet/setup.py create mode 100644 kubernetes_state/changelog.d/18580.removed delete mode 100644 kubernetes_state/setup.py create mode 100644 kyototycoon/changelog.d/18580.removed delete mode 100644 kyototycoon/setup.py create mode 100644 
lighttpd/changelog.d/18580.removed delete mode 100644 lighttpd/setup.py create mode 100644 linkerd/changelog.d/18580.removed delete mode 100644 linkerd/setup.py create mode 100644 linux_proc_extras/changelog.d/18580.removed delete mode 100644 linux_proc_extras/setup.py create mode 100644 mapr/changelog.d/18580.removed delete mode 100644 mapr/setup.py create mode 100644 mapreduce/changelog.d/18580.removed delete mode 100644 mapreduce/setup.py create mode 100644 marathon/changelog.d/18580.removed delete mode 100644 marathon/setup.py create mode 100644 marklogic/changelog.d/18580.removed delete mode 100644 marklogic/setup.py create mode 100644 mcache/changelog.d/18580.removed delete mode 100644 mcache/setup.py create mode 100644 mesos_master/changelog.d/18580.removed delete mode 100644 mesos_master/setup.py create mode 100644 mesos_slave/changelog.d/18580.removed delete mode 100644 mesos_slave/setup.py create mode 100644 mongo/changelog.d/18580.removed create mode 100644 mysql/changelog.d/18580.removed create mode 100644 nagios/changelog.d/18580.removed delete mode 100644 nagios/setup.py create mode 100644 network/changelog.d/18580.removed delete mode 100644 network/setup.py create mode 100644 nfsstat/changelog.d/18580.removed delete mode 100644 nfsstat/setup.py create mode 100644 nginx/changelog.d/18580.removed delete mode 100644 nginx/setup.py create mode 100644 nginx_ingress_controller/changelog.d/18580.removed delete mode 100644 nginx_ingress_controller/setup.py create mode 100644 openldap/changelog.d/18580.removed delete mode 100644 openldap/setup.py create mode 100644 openmetrics/changelog.d/18580.removed delete mode 100644 openmetrics/setup.py create mode 100644 openstack/changelog.d/18580.removed delete mode 100644 openstack/setup.py create mode 100644 openstack_controller/changelog.d/18580.removed create mode 100644 pan_firewall/changelog.d/18580.removed delete mode 100644 pan_firewall/setup.py create mode 100644 pdh_check/changelog.d/18580.removed delete mode 100644 pdh_check/setup.py create mode 100644 pgbouncer/changelog.d/18580.removed delete mode 100644 pgbouncer/setup.py create mode 100644 php_fpm/changelog.d/18580.removed delete mode 100644 php_fpm/setup.py create mode 100644 postfix/changelog.d/18580.removed delete mode 100644 postfix/setup.py create mode 100644 postgres/changelog.d/18580.removed create mode 100644 powerdns_recursor/changelog.d/18580.removed delete mode 100644 powerdns_recursor/setup.py create mode 100644 process/changelog.d/18580.removed delete mode 100644 process/setup.py create mode 100644 prometheus/changelog.d/18580.removed delete mode 100644 prometheus/setup.py create mode 100644 proxysql/changelog.d/18580.removed delete mode 100644 proxysql/setup.py create mode 100644 rabbitmq/changelog.d/18580.removed delete mode 100644 rabbitmq/setup.py create mode 100644 redisdb/changelog.d/18580.removed delete mode 100644 redisdb/setup.py create mode 100644 rethinkdb/changelog.d/18580.removed delete mode 100644 rethinkdb/setup.py create mode 100644 riak/changelog.d/18580.removed delete mode 100644 riak/setup.py create mode 100644 riakcs/changelog.d/18580.removed delete mode 100644 riakcs/setup.py create mode 100644 sap_hana/changelog.d/18580.removed delete mode 100644 sap_hana/setup.py create mode 100644 scylla/changelog.d/18580.removed delete mode 100644 scylla/setup.py create mode 100644 sidekiq/changelog.d/18580.removed delete mode 100644 sidekiq/setup.py create mode 100644 silk/changelog.d/18580.removed delete mode 100644 silk/setup.py create mode 100644 
singlestore/changelog.d/18580.removed delete mode 100644 singlestore/setup.py create mode 100644 snmp/changelog.d/18580.removed delete mode 100644 snmp/setup.py create mode 100644 snowflake/changelog.d/18580.removed create mode 100644 sonarqube/changelog.d/18580.removed delete mode 100644 sonarqube/setup.py create mode 100644 spark/changelog.d/18580.removed delete mode 100644 spark/setup.py create mode 100644 sqlserver/changelog.d/18580.removed create mode 100644 squid/changelog.d/18580.removed delete mode 100644 squid/setup.py create mode 100644 ssh_check/changelog.d/18580.removed delete mode 100644 ssh_check/setup.py create mode 100644 statsd/changelog.d/18580.removed delete mode 100644 statsd/setup.py create mode 100644 supervisord/changelog.d/18580.removed delete mode 100644 supervisord/setup.py create mode 100644 system_core/changelog.d/18580.removed delete mode 100644 system_core/setup.py create mode 100644 system_swap/changelog.d/18580.removed delete mode 100644 system_swap/setup.py create mode 100644 tcp_check/changelog.d/18580.removed delete mode 100644 tcp_check/setup.py create mode 100644 teamcity/changelog.d/18580.removed delete mode 100644 teamcity/setup.py create mode 100644 tenable/changelog.d/18580.removed delete mode 100644 tenable/setup.py create mode 100644 teradata/changelog.d/18580.removed create mode 100644 tls/changelog.d/18580.removed delete mode 100644 tls/setup.py delete mode 100644 tokumx/setup.py create mode 100644 tomcat/changelog.d/18580.removed delete mode 100644 tomcat/setup.py create mode 100644 twemproxy/changelog.d/18580.removed delete mode 100644 twemproxy/setup.py create mode 100644 twistlock/changelog.d/18580.removed delete mode 100644 twistlock/setup.py create mode 100644 varnish/changelog.d/18580.removed delete mode 100644 varnish/setup.py create mode 100644 vault/changelog.d/18580.removed delete mode 100644 vault/setup.py create mode 100644 vertica/changelog.d/18580.removed delete mode 100644 vertica/setup.py create mode 100644 voltdb/changelog.d/18580.removed delete mode 100644 voltdb/setup.py create mode 100644 vsphere/changelog.d/18580.removed delete mode 100644 vsphere/setup.py create mode 100644 weblogic/changelog.d/18580.removed delete mode 100644 weblogic/setup.py create mode 100644 win32_event_log/changelog.d/18580.removed delete mode 100644 win32_event_log/setup.py create mode 100644 windows_service/changelog.d/18580.removed delete mode 100644 windows_service/setup.py create mode 100644 wmi_check/changelog.d/18580.removed delete mode 100644 wmi_check/setup.py create mode 100644 yarn/changelog.d/18580.removed delete mode 100644 yarn/setup.py create mode 100644 zk/changelog.d/18580.removed delete mode 100644 zk/setup.py diff --git a/.ddev/config.toml b/.ddev/config.toml index d1250705c2b2c..e61e8b7c0a9e8 100644 --- a/.ddev/config.toml +++ b/.ddev/config.toml @@ -105,12 +105,9 @@ simplejson = ['MIT'] supervisor = ['BSD-3-Clause-Modification'] # https://github.com/Cairnarvon/uptime/blob/master/COPYING.txt uptime = ['BSD-2-Clause'] -# https://github.com/hickeroar/win_inet_pton/blob/master/LICENSE -win-inet-pton = ['Unlicense'] [overrides.dependencies.repo] PyYAML = 'https://github.com/yaml/pyyaml' -contextlib2 = 'https://github.com/jazzband/contextlib2' dnspython = 'https://github.com/rthalley/dnspython' foundationdb = 'https://github.com/apple/foundationdb' in-toto = 'https://github.com/in-toto/in-toto' @@ -127,7 +124,6 @@ service-identity = 'https://github.com/pyca/service-identity' snowflake-connector-python = 
'https://github.com/snowflakedb/snowflake-connector-python' supervisor = 'https://github.com/Supervisor/supervisor' tuf = 'https://github.com/theupdateframework/python-tuf' -typing = 'https://github.com/python/typing' [overrides.validate.openmetrics] exclude = [ diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index dd696d324e017..0fae915aade09 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -1,5 +1,4 @@ Component,Origin,License,Copyright -PyJWT,PyPI,MIT,Copyright (c) 2015 José Padilla PyJWT,PyPI,MIT,Copyright (c) 2015-2022 José Padilla PyMySQL,PyPI,MIT,"Copyright (c) 2010, 2013 PyMySQL contributors" PySocks,PyPI,BSD-3-Clause,Copyright 2006 Dan-Haim. All rights reserved. @@ -7,38 +6,28 @@ PyYAML,PyPI,MIT,Copyright (c) 2017-2021 Ingy döt Net aerospike,PyPI,Apache-2.0,"Copyright Aerospike, Inc." aws-requests-auth,PyPI,BSD-3-Clause,Copyright (c) David Muller. azure-identity,PyPI,MIT,Copyright (c) Microsoft Corporation. -beautifulsoup4,PyPI,MIT,Copyright (c) 2004-2017 Leonard Richardson beautifulsoup4,PyPI,MIT,Copyright (c) Leonard Richardson binary,PyPI,Apache-2.0,Copyright 2018 Ofek Lev binary,PyPI,MIT,Copyright 2018 Ofek Lev boto3,PyPI,Apache-2.0,"Copyright 2013-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved." -boto3,PyPI,Apache-2.0,Copyright 2014 Amazon Web Services botocore,PyPI,Apache-2.0,"Copyright 2012-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved." -botocore,PyPI,Apache-2.0,Copyright 2012 Amazon Web Services -cachetools,PyPI,MIT,Copyright (c) 2014-2019 Thomas Kemmer cachetools,PyPI,MIT,Copyright (c) 2014-2024 Thomas Kemmer check-postgres,"https://github.com/bucardo/",BSD-2-Clause,Copyright 2007 - 2023 Greg Sabino Mullane clickhouse-cityhash,PyPI,MIT,"Copyright (c) 2011, Alexander Marshalov " clickhouse-driver,PyPI,MIT,Copyright (c) 2017 by Konstantin Lebedev. cm-client,PyPI,Apache-2.0, confluent-kafka,PyPI,Apache-2.0,Copyright (C) 1995-1998 Jean-loup Gailly and Mark Adler -contextlib2,PyPI,PSF,Copyright (c) 2016 Nick Coghlan cryptography,PyPI,Apache-2.0,Copyright (c) Individual contributors. cryptography,PyPI,BSD-3-Clause,Copyright (c) Individual contributors. cryptography,PyPI,PSF,Copyright (c) Individual contributors. -ddtrace,PyPI,BSD-3-Clause,"Copyright (c) 2016, Datadog " ddtrace,PyPI,BSD-3-Clause,"Copyright 2016 Datadog, Inc." dnspython,PyPI,ISC,Copyright (C) Dnspython Contributors -enum34,PyPI,BSD-3-Clause,Copyright Ethan Furman flup,Vendor,BSD-3-Clause,Copyright (c) 2005 Allan Saddi. All Rights Reserved. flup-py3,Vendor,BSD-3-Clause,"Copyright (c) 2005, 2006 Allan Saddi All rights reserved." foundationdb,PyPI,Apache-2.0,Copyright 2017 FoundationDB -futures,PyPI,PSF,Copyright (c) 2015 Brian Quinlan -gearman,PyPI,Apache-2.0,Copyright 2010 Yelp hazelcast-python-client,PyPI,Apache-2.0,"Copyright (c) 2008-2023, Hazelcast, Inc. All Rights Reserved." importlib-metadata,PyPI,Apache-2.0,"Copyright 2017-2019 Jason R. Coombs, Barry Warsaw" in-toto,PyPI,Apache-2.0,Copyright 2018 New York University -ipaddress,PyPI,PSF,Copyright (c) 2013 Philipp Hagemeister jellyfish,PyPI,MIT,Copyright (c) 2015 James Turk kentik-snmp-profiles,"https://github.com/kentik/snmp-profiles",Apache-2.0, kubernetes,PyPI,Apache-2.0,Copyright 2014 The Kubernetes Authors. @@ -46,7 +35,6 @@ ldap3,PyPI,LGPL-3.0-only,Copyright 2013 - 2020 Giovanni Cannata lxml,PyPI,BSD-3-Clause,Copyright (c) 2004 Infrae. All rights reserved. 
lz4,PyPI,BSD-3-Clause,"Copyright (c) 2012-2013, Steeve Morin" mmh3,PyPI,CC0-1.0,Copyright (c) 2011-2023 Hajime Senuma -mmh3,PyPI,CC0-1.0,Hajime Senuma. mmh3 is dedicated to the public domain under CC0-1.0. oauthlib,PyPI,BSD-3-Clause,Copyright (c) 2019 The OAuthlib Community openstacksdk,PyPI,Apache-2.0,Copyright OpenStack orjson,PyPI,Apache-2.0,Copyright 2018 ijl @@ -55,9 +43,7 @@ packaging,PyPI,Apache-2.0,Copyright (c) Donald Stufft and individual contributor packaging,PyPI,BSD-3-Clause,Copyright (c) Donald Stufft and individual contributors. paramiko,PyPI,LGPL-2.1-only,Copyright (C) 2009 Jeff Forcier ply,PyPI,BSD-3-Clause,Copyright (C) 2001-2018 -prometheus-client,PyPI,Apache-2.0,Copyright 2015 Brian Brazil prometheus-client,PyPI,Apache-2.0,Copyright 2015 The Prometheus Authors -protobuf,PyPI,BSD-3-Clause,Copyright 2008 Google Inc. protobuf,PyPI,BSD-3-Clause,Copyright 2008 Google Inc. All rights reserved. psutil,PyPI,BSD-3-Clause,"Copyright (c) 2009, Jay Loden, Dave Daeschler, Giampaolo Rodola" psycopg2-binary,PyPI,BSD-3-Clause,Copyright 2013 Federico Di Gregorio @@ -71,34 +57,28 @@ pymongo,PyPI,Apache-2.0,Copyright The MongoDB Python Team pymqi,PyPI,PSF,Copyright (c) Zato Source s.r.o. pyodbc,PyPI,MIT,Copyright (c) 2008 Michael Kleehammer pysmi,PyPI,BSD-2-Clause,Copyright (c) 2015-2020 Ilya Etingof -pysmi,PyPI,BSD-3-Clause,Copyright (c) 2015-2019 Ilya Etingof pysmi,PyPI,BSD-3-Clause,Copyright (c) 2015-2020 Ilya Etingof pysnmp,PyPI,BSD-2-Clause,"Copyright (c) 2005-2019, Ilya Etingof " pysnmp,PyPI,BSD-3-Clause,"Copyright (c) 2005-2019, Ilya Etingof " pysnmp-mibs,PyPI,BSD-3-Clause,"Copyright (c) 2005-2016, Ilya Etingof " -python-binary-memcached,PyPI,MIT,Copyright (c) 2011 Jayson Reis python-binary-memcached,PyPI,MIT,Copyright (c) 2011 Jayson Reis python-dateutil,PyPI,Apache-2.0,Copyright 2017- Paul Ganssle python-dateutil,PyPI,BSD-3-Clause,Copyright 2017- Paul Ganssle python3-gearman,PyPI,Apache-2.0, -pyvmomi,PyPI,Apache-2.0,"Copyright (c) 2005-2021 VMware, Inc. All Rights Reserved." pyvmomi,PyPI,Apache-2.0,Copyright (c) 2005-2024 Broadcom. All Rights Reserved. pywin32,PyPI,PSF,Copyright 2002-2003 by Blackdog Software Pty Ltd. redis,PyPI,MIT,"Copyright (c) 2022-2023, Redis, inc." -redis,PyPI,MIT,Copyright (c) 2012 Andy McCurdy requests,PyPI,Apache-2.0,Copyright 2019 Kenneth Reitz requests-kerberos,PyPI,ISC,Copyright (c) 2012 Kenneth Reitz requests-ntlm,PyPI,ISC,Copyright (c) 2013 Ben Toews requests-oauthlib,PyPI,BSD-3-Clause,Copyright (c) 2014 Kenneth Reitz. requests-oauthlib,PyPI,ISC,Copyright (c) 2014 Kenneth Reitz. requests-toolbelt,PyPI,Apache-2.0,"Copyright 2014 Ian Cordasco, Cory Benfield" -requests-unixsocket,PyPI,Apache-2.0,Copyright 2014 Marc Abramowitz requests-unixsocket2,PyPI,ISC,Copyright (c) 2024 - 2024 thelab rethinkdb,PyPI,Apache-2.0,Copyright 2018 RethinkDB. 
scandir,PyPI,BSD-3-Clause,"Copyright (c) 2012, Ben Hoyt" securesystemslib,PyPI,MIT,Copyright (c) 2016 Santiago Torres semver,PyPI,BSD-3-Clause,"Copyright (c) 2013, Konstantine Rybnikov" -service-identity,PyPI,MIT,Copyright (c) 2014 Hynek Schlawack service-identity,PyPI,MIT,Copyright (c) 2014 Hynek Schlawack and the service-identity contributors simplejson,PyPI,MIT,Copyright (c) 2006 Bob Ippolito six,PyPI,MIT,Copyright (c) 2010-2020 Benjamin Peterson @@ -106,8 +86,6 @@ snowflake-connector-python,PyPI,Apache-2.0,"Copyright (c) 2013-2023 Snowflake Co supervisor,PyPI,BSD-3-Clause-Modification,"Copyright (c) 2002-2005, Daniel Krech, http://eikeon.com/" tuf,PyPI,Apache-2.0,Copyright (c) 2010 New York University tuf,PyPI,MIT,Copyright (c) 2010 New York University -typing,PyPI,PSF,"Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam" uptime,PyPI,BSD-2-Clause,"Copyright (c) 2012, Koen Crolla" vertica-python,PyPI,Apache-2.0,"Copyright 2013 Justin Berka, Alex Kim, Siting Ren" -win-inet-pton,PyPI,Unlicense,Ryan Vennell. win-inet-pton is dedicated to the public domain under Unlicense. wrapt,PyPI,BSD-3-Clause,"Copyright (c) 2013-2023, Graham Dumpleton" diff --git a/active_directory/changelog.d/18580.removed b/active_directory/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/active_directory/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/active_directory/pyproject.toml b/active_directory/pyproject.toml index 4ec2b9fc68f4c..4c890ddf12e26 100644 --- a/active_directory/pyproject.toml +++ b/active_directory/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/active_directory/setup.py b/active_directory/setup.py deleted file mode 100644 index 2ea547128b813..0000000000000 --- a/active_directory/setup.py +++ /dev/null @@ -1,85 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# Always prefer setuptools over distutils -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import setup - -here = path.abspath(path.dirname(__file__)) - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'active_directory', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-active_directory', - version=ABOUT["__version__"], - description='The Active Directory check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent active directory check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.active_directory'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/activemq/changelog.d/18580.removed b/activemq/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/activemq/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/activemq/pyproject.toml b/activemq/pyproject.toml index 3faa8fad8b300..b42281225f9dc 100644 --- a/activemq/pyproject.toml +++ b/activemq/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/activemq/setup.py b/activemq/setup.py deleted file mode 100644 index 24ba318f628dd..0000000000000 --- a/activemq/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'activemq', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-activemq', - version=ABOUT['__version__'], - description='The ActiveMQ check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent activemq check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.activemq'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/activemq_xml/changelog.d/18580.removed b/activemq_xml/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/activemq_xml/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/activemq_xml/pyproject.toml b/activemq_xml/pyproject.toml index 51543fa3bbde6..6280e20a75d14 100644 --- a/activemq_xml/pyproject.toml +++ b/activemq_xml/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/activemq_xml/setup.py b/activemq_xml/setup.py deleted file mode 100644 index acc4f6ccdf37e..0000000000000 --- a/activemq_xml/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "activemq_xml", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-activemq_xml', - version=ABOUT["__version__"], - description='The ActiveMQ XML check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent activemq_xml check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.activemq_xml'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/aerospike/changelog.d/18580.removed b/aerospike/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/aerospike/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/aerospike/pyproject.toml b/aerospike/pyproject.toml index 860201dbab418..ff847c5883619 100644 --- a/aerospike/pyproject.toml +++ b/aerospike/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -40,8 +39,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "aerospike==4.0.0; sys_platform != 'win32' and sys_platform != 'darwin' and python_version < '3.0'", - "aerospike==7.1.1; sys_platform != 'win32' and sys_platform != 'darwin' and python_version > '3.0'", + "aerospike==7.1.1; sys_platform != 'win32' and sys_platform != 'darwin'", ] [project.urls] diff --git a/aerospike/setup.py b/aerospike/setup.py deleted file mode 100644 index 00ce46d661a01..0000000000000 --- a/aerospike/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'aerospike', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-aerospike', - version=ABOUT['__version__'], - description='The Aerospike check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent aerospike check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.aerospike'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/agent_requirements.in b/agent_requirements.in index 722a198af6118..2d667d7ca21ea 100644 --- a/agent_requirements.in +++ b/agent_requirements.in @@ -1,117 +1,74 @@ -aerospike==4.0.0; sys_platform != 'win32' and sys_platform != 'darwin' and python_version < '3.0' -aerospike==7.1.1; sys_platform != 'win32' and sys_platform != 'darwin' and python_version > '3.0' +aerospike==7.1.1; sys_platform != 'win32' and sys_platform != 'darwin' aws-requests-auth==0.4.3 -azure-identity==1.17.1; python_version > '3.0' -beautifulsoup4==4.12.3; python_version > '3.0' -beautifulsoup4==4.9.3; python_version < '3.0' +azure-identity==1.17.1 +beautifulsoup4==4.12.3 binary==1.0.0 -boto3==1.17.112; python_version < '3.0' -boto3==1.35.10; python_version > '3.0' -botocore==1.20.112; python_version < '3.0' -botocore==1.35.10; python_version > '3.0' -cachetools==3.1.1; python_version < '3.0' -cachetools==5.5.0; python_version > '3.0' -clickhouse-cityhash==1.0.2.3; python_version < '3.0' -clickhouse-cityhash==1.0.2.4; python_version > '3.0' -clickhouse-driver==0.2.0; python_version < '3.0' -clickhouse-driver==0.2.9; python_version > '3.0' +boto3==1.35.10 +botocore==1.35.10 +cachetools==5.5.0 +clickhouse-cityhash==1.0.2.4 +clickhouse-driver==0.2.9 cm-client==45.0.4 -confluent-kafka==2.5.0; python_version > '3.0' -contextlib2==0.6.0.post1; python_version < '3.0' -cryptography==3.3.2; python_version < '3.0' -cryptography==43.0.0; python_version > '3.0' -ddtrace==0.32.2; sys_platform == 'win32' and python_version < '3.0' -ddtrace==0.53.2; sys_platform != 'win32' and python_version < '3.0' -ddtrace==2.10.6; python_version > '3.0' -dnspython==1.16.0; python_version < '3.0' -dnspython==2.6.1; python_version > '3.0' -enum34==1.1.10; python_version < '3.0' -foundationdb==6.3.24; python_version > '3.0' -futures==3.4.0; python_version < '3.0' -gearman==2.0.2; sys_platform != 'win32' and python_version < '3.0' -hazelcast-python-client==5.4.0; python_version > '3.0' +confluent-kafka==2.5.0 +cryptography==43.0.0 +ddtrace==2.10.6 +dnspython==2.6.1 +foundationdb==6.3.24 +hazelcast-python-client==5.4.0 importlib-metadata==2.1.3; python_version < '3.8' -in-toto==2.0.0; python_version > '3.0' -ipaddress==1.0.23; python_version < '3.0' -jellyfish==1.1.0; python_version > '3.0' -kubernetes==18.20.0; python_version < '3.0' -kubernetes==30.1.0; python_version > '3.0' +in-toto==2.0.0 +jellyfish==1.1.0 +kubernetes==30.1.0 ldap3==2.9.1 lxml==4.9.4 -lz4==2.2.1; python_version < '3.0' -lz4==4.3.3; python_version > '3.0' -mmh3==2.5.1; python_version < '3.0' -mmh3==4.1.0; python_version > '3.0' -oauthlib==3.1.0; python_version < '3.0' -oauthlib==3.2.2; python_version > '3.0' -openstacksdk==3.3.0; python_version > '3.0' -orjson==3.10.7; 
python_version > '3.0' -packaging==24.1; python_version > '3.0' -paramiko==2.12.0; python_version < '3.0' -paramiko==3.4.1; python_version > '3.0' +lz4==4.3.3 +mmh3==4.1.0 +oauthlib==3.2.2 +openstacksdk==3.3.0 +orjson==3.10.7 +packaging==24.1 +paramiko==3.4.1 ply==3.11 -prometheus-client==0.12.0; python_version < '3.0' -prometheus-client==0.20.0; python_version > '3.0' -protobuf==3.17.3; python_version < '3.0' -protobuf==5.27.3; python_version > '3.0' +prometheus-client==0.20.0 +protobuf==5.27.3 psutil==5.9.6 -psycopg2-binary==2.9.9; python_version > '3.0' +psycopg2-binary==2.9.9 pyasn1==0.4.8 pycryptodomex==3.20.0 -pydantic==2.8.2; python_version > '3.0' -pyjwt==1.7.1; python_version < '3.0' -pyjwt==2.9.0; python_version > '3.0' +pydantic==2.8.2 +pyjwt==2.9.0 pymongo[srv]==4.8.0; python_version >= '3.9' -pymqi==1.12.10; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0' -pymysql==0.10.1; python_version < '3.0' -pymysql==1.1.1; python_version > '3.0' -pyodbc==5.1.0; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0' -pyopenssl==24.2.1; python_version > '3.0' -pysmi==0.3.4; python_version < '3.0' -pysmi==1.2.1; python_version > '3.0' +pymqi==1.12.10; sys_platform != 'darwin' or platform_machine != 'arm64' +pymysql==1.1.1 +pyodbc==5.1.0; sys_platform != 'darwin' or platform_machine != 'arm64' +pyopenssl==24.2.1 +pysmi==1.2.1 pysnmp-mibs==0.1.6 -pysnmp==4.4.10; python_version < '3.0' -pysnmp==5.1.0; python_version > '3.0' +pysnmp==5.1.0 pysocks==1.7.1 -python-binary-memcached==0.26.1; sys_platform != 'win32' and python_version < '3.0' -python-binary-memcached==0.31.2; sys_platform != 'win32' and python_version > '3.0' +python-binary-memcached==0.31.2; sys_platform != 'win32' python-dateutil==2.9.0.post0 -python3-gearman==0.1.0; sys_platform != 'win32' and python_version > '3.0' -pyvmomi==8.0.0.1; python_version < '3.0' -pyvmomi==8.0.3.0.1; python_version > '3.0' -pywin32==228; sys_platform == 'win32' and python_version < '3.0' -pywin32==306; sys_platform == 'win32' and python_version > '3.0' -pyyaml==5.4.1; python_version < '3.0' -pyyaml==6.0.2; python_version > '3.0' -redis==3.5.3; python_version < '3.0' -redis==5.0.8; python_version > '3.0' -requests-kerberos==0.12.0; python_version < '3.0' -requests-kerberos==0.15.0; python_version > '3.0' -requests-ntlm==1.1.0; python_version < '3.0' -requests-ntlm==1.3.0; python_version > '3.0' -requests-oauthlib==1.3.1; python_version < '3.0' -requests-oauthlib==2.0.0; python_version > '3.0' +python3-gearman==0.1.0; sys_platform != 'win32' +pyvmomi==8.0.3.0.1 +pywin32==306; sys_platform == 'win32' +pyyaml==6.0.2 +redis==5.0.8 +requests-kerberos==0.15.0 +requests-ntlm==1.3.0 +requests-oauthlib==2.0.0 requests-toolbelt==1.0.0 -requests-unixsocket2==0.4.2; python_version > '3.0' -requests-unixsocket==0.3.0; python_version < '3.0' -requests==2.27.1; python_version < '3.0' -requests==2.32.3; python_version > '3.0' +requests-unixsocket2==0.4.2 +requests==2.32.3 rethinkdb==2.4.9 scandir==1.10.0; python_version < '3.5' -securesystemslib[crypto,pynacl]==0.28.0; python_version > '3.0' -semver==2.13.0; python_version < '3.0' -semver==3.0.2; python_version > '3.0' -service-identity[idna]==21.1.0; python_version < '3.0' -service-identity[idna]==24.1.0; python_version > '3.0' +securesystemslib[crypto,pynacl]==0.28.0 +semver==3.0.2 +service-identity[idna]==24.1.0 simplejson==3.19.3 six==1.16.0 -snowflake-connector-python==3.12.1; python_version > '3.0' +snowflake-connector-python==3.12.1 
supervisor==4.2.5 -tuf==4.0.0; python_version > '3.0' -typing==3.10.0.0; python_version < '3.0' +tuf==4.0.0 uptime==3.0.1 -vertica-python==1.2.0; python_version < '3.0' -vertica-python==1.4.0; python_version > '3.0' -win-inet-pton==1.1.0; sys_platform == 'win32' and python_version < '3.0' -wrapt==1.15.0; python_version < '3.0' -wrapt==1.16.0; python_version > '3.0' +vertica-python==1.4.0 +wrapt==1.16.0 diff --git a/amazon_msk/changelog.d/18580.removed b/amazon_msk/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/amazon_msk/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/amazon_msk/pyproject.toml b/amazon_msk/pyproject.toml index 81d3972b49142..f47548b11d133 100644 --- a/amazon_msk/pyproject.toml +++ b/amazon_msk/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "boto3==1.17.112; python_version < '3.0'", - "boto3==1.35.10; python_version > '3.0'", + "boto3==1.35.10", ] [project.urls] diff --git a/amazon_msk/setup.py b/amazon_msk/setup.py deleted file mode 100644 index f148cb3d7aec9..0000000000000 --- a/amazon_msk/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'amazon_msk', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-amazon-msk', - version=ABOUT['__version__'], - description='The Amazon MSK check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent amazon_msk check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.amazon_msk'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ambari/changelog.d/18580.removed b/ambari/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ambari/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ambari/pyproject.toml b/ambari/pyproject.toml index 7f41c77326c99..77806c16a9596 100644 --- a/ambari/pyproject.toml +++ b/ambari/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/ambari/setup.py b/ambari/setup.py deleted file mode 100644 index 37f027cf7b3b7..0000000000000 --- a/ambari/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'ambari', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-ambari', - version=ABOUT['__version__'], - description='The Ambari check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ambari check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ambari'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/apache/changelog.d/18580.removed b/apache/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/apache/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/apache/pyproject.toml b/apache/pyproject.toml index 01d91d39381d3..977063e134e8c 100644 --- a/apache/pyproject.toml +++ b/apache/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/apache/setup.py b/apache/setup.py deleted file mode 100644 index 99e625e0bbd19..0000000000000 --- a/apache/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def read(*parts): - with open(path.join(HERE, *parts), 'r') as fp: - return fp.read() - - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "apache", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-apache', - version=ABOUT["__version__"], - description='The Apache Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent apache check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.apache'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/aspdotnet/changelog.d/18580.removed b/aspdotnet/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/aspdotnet/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/aspdotnet/pyproject.toml b/aspdotnet/pyproject.toml index 898d12f8f3218..ec447d479c201 100644 --- a/aspdotnet/pyproject.toml +++ b/aspdotnet/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/aspdotnet/setup.py b/aspdotnet/setup.py deleted file mode 100644 index 049af0318ba70..0000000000000 --- a/aspdotnet/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'aspdotnet', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-aspdotnet', - version=ABOUT["__version__"], - description='The ASP .NET check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent aspdotnet check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.aspdotnet'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/azure_iot_edge/changelog.d/18580.removed b/azure_iot_edge/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/azure_iot_edge/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/azure_iot_edge/pyproject.toml b/azure_iot_edge/pyproject.toml index b8d5245a15bc9..bf2283ba5c6bf 100644 --- a/azure_iot_edge/pyproject.toml +++ b/azure_iot_edge/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/azure_iot_edge/setup.py b/azure_iot_edge/setup.py deleted file mode 100644 index fae140c5524ee..0000000000000 --- a/azure_iot_edge/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'azure_iot_edge', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-azure_iot_edge', - version=ABOUT['__version__'], - description='The Azure IoT Edge check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent azure_iot_edge check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.azure_iot_edge'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/btrfs/changelog.d/18580.removed b/btrfs/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/btrfs/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/btrfs/pyproject.toml b/btrfs/pyproject.toml index d3ea981217a9b..69ac8f8563694 100644 --- a/btrfs/pyproject.toml +++ b/btrfs/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/btrfs/setup.py b/btrfs/setup.py deleted file mode 100644 index 487d6d25ab411..0000000000000 --- a/btrfs/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "btrfs", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-btrfs', - version=ABOUT["__version__"], - description='The Btrfs check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent btrfs check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.btrfs'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cacti/changelog.d/18580.removed b/cacti/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cacti/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cacti/pyproject.toml b/cacti/pyproject.toml index 0ecb46fc1c53a..081135d2a36f7 100644 --- a/cacti/pyproject.toml +++ b/cacti/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pymysql==0.10.1; python_version < '3.0'", - "pymysql==1.1.1; python_version > '3.0'", + "pymysql==1.1.1", ] [project.urls] diff --git a/cacti/setup.py b/cacti/setup.py deleted file mode 100644 index c9060a4c4287b..0000000000000 --- a/cacti/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cacti', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-cacti', - version=ABOUT['__version__'], - description='The Cacti check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cacti check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cacti'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/calico/changelog.d/18580.removed b/calico/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/calico/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/calico/pyproject.toml b/calico/pyproject.toml index 445d97a29a962..56abfd47d08f3 100644 --- a/calico/pyproject.toml +++ b/calico/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/calico/setup.py b/calico/setup.py deleted file mode 100644 index a78fbe9582e35..0000000000000 --- a/calico/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2022-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'calico', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-calico', - version=ABOUT['__version__'], - description='The calico check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent calico check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Max', - author_email='max@galadrim.fr', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.calico'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cassandra/changelog.d/18580.removed b/cassandra/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cassandra/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cassandra/pyproject.toml b/cassandra/pyproject.toml index 730b72c08629a..b247564c5d336 100644 --- a/cassandra/pyproject.toml +++ b/cassandra/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/cassandra/setup.py b/cassandra/setup.py deleted file mode 100644 index 09b5c2cef51bf..0000000000000 --- a/cassandra/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cassandra', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-cassandra', - version=ABOUT['__version__'], - description='The Cassandra check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cassandra check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cassandra'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cassandra_nodetool/changelog.d/18580.removed b/cassandra_nodetool/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cassandra_nodetool/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cassandra_nodetool/pyproject.toml b/cassandra_nodetool/pyproject.toml index 828a470d296f4..10d14859d3313 100644 --- a/cassandra_nodetool/pyproject.toml +++ b/cassandra_nodetool/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/cassandra_nodetool/setup.py b/cassandra_nodetool/setup.py deleted file mode 100644 index e42ed2a7bf813..0000000000000 --- a/cassandra_nodetool/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cassandra_nodetool', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-cassandra_nodetool', - version=ABOUT['__version__'], - description='The Cassandra Nodetool check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cassandra_nodetool check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cassandra_nodetool'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ceph/changelog.d/18580.removed b/ceph/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ceph/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ceph/pyproject.toml b/ceph/pyproject.toml index 5471f87e75a9e..ecfde2326b5d4 100644 --- a/ceph/pyproject.toml +++ b/ceph/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/ceph/setup.py b/ceph/setup.py deleted file mode 100644 index 940c4d9bf6849..0000000000000 --- a/ceph/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "ceph", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-ceph', - version=ABOUT["__version__"], - description='The Ceph check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ceph check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ceph'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cilium/changelog.d/18580.removed b/cilium/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cilium/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cilium/pyproject.toml b/cilium/pyproject.toml index a7006609b026a..b8a31cc3cc53b 100644 --- a/cilium/pyproject.toml +++ b/cilium/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/cilium/setup.py b/cilium/setup.py deleted file mode 100644 index 0e114f36f8882..0000000000000 --- a/cilium/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cilium', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-cilium', - version=ABOUT['__version__'], - description='The Cilium check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cilium check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cilium'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cisco_aci/changelog.d/18580.removed b/cisco_aci/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cisco_aci/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cisco_aci/pyproject.toml b/cisco_aci/pyproject.toml index a9e10c72da631..703e39f823cf4 100644 --- a/cisco_aci/pyproject.toml +++ b/cisco_aci/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "cryptography==3.3.2; python_version < '3.0'", - "cryptography==43.0.0; python_version > '3.0'", + "cryptography==43.0.0", ] [project.urls] diff --git a/cisco_aci/setup.py b/cisco_aci/setup.py deleted file mode 100644 index bb0d752a9b8f9..0000000000000 --- a/cisco_aci/setup.py +++ /dev/null @@ -1,76 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "cisco_aci", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-cisco_aci', - version=ABOUT["__version__"], - description='The Cisco ACI check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent Cisco ACI check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.cisco_aci'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/citrix_hypervisor/changelog.d/18580.removed b/citrix_hypervisor/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/citrix_hypervisor/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/citrix_hypervisor/pyproject.toml b/citrix_hypervisor/pyproject.toml index 0fa032091d111..b1efb277e4c16 100644 --- a/citrix_hypervisor/pyproject.toml +++ b/citrix_hypervisor/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/citrix_hypervisor/setup.py b/citrix_hypervisor/setup.py deleted file mode 100644 index 2f79e6c5c0e4c..0000000000000 --- a/citrix_hypervisor/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'citrix_hypervisor', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-citrix_hypervisor', - version=ABOUT['__version__'], - description='The citrix_hypervisor check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent citrix_hypervisor check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.citrix_hypervisor'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/clickhouse/changelog.d/18580.removed b/clickhouse/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/clickhouse/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/clickhouse/pyproject.toml b/clickhouse/pyproject.toml index c9095e406a976..1f16c0f0e5523 100644 --- a/clickhouse/pyproject.toml +++ b/clickhouse/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,12 +38,9 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "clickhouse-cityhash==1.0.2.3; python_version < '3.0'", - "clickhouse-cityhash==1.0.2.4; python_version > '3.0'", - "clickhouse-driver==0.2.0; python_version < '3.0'", - "clickhouse-driver==0.2.9; python_version > '3.0'", - "lz4==2.2.1; python_version < '3.0'", - "lz4==4.3.3; python_version > '3.0'", + "clickhouse-cityhash==1.0.2.4", + "clickhouse-driver==0.2.9", + "lz4==4.3.3", ] [project.urls] diff --git a/clickhouse/setup.py b/clickhouse/setup.py deleted file mode 100644 index 1488cdd47ab38..0000000000000 --- a/clickhouse/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'clickhouse', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-clickhouse', - version=ABOUT['__version__'], - description='The ClickHouse check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent clickhouse check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.clickhouse'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cloud_foundry_api/changelog.d/18580.removed b/cloud_foundry_api/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cloud_foundry_api/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cloud_foundry_api/pyproject.toml b/cloud_foundry_api/pyproject.toml index 6bf6c9c01d54b..9b7921cbfd05e 100644 --- a/cloud_foundry_api/pyproject.toml +++ b/cloud_foundry_api/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "semver==2.13.0; python_version < '3.0'", - "semver==3.0.2; python_version > '3.0'", + "semver==3.0.2", ] [project.urls] diff --git a/cloud_foundry_api/setup.py b/cloud_foundry_api/setup.py deleted file mode 100644 index 4919aeabc2492..0000000000000 --- a/cloud_foundry_api/setup.py +++ /dev/null @@ -1,69 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cloud_foundry_api', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-cloud_foundry_api', - version=ABOUT['__version__'], - description='The Cloud Foundry API check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cloud_foundry_api check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cloud_foundry_api'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/cockroachdb/changelog.d/18580.removed b/cockroachdb/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/cockroachdb/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/cockroachdb/pyproject.toml b/cockroachdb/pyproject.toml index cea008dc05b86..f13e06a527fa9 100644 --- a/cockroachdb/pyproject.toml +++ b/cockroachdb/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/cockroachdb/setup.py b/cockroachdb/setup.py deleted file mode 100644 index 1e5538e942b87..0000000000000 --- a/cockroachdb/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'cockroachdb', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-cockroachdb', - version=ABOUT['__version__'], - description='The CockroachDB check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent cockroachdb check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.cockroachdb'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/confluent_platform/changelog.d/18580.removed b/confluent_platform/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/confluent_platform/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/confluent_platform/pyproject.toml b/confluent_platform/pyproject.toml index 510a46340b966..b3b5252141166 100644 --- a/confluent_platform/pyproject.toml +++ b/confluent_platform/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/confluent_platform/setup.py b/confluent_platform/setup.py deleted file mode 100644 index c0455d8ea6f2a..0000000000000 --- a/confluent_platform/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'confluent_platform', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-confluent_platform', - version=ABOUT['__version__'], - description='The Confluent Platform check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent confluent_platform check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.confluent_platform'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/consul/changelog.d/18580.removed b/consul/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/consul/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/consul/pyproject.toml b/consul/pyproject.toml index 74f978bacf163..55cf34f8ebb11 100644 --- a/consul/pyproject.toml +++ b/consul/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/consul/setup.py b/consul/setup.py deleted file mode 100644 index f68be9a3fbad1..0000000000000 --- a/consul/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "consul", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-consul', - version=ABOUT['__version__'], - description='The Consul Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent consul check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.consul'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/coredns/changelog.d/18580.removed b/coredns/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/coredns/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/coredns/pyproject.toml b/coredns/pyproject.toml index 7abd8cf1d1ce5..ceec99e2da61b 100644 --- a/coredns/pyproject.toml +++ b/coredns/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/coredns/setup.py b/coredns/setup.py deleted file mode 100644 index 05a7671dc396f..0000000000000 --- a/coredns/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "coredns", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-coredns', - version=ABOUT["__version__"], - description='CoreDNS collects DNS metrics in Kubernetes.', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: 
System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # The package we're going to ship - packages=['datadog_checks.coredns'], - include_package_data=True, -) diff --git a/couch/changelog.d/18580.removed b/couch/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/couch/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/couch/pyproject.toml b/couch/pyproject.toml index c1646a577184f..993a987d05f7e 100644 --- a/couch/pyproject.toml +++ b/couch/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/couch/setup.py b/couch/setup.py deleted file mode 100644 index 8665cb8287966..0000000000000 --- a/couch/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "couch", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-couch', - version=ABOUT["__version__"], - description='The CouchDB check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent couch check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.couch'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/couchbase/changelog.d/18580.removed b/couchbase/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/couchbase/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/couchbase/pyproject.toml b/couchbase/pyproject.toml index 2b233099b8ed4..9b3d7c5557ff7 100644 --- a/couchbase/pyproject.toml +++ b/couchbase/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/couchbase/setup.py b/couchbase/setup.py deleted file mode 100644 index 1d82606d6c2e0..0000000000000 --- a/couchbase/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "couchbase", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-couchbase', - version=ABOUT['__version__'], - description='The Couchbase check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent couchbase check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.couchbase'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/crio/changelog.d/18580.removed b/crio/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/crio/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/crio/pyproject.toml b/crio/pyproject.toml index 564380350ff42..6a1fc4966154a 100644 --- a/crio/pyproject.toml +++ b/crio/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/crio/setup.py b/crio/setup.py deleted file mode 100644 index e203abf365993..0000000000000 --- a/crio/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'crio', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-crio', - version=ABOUT['__version__'], - description='The Crio check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent crio check container runtime kubernetes', - # The project's main homepage. 
-    url='https://github.com/DataDog/integrations-core',
-    # Author details
-    author='Datadog',
-    author_email='packages@datadoghq.com',
-    # License
-    license='BSD-3-Clause',
-    # See https://pypi.org/classifiers
-    classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'Intended Audience :: System Administrators',
-        'Topic :: System :: Monitoring',
-        'License :: OSI Approved :: BSD License',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3.11',
-    ],
-    # The package we're going to ship
-    packages=['datadog_checks.crio'],
-    # Run-time dependencies
-    install_requires=[CHECKS_BASE_REQ],
-    extras_require={'deps': parse_pyproject_array('deps')},
-    # Extra files to ship with the wheel package
-    include_package_data=True,
-)
diff --git a/datadog_checks_base/changelog.d/18580.removed b/datadog_checks_base/changelog.d/18580.removed
new file mode 100644
index 0000000000000..1029637dfcead
--- /dev/null
+++ b/datadog_checks_base/changelog.d/18580.removed
@@ -0,0 +1 @@
+Remove support for Python 2.
diff --git a/datadog_checks_base/pyproject.toml b/datadog_checks_base/pyproject.toml
index 8ba058fa77203..6134ffd9228ef 100644
--- a/datadog_checks_base/pyproject.toml
+++ b/datadog_checks_base/pyproject.toml
@@ -23,7 +23,6 @@ classifiers = [
     "Intended Audience :: Developers",
     "Intended Audience :: System Administrators",
     "License :: OSI Approved :: BSD License",
-    "Programming Language :: Python :: 2.7",
     "Programming Language :: Python :: 3.11",
     "Topic :: System :: Monitoring",
 ]
@@ -34,71 +33,46 @@ license = "BSD-3-Clause"
 
 [project.optional-dependencies]
 db = [
-    "mmh3==2.5.1; python_version < '3.0'",
-    "mmh3==4.1.0; python_version > '3.0'",
+    "mmh3==4.1.0",
 ]
 deps = [
     "binary==1.0.0",
-    "cachetools==3.1.1; python_version < '3.0'",
-    "cachetools==5.5.0; python_version > '3.0'",
-    "contextlib2==0.6.0.post1; python_version < '3.0'",
-    "cryptography==3.3.2; python_version < '3.0'",
-    "cryptography==43.0.0; python_version > '3.0'",
-    "ddtrace==0.32.2; sys_platform == 'win32' and python_version < '3.0'",
-    "ddtrace==0.53.2; sys_platform != 'win32' and python_version < '3.0'",
-    "ddtrace==2.10.6; python_version > '3.0'",
-    "enum34==1.1.10; python_version < '3.0'",
+    "cachetools==5.5.0",
+    "cryptography==43.0.0",
+    "ddtrace==2.10.6",
     "importlib-metadata==2.1.3; python_version < '3.8'",
-    "ipaddress==1.0.23; python_version < '3.0'",
-    "jellyfish==1.1.0; python_version > '3.0'",
-    "prometheus-client==0.12.0; python_version < '3.0'",
-    "prometheus-client==0.20.0; python_version > '3.0'",
-    "protobuf==3.17.3; python_version < '3.0'",
-    "protobuf==5.27.3; python_version > '3.0'",
-    "pydantic==2.8.2; python_version > '3.0'",
+    "jellyfish==1.1.0",
+    "prometheus-client==0.20.0",
+    "protobuf==5.27.3",
+    "pydantic==2.8.2",
     "python-dateutil==2.9.0.post0",
-    "pywin32==228; sys_platform == 'win32' and python_version < '3.0'",
-    "pywin32==306; sys_platform == 'win32' and python_version > '3.0'",
-    "pyyaml==5.4.1; python_version < '3.0'",
-    "pyyaml==6.0.2; python_version > '3.0'",
+    "pywin32==306; sys_platform == 'win32'",
+    "pyyaml==6.0.2",
     "requests-toolbelt==1.0.0",
-    "requests-unixsocket2==0.4.2; python_version > '3.0'",
-    "requests-unixsocket==0.3.0; python_version < '3.0'",
-    "requests==2.27.1; python_version < '3.0'",
-    "requests==2.32.3; python_version > '3.0'",
+    "requests-unixsocket2==0.4.2",
+    "requests==2.32.3",
     "simplejson==3.19.3",
     "six==1.16.0",
-    "typing==3.10.0.0; python_version < '3.0'",
     "uptime==3.0.1",
-    "wrapt==1.15.0; python_version < '3.0'",
-    "wrapt==1.16.0; python_version > '3.0'",
+    "wrapt==1.16.0",
 ]
 http = [
     "aws-requests-auth==0.4.3",
-    "botocore==1.20.112; python_version < '3.0'",
-    "botocore==1.35.10; python_version > '3.0'",
-    "oauthlib==3.1.0; python_version < '3.0'",
-    "oauthlib==3.2.2; python_version > '3.0'",
-    "pyjwt==1.7.1; python_version < '3.0'",
-    "pyjwt==2.9.0; python_version > '3.0'",
-    "pyopenssl==24.2.1; python_version > '3.0'",
+    "botocore==1.35.10",
+    "oauthlib==3.2.2",
+    "pyjwt==2.9.0",
+    "pyopenssl==24.2.1",
     "pysocks==1.7.1",
-    "requests-kerberos==0.12.0; python_version < '3.0'",
-    "requests-kerberos==0.15.0; python_version > '3.0'",
-    "requests-ntlm==1.1.0; python_version < '3.0'",
-    "requests-ntlm==1.3.0; python_version > '3.0'",
-    "requests-oauthlib==1.3.1; python_version < '3.0'",
-    "requests-oauthlib==2.0.0; python_version > '3.0'",
-    "win-inet-pton==1.1.0; sys_platform == 'win32' and python_version < '3.0'",
+    "requests-kerberos==0.15.0",
+    "requests-ntlm==1.3.0",
+    "requests-oauthlib==2.0.0",
 ]
 json = [
-    "orjson==3.10.7; python_version > '3.0'",
+    "orjson==3.10.7",
 ]
 kube = [
-    "kubernetes==18.20.0; python_version < '3.0'",
-    "kubernetes==30.1.0; python_version > '3.0'",
-    "requests-oauthlib==1.3.1; python_version < '3.0'",
-    "requests-oauthlib==2.0.0; python_version > '3.0'",
+    "kubernetes==30.1.0",
+    "requests-oauthlib==2.0.0",
 ]
 
 [project.urls]
diff --git a/datadog_checks_base/setup.py b/datadog_checks_base/setup.py
deleted file mode 100644
index 424c8b06abf87..0000000000000
--- a/datadog_checks_base/setup.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# (C) Datadog, Inc. 2018-present
-# All rights reserved
-# Licensed under a 3-clause BSD style license (see LICENSE)
-from codecs import open # To use a consistent encoding
-from os import path
-
-from setuptools import setup
-
-HERE = path.abspath(path.dirname(__file__))
-
-ABOUT = {}
-with open(path.join(HERE, "datadog_checks", "base", "__about__.py")) as f:
-    exec(f.read(), ABOUT)
-
-# Get the long description from the README file
-LONG_DESC = ""
-with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
-    LONG_DESC = f.read()
-
-
-def get_requirements(fpath, exclude=None, only=None):
-    if exclude is None:
-        exclude = []
-    if only is None:
-        only = []
-
-    with open(path.join(HERE, fpath), encoding='utf-8') as f:
-        requirements = []
-        for line in f:
-            name = line.split("==")[0]
-            if only:
-                if name in only:
-                    requirements.append(line.rstrip())
-            else:
-                if name not in exclude:
-                    requirements.append(line.rstrip())
-        return requirements
-
-
-def parse_pyproject_array(name):
-    import os
-    import re
-    from ast import literal_eval
-
-    pattern = r'^{} = (\[.+?\])$'.format(name)
-
-    with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f:
-        # Windows \r\n prevents match
-        contents = '\n'.join(line.rstrip() for line in f.readlines())
-
-    array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1)
-    return literal_eval(array)
-
-
-setup(
-    # Version should always match one from an agent release
-    version=ABOUT["__version__"],
-    name='datadog-checks-base',
-    description='The Datadog Check Toolkit',
-    long_description=LONG_DESC,
-    long_description_content_type='text/markdown',
-    keywords='datadog agent checks',
-    url='https://github.com/DataDog/integrations-core',
-    author='Datadog',
-    author_email='packages@datadoghq.com',
-    license='BSD',
-    # See https://pypi.org/classifiers
-    classifiers=[
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'Topic :: System :: Monitoring',
- 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks'], - include_package_data=True, - extras_require={ - 'deps': parse_pyproject_array('deps'), - 'db': parse_pyproject_array('db'), - 'http': parse_pyproject_array('http'), - 'json': parse_pyproject_array('json'), - 'kube': parse_pyproject_array('kube'), - }, -) diff --git a/datadog_checks_dependency_provider/changelog.d/18580.removed b/datadog_checks_dependency_provider/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/datadog_checks_dependency_provider/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/datadog_checks_dependency_provider/pyproject.toml b/datadog_checks_dependency_provider/pyproject.toml index 54cda63035ebd..e2c1264517a6a 100644 --- a/datadog_checks_dependency_provider/pyproject.toml +++ b/datadog_checks_dependency_provider/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: System Administrators", "Topic :: System :: Monitoring", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", ] dependencies = [ diff --git a/datadog_checks_dependency_provider/setup.py b/datadog_checks_dependency_provider/setup.py deleted file mode 100644 index a70b64ca6d917..0000000000000 --- a/datadog_checks_dependency_provider/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'datadog_checks_dependency_provider', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-datadog_checks_dependency_provider', - version=ABOUT['__version__'], - description='The datadog_checks_dependency_provider check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent datadog_checks_dependency_provider check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.datadog_checks_dependency_provider'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/datadog_checks_dev/changelog.d/18580.removed b/datadog_checks_dev/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/datadog_checks_dev/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/datadog_checks_dev/pyproject.toml b/datadog_checks_dev/pyproject.toml index 66a79ce6bf642..9c4b6796cb238 100644 --- a/datadog_checks_dev/pyproject.toml +++ b/datadog_checks_dev/pyproject.toml @@ -28,30 +28,22 @@ classifiers = [ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", ] dependencies = [ - "contextlib2; python_version < '3.0'", "coverage>=5.0.3", - "flaky; python_version < '3.0'", - "flaky>=3.8.0; python_version > '3.0'", + "flaky>=3.8.0", "mock", "psutil", - "py>=1.8.2; python_version < '3.0'", # https://github.com/ionelmc/pytest-benchmark/issues/226 - "pytest==4.6.11; python_version < '3.0'", - "pytest==8.1.1; python_version > '3.0'", - "pytest-asyncio>=0.23.4; python_version > '3.0'", - "pytest-benchmark[histogram]<4.0.0; python_version < '3.0'", - "pytest-benchmark[histogram]>=4.0.0; python_version > '3.0'", + "pytest==8.1.1", + "pytest-asyncio>=0.23.4", + "pytest-benchmark[histogram]>=4.0.0", "pytest-cov>=2.6.1", - "pytest-memray>=1.4.0; python_version > '3.0' and (platform_system=='Linux' or platform_system=='Darwin')", + "pytest-memray>=1.4.0; platform_system=='Linux' or platform_system=='Darwin'", "pytest-mock", "pyyaml>=5.4.1", "requests>=2.22.0", - "shutilwhich==1.1.0; python_version < '3.0'", "six", - "subprocess32==3.5.4; python_version < '3.0'", "tenacity", ] dynamic = [ diff --git a/datadog_checks_dev/setup.py b/datadog_checks_dev/setup.py deleted file mode 100644 index 4bc7b1cfe6007..0000000000000 --- a/datadog_checks_dev/setup.py +++ /dev/null @@ -1,72 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from io import open -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -with open(path.join(HERE, 'datadog_checks', 'dev', '__about__.py'), 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line.startswith('__version__'): - VERSION = line.split('=')[1].strip(' \'"') - break - else: - VERSION = '0.0.1' - -with open(path.join(HERE, 'README.md'), 'r', encoding='utf-8') as f: - README = f.read() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.+?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -setup( - name='datadog_checks_dev', - version=VERSION, - description='The Datadog Checks Developer Tools', - long_description=README, - long_description_content_type='text/markdown', - keywords='datadog agent checks dev tools tests', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', - ], - packages=['datadog_checks', 'datadog_checks.dev'], - install_requires=parse_pyproject_array('dependencies'), - # TODO: Uncomment when we fully drop Python 2 - # python_requires='>=3.7', - include_package_data=True, - extras_require={'cli': parse_pyproject_array('cli')}, - entry_points={ - 'pytest11': ['datadog_checks = datadog_checks.dev.plugin.pytest'], - 'console_scripts': ['ddev = datadog_checks.dev.tooling.cli:ddev'], - }, -) diff --git a/datadog_checks_downloader/changelog.d/18580.removed b/datadog_checks_downloader/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/datadog_checks_downloader/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/datadog_checks_downloader/pyproject.toml b/datadog_checks_downloader/pyproject.toml index ed4ec5c6579c7..89ad5c54ec84d 100644 --- a/datadog_checks_downloader/pyproject.toml +++ b/datadog_checks_downloader/pyproject.toml @@ -35,10 +35,10 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "in-toto==2.0.0; python_version > '3.0'", - "packaging==24.1; python_version > '3.0'", - "securesystemslib[crypto,pynacl]==0.28.0; python_version > '3.0'", - "tuf==4.0.0; python_version > '3.0'", + "in-toto==2.0.0", + "packaging==24.1", + "securesystemslib[crypto,pynacl]==0.28.0", + "tuf==4.0.0", ] [project.urls] diff --git a/datadog_cluster_agent/changelog.d/18580.removed b/datadog_cluster_agent/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/datadog_cluster_agent/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/datadog_cluster_agent/pyproject.toml b/datadog_cluster_agent/pyproject.toml index 12737d08ad66a..a26faebb5773b 100644 --- a/datadog_cluster_agent/pyproject.toml +++ b/datadog_cluster_agent/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/datadog_cluster_agent/setup.py b/datadog_cluster_agent/setup.py deleted file mode 100644 index dbbe975bd0884..0000000000000 --- a/datadog_cluster_agent/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'datadog_cluster_agent', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-datadog_cluster_agent', - version=ABOUT['__version__'], - description='The Datadog-Cluster-Agent check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent datadog_cluster_agent check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.datadog_cluster_agent'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/directory/changelog.d/18580.removed b/directory/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/directory/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/directory/pyproject.toml b/directory/pyproject.toml index 121560d1e3aad..d86492021fe18 100644 --- a/directory/pyproject.toml +++ b/directory/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/directory/setup.py b/directory/setup.py deleted file mode 100644 index 820dceee090ff..0000000000000 --- a/directory/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "directory", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-directory', - version=ABOUT["__version__"], - description='The Directory check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent directory check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='MIT', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.directory'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/disk/changelog.d/18580.removed b/disk/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/disk/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/disk/pyproject.toml b/disk/pyproject.toml index 7ce713fea8236..638bae2a48b34 100644 --- a/disk/pyproject.toml +++ b/disk/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/disk/setup.py b/disk/setup.py deleted file mode 100644 index 91f5b886427e1..0000000000000 --- a/disk/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "disk", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-disk', - version=ABOUT["__version__"], - description='The Disk check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent disk check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='MIT', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.disk'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/dns_check/changelog.d/18580.removed b/dns_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/dns_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/dns_check/pyproject.toml b/dns_check/pyproject.toml index b912c585aa967..f9c7f80ae6ca4 100644 --- a/dns_check/pyproject.toml +++ b/dns_check/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "dnspython==1.16.0; python_version < '3.0'", - "dnspython==2.6.1; python_version > '3.0'", + "dnspython==2.6.1", ] [project.urls] diff --git a/dns_check/setup.py b/dns_check/setup.py deleted file mode 100644 index 5e307b3f00add..0000000000000 --- a/dns_check/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "dns_check", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-dns_check', - version=ABOUT['__version__'], - description='The DNS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent dns_check check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.dns_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/dotnetclr/changelog.d/18580.removed b/dotnetclr/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/dotnetclr/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/dotnetclr/pyproject.toml b/dotnetclr/pyproject.toml index be607fcd9ffa6..72a9a1f23eeb6 100644 --- a/dotnetclr/pyproject.toml +++ b/dotnetclr/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/dotnetclr/setup.py b/dotnetclr/setup.py deleted file mode 100644 index ac9151c26e304..0000000000000 --- a/dotnetclr/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'dotnetclr', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-dotnetclr', - version=ABOUT["__version__"], - description='The .NET CLR check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent .NET CLR check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.dotnetclr'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/druid/changelog.d/18580.removed b/druid/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/druid/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/druid/pyproject.toml b/druid/pyproject.toml index 860877af7ebe4..5748ccdea4708 100644 --- a/druid/pyproject.toml +++ b/druid/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/druid/setup.py b/druid/setup.py deleted file mode 100644 index cc4563f123e5b..0000000000000 --- a/druid/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'druid', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-druid', - version=ABOUT['__version__'], - description='The druid check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent druid check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.druid'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ecs_fargate/changelog.d/18580.removed b/ecs_fargate/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ecs_fargate/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ecs_fargate/pyproject.toml b/ecs_fargate/pyproject.toml index b9e64f7923d68..ee4dcf77128ce 100644 --- a/ecs_fargate/pyproject.toml +++ b/ecs_fargate/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/ecs_fargate/setup.py b/ecs_fargate/setup.py deleted file mode 100644 index 0e43e910b8bde..0000000000000 --- a/ecs_fargate/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'ecs_fargate', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-ecs_fargate', - version=ABOUT['__version__'], - description='The ECS Fargate check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ecs_fargate check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ecs_fargate'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/eks_fargate/changelog.d/18580.removed b/eks_fargate/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/eks_fargate/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/eks_fargate/pyproject.toml b/eks_fargate/pyproject.toml index f29e3469504b4..c0d369b029518 100644 --- a/eks_fargate/pyproject.toml +++ b/eks_fargate/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/eks_fargate/setup.py b/eks_fargate/setup.py deleted file mode 100644 index 57c62bca7fb59..0000000000000 --- a/eks_fargate/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020 -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'eks_fargate', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-eks_fargate', - version=ABOUT['__version__'], - description='The eks_fargate check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent eks_fargate check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.eks_fargate'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/elastic/changelog.d/18580.removed b/elastic/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/elastic/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/elastic/pyproject.toml b/elastic/pyproject.toml index b42a575884d10..b81be0f851e5a 100644 --- a/elastic/pyproject.toml +++ b/elastic/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/elastic/setup.py b/elastic/setup.py deleted file mode 100644 index 6b243c8985b4d..0000000000000 --- a/elastic/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "elastic", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-elastic', - version=ABOUT["__version__"], - description='The Elastic Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent elastic check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.elastic'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/envoy/changelog.d/18580.removed b/envoy/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/envoy/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/envoy/pyproject.toml b/envoy/pyproject.toml index 51fb995e5060c..cab901eee95b1 100644 --- a/envoy/pyproject.toml +++ b/envoy/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/envoy/setup.py b/envoy/setup.py deleted file mode 100644 index 410b81275c667..0000000000000 --- a/envoy/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'envoy', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-envoy', - version=ABOUT['__version__'], - description='The Envoy check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent envoy check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.envoy'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/esxi/changelog.d/18580.removed b/esxi/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/esxi/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/esxi/pyproject.toml b/esxi/pyproject.toml index c2b91effe4c3a..ce9b56b83225d 100644 --- a/esxi/pyproject.toml +++ b/esxi/pyproject.toml @@ -38,7 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ "pysocks==1.7.1", - "pyvmomi==8.0.3.0.1; python_version > '3.0'", + "pyvmomi==8.0.3.0.1", ] [project.urls] diff --git a/etcd/changelog.d/18580.removed b/etcd/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/etcd/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/etcd/pyproject.toml b/etcd/pyproject.toml index 48a8514971ec3..09024a3f45830 100644 --- a/etcd/pyproject.toml +++ b/etcd/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/etcd/setup.py b/etcd/setup.py deleted file mode 100644 index f5eedbb2d9b40..0000000000000 --- a/etcd/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'etcd', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-etcd', - version=ABOUT['__version__'], - description='The Etcd check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent etcd check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.etcd'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/exchange_server/changelog.d/18580.removed b/exchange_server/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/exchange_server/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/exchange_server/pyproject.toml b/exchange_server/pyproject.toml index 920a00a193e5b..d48128619c891 100644 --- a/exchange_server/pyproject.toml +++ b/exchange_server/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/exchange_server/setup.py b/exchange_server/setup.py deleted file mode 100644 index e35042819e96c..0000000000000 --- a/exchange_server/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'exchange_server', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-exchange_server', - version=ABOUT["__version__"], - description='The MS Exchange check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent exchange check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.exchange_server'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/external_dns/changelog.d/18580.removed b/external_dns/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/external_dns/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/external_dns/pyproject.toml b/external_dns/pyproject.toml index 4fe6ee4dcaae9..6e3e4e253cae1 100644 --- a/external_dns/pyproject.toml +++ b/external_dns/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/external_dns/setup.py b/external_dns/setup.py deleted file mode 100644 index 1b01b68182bcf..0000000000000 --- a/external_dns/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'external_dns', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-external_dns', - version=ABOUT['__version__'], - description='The ExternalDNS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent external_dns check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.external_dns'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/flink/changelog.d/18580.removed b/flink/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/flink/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/flink/pyproject.toml b/flink/pyproject.toml index 29fefd81378a5..2130b12282146 100644 --- a/flink/pyproject.toml +++ b/flink/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/flink/setup.py b/flink/setup.py deleted file mode 100644 index aa1a86bf1f80c..0000000000000 --- a/flink/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'flink', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-flink', - version=ABOUT['__version__'], - description='The Flink check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent flink check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.flink'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/fluentd/changelog.d/18580.removed b/fluentd/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/fluentd/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/fluentd/pyproject.toml b/fluentd/pyproject.toml index 79c042d7b22c8..4a41be9d1c2f7 100644 --- a/fluentd/pyproject.toml +++ b/fluentd/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/fluentd/setup.py b/fluentd/setup.py deleted file mode 100644 index e3c5f8336cca1..0000000000000 --- a/fluentd/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'fluentd', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-fluentd', - version=ABOUT['__version__'], - description='The Fluentd check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent fluentd check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.fluentd'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/foundationdb/changelog.d/18580.removed b/foundationdb/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/foundationdb/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/foundationdb/pyproject.toml b/foundationdb/pyproject.toml index 1f0d71c51a525..470049575ebe9 100644 --- a/foundationdb/pyproject.toml +++ b/foundationdb/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -40,7 +39,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "foundationdb==6.3.24; python_version > '3.0'", + "foundationdb==6.3.24", ] [project.urls] diff --git a/foundationdb/setup.py b/foundationdb/setup.py deleted file mode 100644 index aea314d32fff2..0000000000000 --- a/foundationdb/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2022-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'foundationdb', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-foundationdb', - version=ABOUT['__version__'], - description='The FoundationDB check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent foundationdb check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-extras', - # Author details - author='Edument Central Europe sro.', - author_email='datadog-integrations@edument.cz', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.foundationdb'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/gearmand/changelog.d/18580.removed b/gearmand/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/gearmand/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/gearmand/pyproject.toml b/gearmand/pyproject.toml index 2f26283e1bb35..6cbb39f22e913 100644 --- a/gearmand/pyproject.toml +++ b/gearmand/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "gearman==2.0.2; sys_platform != 'win32' and python_version < '3.0'", - "python3-gearman==0.1.0; sys_platform != 'win32' and python_version > '3.0'", + "python3-gearman==0.1.0; sys_platform != 'win32'", ] [project.urls] diff --git a/gearmand/setup.py b/gearmand/setup.py deleted file mode 100644 index e6cdee0471bde..0000000000000 --- a/gearmand/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'gearmand', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-gearmand', - version=ABOUT['__version__'], - description='The Gearmand check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent gearmand check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.gearmand'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/gitlab/changelog.d/18580.removed b/gitlab/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/gitlab/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/gitlab/pyproject.toml b/gitlab/pyproject.toml index c901443a0aad9..2b18e80e4f688 100644 --- a/gitlab/pyproject.toml +++ b/gitlab/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/gitlab/setup.py b/gitlab/setup.py deleted file mode 100644 index 34795aa45de5c..0000000000000 --- a/gitlab/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'gitlab', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-gitlab', - version=ABOUT['__version__'], - description='The Gitlab check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent gitlab check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.gitlab'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/gitlab_runner/changelog.d/18580.removed b/gitlab_runner/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/gitlab_runner/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/gitlab_runner/pyproject.toml b/gitlab_runner/pyproject.toml index 1f1bd0c327a3a..6a98e5af356a6 100644 --- a/gitlab_runner/pyproject.toml +++ b/gitlab_runner/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/gitlab_runner/setup.py b/gitlab_runner/setup.py deleted file mode 100644 index 51389ebda0d1d..0000000000000 --- a/gitlab_runner/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'gitlab_runner', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-gitlab_runner', - version=ABOUT['__version__'], - description='The Gitlab Runner check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent gitlab_runner check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.gitlab_runner'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/glusterfs/changelog.d/18580.removed b/glusterfs/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/glusterfs/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/glusterfs/pyproject.toml b/glusterfs/pyproject.toml index 15b1c43e9a1d0..b6dc2025ac9e5 100644 --- a/glusterfs/pyproject.toml +++ b/glusterfs/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/glusterfs/setup.py b/glusterfs/setup.py deleted file mode 100644 index 51e0ae4bc151b..0000000000000 --- a/glusterfs/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'glusterfs', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-glusterfs', - version=ABOUT['__version__'], - description='The GlusterFS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent glusterfs check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.glusterfs'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/go_expvar/changelog.d/18580.removed b/go_expvar/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/go_expvar/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/go_expvar/pyproject.toml b/go_expvar/pyproject.toml index 53ae248a2bdb7..1ab17cbdbb1cc 100644 --- a/go_expvar/pyproject.toml +++ b/go_expvar/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/go_expvar/setup.py b/go_expvar/setup.py deleted file mode 100644 index 80627720e03ec..0000000000000 --- a/go_expvar/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'go_expvar', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-go_expvar', - version=ABOUT['__version__'], - description='The Go Expvar check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent go_expvar check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.go_expvar'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/gunicorn/changelog.d/18580.removed b/gunicorn/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/gunicorn/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/gunicorn/pyproject.toml b/gunicorn/pyproject.toml index f5fc135bbecb5..36eecd88d930e 100644 --- a/gunicorn/pyproject.toml +++ b/gunicorn/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/gunicorn/setup.py b/gunicorn/setup.py deleted file mode 100644 index fc5ea74b841a4..0000000000000 --- a/gunicorn/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'gunicorn', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-gunicorn', - version=ABOUT['__version__'], - description='The Gunicorn check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent gunicorn check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.gunicorn'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/haproxy/changelog.d/18580.removed b/haproxy/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/haproxy/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/haproxy/pyproject.toml b/haproxy/pyproject.toml index d3afdeccd65f4..331fd6d769f85 100644 --- a/haproxy/pyproject.toml +++ b/haproxy/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/haproxy/setup.py b/haproxy/setup.py deleted file mode 100644 index 813be20b0cfcd..0000000000000 --- a/haproxy/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "haproxy", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-haproxy', - version=ABOUT["__version__"], - description='The HAProxy check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent haproxy check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.haproxy'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/harbor/changelog.d/18580.removed b/harbor/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/harbor/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/harbor/pyproject.toml b/harbor/pyproject.toml index 7542301a53a92..cfa46f3e817ba 100644 --- a/harbor/pyproject.toml +++ b/harbor/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/harbor/setup.py b/harbor/setup.py deleted file mode 100644 index 4b1c578ab4607..0000000000000 --- a/harbor/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'harbor', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-harbor', - version=ABOUT['__version__'], - description='The harbor check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent harbor check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.harbor'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/hazelcast/changelog.d/18580.removed b/hazelcast/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/hazelcast/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/hazelcast/pyproject.toml b/hazelcast/pyproject.toml index 9d777b0a0142c..7d7226fde345c 100644 --- a/hazelcast/pyproject.toml +++ b/hazelcast/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,7 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "hazelcast-python-client==5.4.0; python_version > '3.0'", + "hazelcast-python-client==5.4.0", ] [project.urls] diff --git a/hazelcast/setup.py b/hazelcast/setup.py deleted file mode 100644 index 6d2f207f0c220..0000000000000 --- a/hazelcast/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'hazelcast', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-hazelcast', - version=ABOUT['__version__'], - description='The Hazelcast check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent hazelcast check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.hazelcast'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/hdfs_datanode/changelog.d/18580.removed b/hdfs_datanode/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/hdfs_datanode/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/hdfs_datanode/pyproject.toml b/hdfs_datanode/pyproject.toml index c97c1f774038f..3052f61d40d3d 100644 --- a/hdfs_datanode/pyproject.toml +++ b/hdfs_datanode/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/hdfs_datanode/setup.py b/hdfs_datanode/setup.py deleted file mode 100644 index 64d9a572b28af..0000000000000 --- a/hdfs_datanode/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "hdfs_datanode", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-hdfs_datanode', - version=ABOUT['__version__'], - description='The HDFS Datanode Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent hdfs_datanode check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.hdfs_datanode'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/hdfs_namenode/changelog.d/18580.removed b/hdfs_namenode/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/hdfs_namenode/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/hdfs_namenode/pyproject.toml b/hdfs_namenode/pyproject.toml index 8d090b73965ce..7cff415a294fe 100644 --- a/hdfs_namenode/pyproject.toml +++ b/hdfs_namenode/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/hdfs_namenode/setup.py b/hdfs_namenode/setup.py deleted file mode 100644 index b42d76d829a82..0000000000000 --- a/hdfs_namenode/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "hdfs_namenode", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-hdfs_namenode', - version=ABOUT['__version__'], - description='The HDFS Namenode check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent hdfs_namenode check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.hdfs_namenode'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/http_check/changelog.d/18580.removed b/http_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/http_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/http_check/pyproject.toml b/http_check/pyproject.toml index 89ed482dfafae..414969c8f7962 100644 --- a/http_check/pyproject.toml +++ b/http_check/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,10 +38,8 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "cryptography==3.3.2; python_version < '3.0'", - "cryptography==43.0.0; python_version > '3.0'", - "requests-ntlm==1.1.0; python_version < '3.0'", - "requests-ntlm==1.3.0; python_version > '3.0'", + "cryptography==43.0.0", + "requests-ntlm==1.3.0", ] [project.urls] diff --git a/http_check/setup.py b/http_check/setup.py deleted file mode 100644 index 09567d7745ead..0000000000000 --- a/http_check/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "http_check", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-http_check', - version=ABOUT['__version__'], - description='The HTTP check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent http_check check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.http_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/hudi/changelog.d/18580.removed b/hudi/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/hudi/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/hudi/pyproject.toml b/hudi/pyproject.toml index 5e447df966116..24f95771d0f47 100644 --- a/hudi/pyproject.toml +++ b/hudi/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/hudi/setup.py b/hudi/setup.py deleted file mode 100644 index 741ead4c5aba6..0000000000000 --- a/hudi/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'hudi', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-hudi', - version=ABOUT['__version__'], - description='The Hudi check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent hudi check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.hudi'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/hyperv/changelog.d/18580.removed b/hyperv/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/hyperv/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/hyperv/pyproject.toml b/hyperv/pyproject.toml index f4f2de114c67a..572102aee2449 100644 --- a/hyperv/pyproject.toml +++ b/hyperv/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/hyperv/setup.py b/hyperv/setup.py deleted file mode 100644 index aeddf227cb598..0000000000000 --- a/hyperv/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'hyperv', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-hyperv', - version=ABOUT['__version__'], - description='The Hyper-V check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent hyperv check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.hyperv'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ibm_ace/changelog.d/18580.removed b/ibm_ace/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ibm_ace/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ibm_ace/pyproject.toml b/ibm_ace/pyproject.toml index 1b5e06ec02116..1b27a99a0bdde 100644 --- a/ibm_ace/pyproject.toml +++ b/ibm_ace/pyproject.toml @@ -37,7 +37,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "pymqi==1.12.10; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0'", + "pymqi==1.12.10; (sys_platform != 'darwin' or platform_machine != 'arm64')", ] [project.urls] diff --git a/ibm_db2/changelog.d/18580.removed b/ibm_db2/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ibm_db2/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/ibm_db2/pyproject.toml b/ibm_db2/pyproject.toml index 1be3f70e6b4c5..ed4b3a696092b 100644 --- a/ibm_db2/pyproject.toml +++ b/ibm_db2/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/ibm_db2/setup.py b/ibm_db2/setup.py deleted file mode 100644 index ff4f7eb0a9d54..0000000000000 --- a/ibm_db2/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'ibm_db2', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-ibm_db2', - version=ABOUT['__version__'], - description='The IBM Db2 check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ibm_db2 check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ibm_db2'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ibm_i/changelog.d/18580.removed b/ibm_i/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ibm_i/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/ibm_i/pyproject.toml b/ibm_i/pyproject.toml index 93b3fb680d3c1..fe8b3fe6d7229 100644 --- a/ibm_i/pyproject.toml +++ b/ibm_i/pyproject.toml @@ -39,7 +39,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "pyodbc==5.1.0; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0'", + "pyodbc==5.1.0; (sys_platform != 'darwin' or platform_machine != 'arm64')", ] [project.urls] diff --git a/ibm_mq/changelog.d/18580.removed b/ibm_mq/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ibm_mq/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ibm_mq/pyproject.toml b/ibm_mq/pyproject.toml index d3ca79e7bfd2c..c5d42f6be30c3 100644 --- a/ibm_mq/pyproject.toml +++ b/ibm_mq/pyproject.toml @@ -38,7 +38,7 @@ requires-python = ">=3.9" [project.optional-dependencies] deps = [ "psutil==5.9.6", - "pymqi==1.12.10; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0'", + "pymqi==1.12.10; (sys_platform != 'darwin' or platform_machine != 'arm64')", ] [project.urls] diff --git a/ibm_was/changelog.d/18580.removed b/ibm_was/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ibm_was/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ibm_was/pyproject.toml b/ibm_was/pyproject.toml index 3bcb528e992f4..d56eac6de342a 100644 --- a/ibm_was/pyproject.toml +++ b/ibm_was/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/ibm_was/setup.py b/ibm_was/setup.py deleted file mode 100644 index eef81f428ffd1..0000000000000 --- a/ibm_was/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'ibm_was', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-ibm_was', - version=ABOUT['__version__'], - description='The IBM WAS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ibm_was check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ibm_was'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/iis/changelog.d/18580.removed b/iis/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/iis/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/iis/pyproject.toml b/iis/pyproject.toml index d7734b4108378..b367c7ed75362 100644 --- a/iis/pyproject.toml +++ b/iis/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/iis/setup.py b/iis/setup.py deleted file mode 100644 index 56a063db26edf..0000000000000 --- a/iis/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'iis', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-iis', - version=ABOUT["__version__"], - description='The IIS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent iis check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.iis'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/istio/changelog.d/18580.removed b/istio/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/istio/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/istio/pyproject.toml b/istio/pyproject.toml index b0e1bf9a4245a..7ca39cfb40e54 100644 --- a/istio/pyproject.toml +++ b/istio/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/istio/setup.py b/istio/setup.py deleted file mode 100644 index 60e3ff1a7e62d..0000000000000 --- a/istio/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "istio", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-istio', - version=ABOUT["__version__"], - description='The istio check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent istio check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='New BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.istio'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/journald/changelog.d/18580.removed b/journald/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/journald/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/journald/pyproject.toml b/journald/pyproject.toml index 38c11f7e85371..010340badfe1b 100644 --- a/journald/pyproject.toml +++ b/journald/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/journald/setup.py b/journald/setup.py deleted file mode 100644 index 16bb0a250a19e..0000000000000 --- a/journald/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'journald', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-journald', - version=ABOUT['__version__'], - description='The journald check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent journald check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.journald'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kafka/changelog.d/18580.removed b/kafka/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kafka/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kafka/pyproject.toml b/kafka/pyproject.toml index fbd0465beac16..19a35c76e8c35 100644 --- a/kafka/pyproject.toml +++ b/kafka/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kafka/setup.py b/kafka/setup.py deleted file mode 100644 index 46124f1129f86..0000000000000 --- a/kafka/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kafka', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-kafka', - version=ABOUT['__version__'], - description='The Kafka check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kafka check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kafka'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kafka_consumer/changelog.d/18580.removed b/kafka_consumer/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kafka_consumer/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kafka_consumer/pyproject.toml b/kafka_consumer/pyproject.toml index fd0795e888e0b..3e3eb29708129 100644 --- a/kafka_consumer/pyproject.toml +++ b/kafka_consumer/pyproject.toml @@ -36,7 +36,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "confluent-kafka==2.5.0; python_version > '3.0'", + "confluent-kafka==2.5.0", ] [project.urls] diff --git a/kong/changelog.d/18580.removed b/kong/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kong/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kong/pyproject.toml b/kong/pyproject.toml index 9153d8dee4baf..5e637f640fd1c 100644 --- a/kong/pyproject.toml +++ b/kong/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kong/setup.py b/kong/setup.py deleted file mode 100644 index 420619dc31c42..0000000000000 --- a/kong/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kong', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kong', - version=ABOUT['__version__'], - description='The Kong check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kong check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kong'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_apiserver_metrics/changelog.d/18580.removed b/kube_apiserver_metrics/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_apiserver_metrics/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_apiserver_metrics/pyproject.toml b/kube_apiserver_metrics/pyproject.toml index c068880c4913e..b13c476bffdb4 100644 --- a/kube_apiserver_metrics/pyproject.toml +++ b/kube_apiserver_metrics/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_apiserver_metrics/setup.py b/kube_apiserver_metrics/setup.py deleted file mode 100644 index 03b58dc76d13d..0000000000000 --- a/kube_apiserver_metrics/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kube_apiserver_metrics', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-kube_apiserver_metrics', - version=ABOUT['__version__'], - description='The Kube_apiserver_metrics check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_apiserver_metrics check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_apiserver_metrics'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_controller_manager/changelog.d/18580.removed b/kube_controller_manager/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_controller_manager/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_controller_manager/pyproject.toml b/kube_controller_manager/pyproject.toml index ddaf26f4ddd3b..17c7abbf86546 100644 --- a/kube_controller_manager/pyproject.toml +++ b/kube_controller_manager/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_controller_manager/setup.py b/kube_controller_manager/setup.py deleted file mode 100644 index 2944c3c0a6b6f..0000000000000 --- a/kube_controller_manager/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kube_controller_manager', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-kube_controller_manager', - version=ABOUT['__version__'], - description='The Kubernetes Controller Manager check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_controller_manager check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_controller_manager'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_dns/changelog.d/18580.removed b/kube_dns/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_dns/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_dns/pyproject.toml b/kube_dns/pyproject.toml index c01edef1aedd5..67ea433f081d0 100644 --- a/kube_dns/pyproject.toml +++ b/kube_dns/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_dns/setup.py b/kube_dns/setup.py deleted file mode 100644 index 8667607eb4725..0000000000000 --- a/kube_dns/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kube_dns', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kube_dns', - version=ABOUT['__version__'], - description='The KubeDNS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_dns check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_dns'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_metrics_server/changelog.d/18580.removed b/kube_metrics_server/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_metrics_server/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_metrics_server/pyproject.toml b/kube_metrics_server/pyproject.toml index 13ac40d8a400a..ed80bd29a6270 100644 --- a/kube_metrics_server/pyproject.toml +++ b/kube_metrics_server/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_metrics_server/setup.py b/kube_metrics_server/setup.py deleted file mode 100644 index 7e91037cea1a3..0000000000000 --- a/kube_metrics_server/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kube_metrics_server', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-kube_metrics_server', - version=ABOUT['__version__'], - description='The Kubernetes Metrics Server check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_metrics_server check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_metrics_server'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_proxy/changelog.d/18580.removed b/kube_proxy/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_proxy/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_proxy/pyproject.toml b/kube_proxy/pyproject.toml index 5522d6af17045..6a4c1a1cdafc7 100644 --- a/kube_proxy/pyproject.toml +++ b/kube_proxy/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_proxy/setup.py b/kube_proxy/setup.py deleted file mode 100644 index 82c148b407408..0000000000000 --- a/kube_proxy/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "kube_proxy", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kube-proxy', - version=ABOUT["__version__"], - description='The kube_proxy Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_proxy check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_proxy'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kube_scheduler/changelog.d/18580.removed b/kube_scheduler/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kube_scheduler/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kube_scheduler/pyproject.toml b/kube_scheduler/pyproject.toml index 0cabe41d978d1..620f541b83b70 100644 --- a/kube_scheduler/pyproject.toml +++ b/kube_scheduler/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kube_scheduler/setup.py b/kube_scheduler/setup.py deleted file mode 100644 index a3dbf05aa1577..0000000000000 --- a/kube_scheduler/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kube_scheduler', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-kube_scheduler', - version=ABOUT['__version__'], - description='The Kubernetes Scheduler check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kube_scheduler check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kube_scheduler'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kubelet/changelog.d/18580.removed b/kubelet/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kubelet/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kubelet/pyproject.toml b/kubelet/pyproject.toml index 6c437878a5d83..7fd6a3cc53cc4 100644 --- a/kubelet/pyproject.toml +++ b/kubelet/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kubelet/setup.py b/kubelet/setup.py deleted file mode 100644 index 560612f4becd0..0000000000000 --- a/kubelet/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "kubelet", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kubelet', - version=ABOUT["__version__"], - description='The Kubelet check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kubelet check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='New BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kubelet'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kubernetes_state/changelog.d/18580.removed b/kubernetes_state/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kubernetes_state/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kubernetes_state/pyproject.toml b/kubernetes_state/pyproject.toml index b091ad130b2a1..1d5704d3b20c1 100644 --- a/kubernetes_state/pyproject.toml +++ b/kubernetes_state/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kubernetes_state/setup.py b/kubernetes_state/setup.py deleted file mode 100644 index 36299509bdb7a..0000000000000 --- a/kubernetes_state/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'kubernetes_state', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kubernetes_state', - version=ABOUT['__version__'], - description='The Kubernetes State check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kubernetes_state check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kubernetes_state'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/kyototycoon/changelog.d/18580.removed b/kyototycoon/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/kyototycoon/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/kyototycoon/pyproject.toml b/kyototycoon/pyproject.toml index 8875ce2f59157..aac5a3f24775a 100644 --- a/kyototycoon/pyproject.toml +++ b/kyototycoon/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/kyototycoon/setup.py b/kyototycoon/setup.py deleted file mode 100644 index 572186d628551..0000000000000 --- a/kyototycoon/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "kyototycoon", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-kyototycoon', - version=ABOUT['__version__'], - description='The KyotoTycoon check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent kyototycoon check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.kyototycoon'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/lighttpd/changelog.d/18580.removed b/lighttpd/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/lighttpd/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/lighttpd/pyproject.toml b/lighttpd/pyproject.toml index 5c557f72ea371..28eb6b254122e 100644 --- a/lighttpd/pyproject.toml +++ b/lighttpd/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/lighttpd/setup.py b/lighttpd/setup.py deleted file mode 100644 index b7fd064d4c564..0000000000000 --- a/lighttpd/setup.py +++ /dev/null @@ -1,76 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "lighttpd", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-lighttpd', - version=ABOUT["__version__"], - description='The lighttpd check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent lighttpd check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.lighttpd'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/linkerd/changelog.d/18580.removed b/linkerd/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/linkerd/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/linkerd/pyproject.toml b/linkerd/pyproject.toml index bc4e6042ff98c..45f685a892bf4 100644 --- a/linkerd/pyproject.toml +++ b/linkerd/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/linkerd/setup.py b/linkerd/setup.py deleted file mode 100644 index 4a9481ff40866..0000000000000 --- a/linkerd/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "linkerd", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-linkerd', - version=ABOUT["__version__"], - description='The Linkerd check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent linkerd check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.linkerd'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/linux_proc_extras/changelog.d/18580.removed b/linux_proc_extras/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/linux_proc_extras/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/linux_proc_extras/pyproject.toml b/linux_proc_extras/pyproject.toml index 787deaa23fd1c..b92af0be3195b 100644 --- a/linux_proc_extras/pyproject.toml +++ b/linux_proc_extras/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/linux_proc_extras/setup.py b/linux_proc_extras/setup.py deleted file mode 100644 index 6969f389b24d2..0000000000000 --- a/linux_proc_extras/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'linux_proc_extras', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-linux_proc_extras', - version=ABOUT['__version__'], - description='The Linux Proc Extras check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent linux_proc_extras check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.linux_proc_extras'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mapr/changelog.d/18580.removed b/mapr/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mapr/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mapr/pyproject.toml b/mapr/pyproject.toml index 7f09d89d685dc..ad39020b8805b 100644 --- a/mapr/pyproject.toml +++ b/mapr/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/mapr/setup.py b/mapr/setup.py deleted file mode 100644 index 10c12404d63e1..0000000000000 --- a/mapr/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'mapr', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-mapr', - version=ABOUT['__version__'], - description='The mapr check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent mapr check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.mapr'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mapreduce/changelog.d/18580.removed b/mapreduce/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mapreduce/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mapreduce/pyproject.toml b/mapreduce/pyproject.toml index 39eb096ac3b6d..1f77e1e9e45e8 100644 --- a/mapreduce/pyproject.toml +++ b/mapreduce/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/mapreduce/setup.py b/mapreduce/setup.py deleted file mode 100644 index 3993e06a6ffff..0000000000000 --- a/mapreduce/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "mapreduce", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-mapreduce', - version=ABOUT['__version__'], - description='The MapReduce check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent mapreduce check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.mapreduce'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/marathon/changelog.d/18580.removed b/marathon/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/marathon/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/marathon/pyproject.toml b/marathon/pyproject.toml index ed1b39a6e5b97..be2ec12a08957 100644 --- a/marathon/pyproject.toml +++ b/marathon/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/marathon/setup.py b/marathon/setup.py deleted file mode 100644 index 22b325f72e77b..0000000000000 --- a/marathon/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "marathon", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-marathon', - version=ABOUT["__version__"], - description='The Marathon check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent marathon check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.marathon'], - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/marklogic/changelog.d/18580.removed b/marklogic/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/marklogic/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/marklogic/pyproject.toml b/marklogic/pyproject.toml index 357f0d63b5144..c36da31616366 100644 --- a/marklogic/pyproject.toml +++ b/marklogic/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/marklogic/setup.py b/marklogic/setup.py deleted file mode 100644 index 5424c1be76176..0000000000000 --- a/marklogic/setup.py +++ /dev/null @@ -1,69 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'marklogic', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-marklogic', - version=ABOUT['__version__'], - description='The MarkLogic check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent marklogic check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.marklogic'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mcache/changelog.d/18580.removed b/mcache/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mcache/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mcache/pyproject.toml b/mcache/pyproject.toml index 5d47c95293856..535b4bb5a02ab 100644 --- a/mcache/pyproject.toml +++ b/mcache/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "python-binary-memcached==0.26.1; sys_platform != 'win32' and python_version < '3.0'", - "python-binary-memcached==0.31.2; sys_platform != 'win32' and python_version > '3.0'", + "python-binary-memcached==0.31.2; sys_platform != 'win32'", ] [project.urls] diff --git a/mcache/setup.py b/mcache/setup.py deleted file mode 100644 index 7aee2b55879b2..0000000000000 --- a/mcache/setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "mcache", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-mcache', - version=ABOUT["__version__"], - description='The Memcache Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent Memcache check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.mcache'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mesos_master/changelog.d/18580.removed b/mesos_master/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mesos_master/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mesos_master/pyproject.toml b/mesos_master/pyproject.toml index cfd49706c0e22..e08054638645c 100644 --- a/mesos_master/pyproject.toml +++ b/mesos_master/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/mesos_master/setup.py b/mesos_master/setup.py deleted file mode 100644 index 2113797052750..0000000000000 --- a/mesos_master/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'mesos_master', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-mesos_master', - version=ABOUT['__version__'], - description='The Mesos master check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent mesos_master check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.mesos_master'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mesos_slave/changelog.d/18580.removed b/mesos_slave/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mesos_slave/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mesos_slave/pyproject.toml b/mesos_slave/pyproject.toml index c9dacbf4d52da..ef842e6069708 100644 --- a/mesos_slave/pyproject.toml +++ b/mesos_slave/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/mesos_slave/setup.py b/mesos_slave/setup.py deleted file mode 100644 index 25e460901596b..0000000000000 --- a/mesos_slave/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'mesos_slave', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-mesos_slave', - version=ABOUT['__version__'], - description='The Mesos slave check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent mesos_slave check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.mesos_slave'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/mongo/changelog.d/18580.removed b/mongo/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mongo/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/mongo/pyproject.toml b/mongo/pyproject.toml index 7f940ef1e72d9..ae31a970f390d 100644 --- a/mongo/pyproject.toml +++ b/mongo/pyproject.toml @@ -38,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "cachetools==3.1.1; python_version < '3.0'", - "cachetools==5.5.0; python_version > '3.0'", + "cachetools==5.5.0", "pymongo[srv]==4.8.0; python_version >= '3.9'", ] diff --git a/mysql/changelog.d/18580.removed b/mysql/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/mysql/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/mysql/pyproject.toml b/mysql/pyproject.toml index 3b5ca5eb45684..01d6c9a97c5ba 100644 --- a/mysql/pyproject.toml +++ b/mysql/pyproject.toml @@ -37,12 +37,9 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "cachetools==3.1.1; python_version < '3.0'", - "cachetools==5.5.0; python_version > '3.0'", - "cryptography==3.3.2; python_version < '3.0'", - "cryptography==43.0.0; python_version > '3.0'", - "pymysql==0.10.1; python_version < '3.0'", - "pymysql==1.1.1; python_version > '3.0'", + "cachetools==5.5.0", + "cryptography==43.0.0", + "pymysql==1.1.1", ] [project.urls] diff --git a/nagios/changelog.d/18580.removed b/nagios/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/nagios/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/nagios/pyproject.toml b/nagios/pyproject.toml index 8660f4d12cd29..67de6ad22912d 100644 --- a/nagios/pyproject.toml +++ b/nagios/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/nagios/setup.py b/nagios/setup.py deleted file mode 100644 index 6f2f8e58dd852..0000000000000 --- a/nagios/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'nagios', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-nagios', - version=ABOUT['__version__'], - description='The Nagios check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent nagios check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.nagios'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/network/changelog.d/18580.removed b/network/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/network/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/network/pyproject.toml b/network/pyproject.toml index 93f3f3bb8e1d9..f038e2d5eeb73 100644 --- a/network/pyproject.toml +++ b/network/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/network/setup.py b/network/setup.py deleted file mode 100644 index 1cce71d85b4c8..0000000000000 --- a/network/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# Always prefer setuptools over distutils -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "network", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-network', - version=ABOUT["__version__"], - description='The Network check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent network check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='MIT', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.network'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/nfsstat/changelog.d/18580.removed b/nfsstat/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/nfsstat/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/nfsstat/pyproject.toml b/nfsstat/pyproject.toml index 69bcc6fb28edf..78f478a8bf029 100644 --- a/nfsstat/pyproject.toml +++ b/nfsstat/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/nfsstat/setup.py b/nfsstat/setup.py deleted file mode 100644 index a18cd661fadb0..0000000000000 --- a/nfsstat/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'nfsstat', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-nfsstat', - version=ABOUT["__version__"], - description='The NFSstat check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent nfsstat check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.nfsstat'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/nginx/changelog.d/18580.removed b/nginx/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/nginx/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/nginx/pyproject.toml b/nginx/pyproject.toml index b123b102633f8..0c6c5a37a3f5a 100644 --- a/nginx/pyproject.toml +++ b/nginx/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/nginx/setup.py b/nginx/setup.py deleted file mode 100644 index 3c8345dc372cf..0000000000000 --- a/nginx/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'nginx', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-nginx', - version=ABOUT["__version__"], - description='The Nginx check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent nginx check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.nginx'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/nginx_ingress_controller/changelog.d/18580.removed b/nginx_ingress_controller/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/nginx_ingress_controller/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/nginx_ingress_controller/pyproject.toml b/nginx_ingress_controller/pyproject.toml index dd9b018d65092..c6ade0257b65b 100644 --- a/nginx_ingress_controller/pyproject.toml +++ b/nginx_ingress_controller/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/nginx_ingress_controller/setup.py b/nginx_ingress_controller/setup.py deleted file mode 100644 index 37b40cf98e2aa..0000000000000 --- a/nginx_ingress_controller/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'nginx_ingress_controller', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-nginx_ingress_controller', - version=ABOUT['__version__'], - description='The nginx-ingress-controller check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent nginx_ingress_controller check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.nginx_ingress_controller'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/openldap/changelog.d/18580.removed b/openldap/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/openldap/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/openldap/pyproject.toml b/openldap/pyproject.toml index 075918574e2e5..cdbbc1723d277 100644 --- a/openldap/pyproject.toml +++ b/openldap/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/openldap/setup.py b/openldap/setup.py deleted file mode 100644 index cd88eb70cfe0c..0000000000000 --- a/openldap/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "openldap", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-openldap', - version=ABOUT["__version__"], - description='The OpenLDAP integration collect metrics from your OpenLDAP server using the monitor backend', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', 
- 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.openldap'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - package_data={'datadog_checks.openldap': ['conf.yaml.example']}, - include_package_data=True, -) diff --git a/openmetrics/changelog.d/18580.removed b/openmetrics/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/openmetrics/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/openmetrics/pyproject.toml b/openmetrics/pyproject.toml index b3d01f73db708..999a17e36146f 100644 --- a/openmetrics/pyproject.toml +++ b/openmetrics/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/openmetrics/setup.py b/openmetrics/setup.py deleted file mode 100644 index 13796fc375acd..0000000000000 --- a/openmetrics/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "openmetrics", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-openmetrics', - version=ABOUT["__version__"], - description='The openmetrics check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent openmetrics check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.openmetrics'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/openstack/changelog.d/18580.removed b/openstack/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/openstack/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/openstack/pyproject.toml b/openstack/pyproject.toml index 088bccab3ebf9..096272e19f7de 100644 --- a/openstack/pyproject.toml +++ b/openstack/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/openstack/setup.py b/openstack/setup.py deleted file mode 100644 index 6dbf54d238456..0000000000000 --- a/openstack/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "openstack", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-openstack', - version=ABOUT['__version__'], - description='The Openstack check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent openstack check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.openstack'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/openstack_controller/changelog.d/18580.removed b/openstack_controller/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/openstack_controller/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/openstack_controller/pyproject.toml b/openstack_controller/pyproject.toml index b4e969a878ef3..92241eb9e8f30 100644 --- a/openstack_controller/pyproject.toml +++ b/openstack_controller/pyproject.toml @@ -37,7 +37,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "openstacksdk==3.3.0; python_version > '3.0'", + "openstacksdk==3.3.0", ] [project.urls] diff --git a/pan_firewall/changelog.d/18580.removed b/pan_firewall/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/pan_firewall/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/pan_firewall/pyproject.toml b/pan_firewall/pyproject.toml index caf7bf83cc4db..4fba326e78205 100644 --- a/pan_firewall/pyproject.toml +++ b/pan_firewall/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/pan_firewall/setup.py b/pan_firewall/setup.py deleted file mode 100644 index 034516f4b193e..0000000000000 --- a/pan_firewall/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'pan_firewall', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-pan_firewall', - version=ABOUT['__version__'], - description='The pan_firewall check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent pan_firewall check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.pan_firewall'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/pdh_check/changelog.d/18580.removed b/pdh_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/pdh_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/pdh_check/pyproject.toml b/pdh_check/pyproject.toml index e6aad6e9c7986..92fb3eebe16b4 100644 --- a/pdh_check/pyproject.toml +++ b/pdh_check/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/pdh_check/setup.py b/pdh_check/setup.py deleted file mode 100644 index a1e1b3d55ab91..0000000000000 --- a/pdh_check/setup.py +++ /dev/null @@ -1,82 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open # To use a consistent encoding -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'pdh_check', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-pdh_check', - version=ABOUT["__version__"], - description='The Windows Performance Counters check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent pdh_check check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.pdh_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/pgbouncer/changelog.d/18580.removed b/pgbouncer/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/pgbouncer/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/pgbouncer/pyproject.toml b/pgbouncer/pyproject.toml index 567d691f827d1..884c207d131e9 100644 --- a/pgbouncer/pyproject.toml +++ b/pgbouncer/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -40,7 +39,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "psycopg2-binary==2.9.9; python_version > '3.0'", + "psycopg2-binary==2.9.9", ] [project.urls] diff --git a/pgbouncer/setup.py b/pgbouncer/setup.py deleted file mode 100644 index 395a513a16b9c..0000000000000 --- a/pgbouncer/setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "pgbouncer", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-pgbouncer', - version=ABOUT["__version__"], - description='The PGbouncer check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent pgbouncer check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.pgbouncer'], - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/php_fpm/changelog.d/18580.removed b/php_fpm/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/php_fpm/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/php_fpm/pyproject.toml b/php_fpm/pyproject.toml index 9b3058660927a..dee5c967d0b09 100644 --- a/php_fpm/pyproject.toml +++ b/php_fpm/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/php_fpm/setup.py b/php_fpm/setup.py deleted file mode 100644 index c2370c3828aef..0000000000000 --- a/php_fpm/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'php_fpm', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-php_fpm', - version=ABOUT['__version__'], - description='The PHP FPM check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent php_fpm check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.php_fpm'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/postfix/changelog.d/18580.removed b/postfix/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/postfix/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/postfix/pyproject.toml b/postfix/pyproject.toml index 481b18d4a3450..004b7abc40e11 100644 --- a/postfix/pyproject.toml +++ b/postfix/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/postfix/setup.py b/postfix/setup.py deleted file mode 100644 index bb51b7b3ce814..0000000000000 --- a/postfix/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'postfix', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-postfix', - version=ABOUT['__version__'], - description='The Postfix check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent postfix check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.postfix'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/postgres/changelog.d/18580.removed b/postgres/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/postgres/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/postgres/pyproject.toml b/postgres/pyproject.toml index f677e1ae37849..8a9846261bd37 100644 --- a/postgres/pyproject.toml +++ b/postgres/pyproject.toml @@ -37,14 +37,11 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "azure-identity==1.17.1; python_version > '3.0'", - "boto3==1.17.112; python_version < '3.0'", - "boto3==1.35.10; python_version > '3.0'", - "cachetools==3.1.1; python_version < '3.0'", - "cachetools==5.5.0; python_version > '3.0'", - "psycopg2-binary==2.9.9; python_version > '3.0'", - "semver==2.13.0; python_version < '3.0'", - "semver==3.0.2; python_version > '3.0'", + "azure-identity==1.17.1", + "boto3==1.35.10", + "cachetools==5.5.0", + "psycopg2-binary==2.9.9", + "semver==3.0.2", ] [project.urls] diff --git a/powerdns_recursor/changelog.d/18580.removed b/powerdns_recursor/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/powerdns_recursor/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/powerdns_recursor/pyproject.toml b/powerdns_recursor/pyproject.toml index d849b32a84e6b..b1a671cb02c8c 100644 --- a/powerdns_recursor/pyproject.toml +++ b/powerdns_recursor/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/powerdns_recursor/setup.py b/powerdns_recursor/setup.py deleted file mode 100644 index af4b448998328..0000000000000 --- a/powerdns_recursor/setup.py +++ /dev/null @@ -1,85 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# Always prefer setuptools over distutils -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "powerdns_recursor", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-powerdns_recursor', - version=ABOUT["__version__"], - description='The PowerDNS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent powerdns_recursor check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.powerdns_recursor'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/process/changelog.d/18580.removed b/process/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/process/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/process/pyproject.toml b/process/pyproject.toml index 11a24522ed886..8978b66f0fb6b 100644 --- a/process/pyproject.toml +++ b/process/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/process/setup.py b/process/setup.py deleted file mode 100644 index 93a231814b7e9..0000000000000 --- a/process/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'process', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-process', - version=ABOUT['__version__'], - description='The Process check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent process check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.process'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/prometheus/changelog.d/18580.removed b/prometheus/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/prometheus/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/prometheus/pyproject.toml b/prometheus/pyproject.toml index 8ae127d373664..51a4fe0408275 100644 --- a/prometheus/pyproject.toml +++ b/prometheus/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/prometheus/setup.py b/prometheus/setup.py deleted file mode 100644 index 044fdafc229a5..0000000000000 --- a/prometheus/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "prometheus", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-prometheus', - version=ABOUT["__version__"], - description='The prometheus check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent prometheus check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='New BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.prometheus'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/proxysql/changelog.d/18580.removed b/proxysql/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/proxysql/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/proxysql/pyproject.toml b/proxysql/pyproject.toml index b450860d8de48..09647491222ab 100644 --- a/proxysql/pyproject.toml +++ b/proxysql/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pymysql==0.10.1; python_version < '3.0'", - "pymysql==1.1.1; python_version > '3.0'", + "pymysql==1.1.1", ] [project.urls] diff --git a/proxysql/setup.py b/proxysql/setup.py deleted file mode 100644 index c545b174b2419..0000000000000 --- a/proxysql/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'proxysql', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-proxysql', - version=ABOUT['__version__'], - description='The ProxySQL check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent proxysql check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.proxysql'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/rabbitmq/changelog.d/18580.removed b/rabbitmq/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/rabbitmq/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/rabbitmq/pyproject.toml b/rabbitmq/pyproject.toml index 8344c18fbb096..fd4bd248d71ee 100644 --- a/rabbitmq/pyproject.toml +++ b/rabbitmq/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/rabbitmq/setup.py b/rabbitmq/setup.py deleted file mode 100644 index c79d812c47f21..0000000000000 --- a/rabbitmq/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'rabbitmq', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-rabbitmq', - version=ABOUT['__version__'], - description='The RabbitMQ check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent rabbitmq check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.rabbitmq'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/redisdb/changelog.d/18580.removed b/redisdb/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/redisdb/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/redisdb/pyproject.toml b/redisdb/pyproject.toml index ff1cf4bb4dfbd..8bf1515cfd702 100644 --- a/redisdb/pyproject.toml +++ b/redisdb/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "redis==3.5.3; python_version < '3.0'", - "redis==5.0.8; python_version > '3.0'", + "redis==5.0.8", ] [project.urls] diff --git a/redisdb/setup.py b/redisdb/setup.py deleted file mode 100644 index e0e1b14722f67..0000000000000 --- a/redisdb/setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "redisdb", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-redisdb', - version=ABOUT["__version__"], - description='The Redis Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent Redis check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.redisdb'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/rethinkdb/changelog.d/18580.removed b/rethinkdb/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/rethinkdb/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/rethinkdb/pyproject.toml b/rethinkdb/pyproject.toml index a7a18a2a77259..88caf7d283c61 100644 --- a/rethinkdb/pyproject.toml +++ b/rethinkdb/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/rethinkdb/setup.py b/rethinkdb/setup.py deleted file mode 100644 index fc993504dd327..0000000000000 --- a/rethinkdb/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'rethinkdb', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-rethinkdb', - version=ABOUT['__version__'], - description='The RethinkDB check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent rethinkdb check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.rethinkdb'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/riak/changelog.d/18580.removed b/riak/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/riak/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/riak/pyproject.toml b/riak/pyproject.toml index 062b543b99b96..f956b2e9eda5b 100644 --- a/riak/pyproject.toml +++ b/riak/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/riak/setup.py b/riak/setup.py deleted file mode 100644 index d81a0610d56d9..0000000000000 --- a/riak/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "riak", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-riak', - version=ABOUT["__version__"], - description='The Riak Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent riak check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.riak'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/riakcs/changelog.d/18580.removed b/riakcs/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/riakcs/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/riakcs/pyproject.toml b/riakcs/pyproject.toml index b6df2a1df3d62..3f8df8b6a4c56 100644 --- a/riakcs/pyproject.toml +++ b/riakcs/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,7 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "boto3==1.35.10; python_version > '3.0'", + "boto3==1.35.10", ] [project.urls] diff --git a/riakcs/setup.py b/riakcs/setup.py deleted file mode 100644 index 2bab4a8f039c4..0000000000000 --- a/riakcs/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'riakcs', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-riakcs', - version=ABOUT['__version__'], - description='The Riak CS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent riakcs check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.riakcs'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/sap_hana/changelog.d/18580.removed b/sap_hana/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/sap_hana/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/sap_hana/pyproject.toml b/sap_hana/pyproject.toml index b32236241b77e..826b4166ae66e 100644 --- a/sap_hana/pyproject.toml +++ b/sap_hana/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/sap_hana/setup.py b/sap_hana/setup.py deleted file mode 100644 index e8a50341ab934..0000000000000 --- a/sap_hana/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'sap_hana', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-sap_hana', - version=ABOUT['__version__'], - description='The SAP HANA check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent sap_hana check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.sap_hana'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/scylla/changelog.d/18580.removed b/scylla/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/scylla/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/scylla/pyproject.toml b/scylla/pyproject.toml index 26018e592920a..5e4355b0f5383 100644 --- a/scylla/pyproject.toml +++ b/scylla/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/scylla/setup.py b/scylla/setup.py deleted file mode 100644 index cc191f5448c8a..0000000000000 --- a/scylla/setup.py +++ /dev/null @@ -1,84 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -with open(path.join(HERE, 'datadog_checks', 'scylla', '__about__.py'), 'r', encoding='utf-8') as f: - for line in f: - line = line.strip() - if line.startswith('__version__'): - VERSION = line.split('=')[1].strip(' \'"') - break - else: - VERSION = '0.0.1' - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-scylla', - version=VERSION, - description='The Scylla check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent scylla check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.scylla'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/sidekiq/changelog.d/18580.removed b/sidekiq/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/sidekiq/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/sidekiq/pyproject.toml b/sidekiq/pyproject.toml index 45639fc64f09b..9cc7236bfd39c 100644 --- a/sidekiq/pyproject.toml +++ b/sidekiq/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/sidekiq/setup.py b/sidekiq/setup.py deleted file mode 100644 index 2a0405861b873..0000000000000 --- a/sidekiq/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'sidekiq', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-sidekiq', - version=ABOUT['__version__'], - description='The Sidekiq check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent sidekiq check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.sidekiq'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/silk/changelog.d/18580.removed b/silk/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/silk/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/silk/pyproject.toml b/silk/pyproject.toml index 8b6290ef86134..c3fc8baac47f1 100644 --- a/silk/pyproject.toml +++ b/silk/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/silk/setup.py b/silk/setup.py deleted file mode 100644 index a0a1d62d72e31..0000000000000 --- a/silk/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'silk', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-silk', - version=ABOUT['__version__'], - description='The Silk check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent silk check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.silk'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/singlestore/changelog.d/18580.removed b/singlestore/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/singlestore/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/singlestore/pyproject.toml b/singlestore/pyproject.toml index 6b9f0dcceadda..86e5e9297b603 100644 --- a/singlestore/pyproject.toml +++ b/singlestore/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pymysql==0.10.1; python_version < '3.0'", - "pymysql==1.1.1; python_version > '3.0'", + "pymysql==1.1.1", ] [project.urls] diff --git a/singlestore/setup.py b/singlestore/setup.py deleted file mode 100644 index e83d7c0b73afb..0000000000000 --- a/singlestore/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'singlestore', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-singlestore', - version=ABOUT['__version__'], - description='The SingleStore check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent singlestore check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.singlestore'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/snmp/changelog.d/18580.removed b/snmp/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/snmp/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/snmp/pyproject.toml b/snmp/pyproject.toml index a53aa3b49b855..9e4b5c3e439ae 100644 --- a/snmp/pyproject.toml +++ b/snmp/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,16 +38,12 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "futures==3.4.0; python_version < '3.0'", - "ipaddress==1.0.23; python_version < '3.0'", "ply==3.11", "pyasn1==0.4.8", "pycryptodomex==3.20.0", - "pysmi==0.3.4; python_version < '3.0'", - "pysmi==1.2.1; python_version > '3.0'", + "pysmi==1.2.1", "pysnmp-mibs==0.1.6", - "pysnmp==4.4.10; python_version < '3.0' ", - "pysnmp==5.1.0; python_version > '3.0'" + "pysnmp==5.1.0" ] [project.urls] diff --git a/snmp/setup.py b/snmp/setup.py deleted file mode 100644 index 8c6a5241b7fed..0000000000000 --- a/snmp/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'snmp', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-snmp', - version=ABOUT['__version__'], - description='The SNMP check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent snmp check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.snmp'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/snowflake/changelog.d/18580.removed b/snowflake/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/snowflake/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/snowflake/pyproject.toml b/snowflake/pyproject.toml index 4143da93bdb1e..ee5113b4c0b6f 100644 --- a/snowflake/pyproject.toml +++ b/snowflake/pyproject.toml @@ -37,7 +37,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "snowflake-connector-python==3.12.1; python_version > '3.0'", + "snowflake-connector-python==3.12.1", ] [project.urls] diff --git a/sonarqube/changelog.d/18580.removed b/sonarqube/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/sonarqube/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/sonarqube/pyproject.toml b/sonarqube/pyproject.toml index 830cf72461a2d..b67b915b76d58 100644 --- a/sonarqube/pyproject.toml +++ b/sonarqube/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/sonarqube/setup.py b/sonarqube/setup.py deleted file mode 100644 index 44cd2c34b35c7..0000000000000 --- a/sonarqube/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'sonarqube', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-sonarqube', - version=ABOUT['__version__'], - description='The SonarQube check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent sonarqube check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.sonarqube'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/spark/changelog.d/18580.removed b/spark/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/spark/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/spark/pyproject.toml b/spark/pyproject.toml index ffcf110f4cb19..e81d5ee0df3d3 100644 --- a/spark/pyproject.toml +++ b/spark/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "beautifulsoup4==4.12.3; python_version > '3.0'", - "beautifulsoup4==4.9.3; python_version < '3.0'", + "beautifulsoup4==4.12.3", ] [project.urls] diff --git a/spark/setup.py b/spark/setup.py deleted file mode 100644 index 58273c4608dca..0000000000000 --- a/spark/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'spark', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-spark', - version=ABOUT["__version__"], - description='The Spark check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent spark check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.spark'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/sqlserver/changelog.d/18580.removed b/sqlserver/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/sqlserver/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/sqlserver/pyproject.toml b/sqlserver/pyproject.toml index d4bcd6951ef27..14015e55328f5 100644 --- a/sqlserver/pyproject.toml +++ b/sqlserver/pyproject.toml @@ -37,11 +37,10 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "azure-identity==1.17.1; python_version > '3.0'", + "azure-identity==1.17.1", "lxml==4.9.4", - "pyodbc==5.1.0; (sys_platform != 'darwin' or platform_machine != 'arm64') and python_version > '3.0'", - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pyodbc==5.1.0; (sys_platform != 'darwin' or platform_machine != 'arm64')", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/squid/changelog.d/18580.removed b/squid/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/squid/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/squid/pyproject.toml b/squid/pyproject.toml index 3f895fb011d18..4184551c8ec92 100644 --- a/squid/pyproject.toml +++ b/squid/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/squid/setup.py b/squid/setup.py deleted file mode 100644 index 26f441cbd34c3..0000000000000 --- a/squid/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "squid", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-squid', - version=ABOUT["__version__"], - description='The Squid Check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent squid check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.squid'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/ssh_check/changelog.d/18580.removed b/ssh_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/ssh_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/ssh_check/pyproject.toml b/ssh_check/pyproject.toml index fbbcf59dcc7de..d3d8d32bf2bb5 100644 --- a/ssh_check/pyproject.toml +++ b/ssh_check/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "paramiko==2.12.0; python_version < '3.0'", - "paramiko==3.4.1; python_version > '3.0'", + "paramiko==3.4.1", ] [project.urls] diff --git a/ssh_check/setup.py b/ssh_check/setup.py deleted file mode 100644 index 843d808fb5832..0000000000000 --- a/ssh_check/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "ssh_check", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-ssh_check', - version=ABOUT["__version__"], - description='The SSH check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent ssh_check check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.ssh_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/statsd/changelog.d/18580.removed b/statsd/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/statsd/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/statsd/pyproject.toml b/statsd/pyproject.toml index 1c1bfec2ccb10..f5e3fd2f1033d 100644 --- a/statsd/pyproject.toml +++ b/statsd/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/statsd/setup.py b/statsd/setup.py deleted file mode 100644 index e6fb4b47e4395..0000000000000 --- a/statsd/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'statsd', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-statsd', - version=ABOUT['__version__'], - description='The StatsD check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent statsd check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.statsd'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/supervisord/changelog.d/18580.removed b/supervisord/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/supervisord/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/supervisord/pyproject.toml b/supervisord/pyproject.toml index 00f5e94aeb2f4..d2c516ce1668a 100644 --- a/supervisord/pyproject.toml +++ b/supervisord/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/supervisord/setup.py b/supervisord/setup.py deleted file mode 100644 index 1e357128d6a38..0000000000000 --- a/supervisord/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'supervisord', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-supervisord', - version=ABOUT['__version__'], - description='The Supervisord check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent supervisord check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.supervisord'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/system_core/changelog.d/18580.removed b/system_core/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/system_core/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/system_core/pyproject.toml b/system_core/pyproject.toml index 6152315718df2..6c1b9d79c7b6f 100644 --- a/system_core/pyproject.toml +++ b/system_core/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/system_core/setup.py b/system_core/setup.py deleted file mode 100644 index b6dad68f934d7..0000000000000 --- a/system_core/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "system_core", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-system_core', - version=ABOUT["__version__"], - description='The System Core check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent system_core check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.system_core'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/system_swap/changelog.d/18580.removed b/system_swap/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/system_swap/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/system_swap/pyproject.toml b/system_swap/pyproject.toml index 42446a6d8f20a..338cde59201cc 100644 --- a/system_swap/pyproject.toml +++ b/system_swap/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/system_swap/setup.py b/system_swap/setup.py deleted file mode 100644 index 42030118fd73a..0000000000000 --- a/system_swap/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'system_swap', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-system_swap', - version=ABOUT['__version__'], - description='The System Swap check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent system_swap check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.system_swap'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/tcp_check/changelog.d/18580.removed b/tcp_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/tcp_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/tcp_check/pyproject.toml b/tcp_check/pyproject.toml index 115965d19aeca..b19ac5ae591c2 100644 --- a/tcp_check/pyproject.toml +++ b/tcp_check/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/tcp_check/setup.py b/tcp_check/setup.py deleted file mode 100644 index a6b2d27974f2c..0000000000000 --- a/tcp_check/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'tcp_check', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-tcp_check', - version=ABOUT['__version__'], - description='The TCP check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent tcp_check check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.tcp_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/teamcity/changelog.d/18580.removed b/teamcity/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/teamcity/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/teamcity/pyproject.toml b/teamcity/pyproject.toml index 5737285410073..6c2a2dd305174 100644 --- a/teamcity/pyproject.toml +++ b/teamcity/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/teamcity/setup.py b/teamcity/setup.py deleted file mode 100644 index 3ce7841e11750..0000000000000 --- a/teamcity/setup.py +++ /dev/null @@ -1,76 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "teamcity", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-teamcity', - version=ABOUT["__version__"], - description='The Teamcity check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent teamcity check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.teamcity'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/tenable/changelog.d/18580.removed b/tenable/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/tenable/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/tenable/pyproject.toml b/tenable/pyproject.toml index a0ab9713531a0..8ab5356276b6d 100644 --- a/tenable/pyproject.toml +++ b/tenable/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/tenable/setup.py b/tenable/setup.py deleted file mode 100644 index c5d7bf9a2d6cf..0000000000000 --- a/tenable/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'tenable', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-tenable', - version=ABOUT['__version__'], - description='The Tenable check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent tenable check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.tenable'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/teradata/changelog.d/18580.removed b/teradata/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/teradata/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/teradata/pyproject.toml b/teradata/pyproject.toml index 1f98e465bf4f6..d1199e10cce27 100644 --- a/teradata/pyproject.toml +++ b/teradata/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/tls/changelog.d/18580.removed b/tls/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/tls/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/tls/pyproject.toml b/tls/pyproject.toml index 8a845c31de812..4ae35db079046 100644 --- a/tls/pyproject.toml +++ b/tls/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,11 +38,8 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "cryptography==3.3.2; python_version < '3.0'", - "cryptography==43.0.0; python_version > '3.0'", - "ipaddress==1.0.23; python_version < '3.0'", - "service-identity[idna]==21.1.0; python_version < '3.0'", - "service-identity[idna]==24.1.0; python_version > '3.0'", + "cryptography==43.0.0", + "service-identity[idna]==24.1.0", ] [project.urls] diff --git a/tls/setup.py b/tls/setup.py deleted file mode 100644 index 0ff910f4c3ce3..0000000000000 --- a/tls/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'tls', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-tls', - version=ABOUT['__version__'], - description='The TLS check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent tls check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.tls'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/tokumx/setup.py b/tokumx/setup.py deleted file mode 100644 index 1a60a612ed6cb..0000000000000 --- a/tokumx/setup.py +++ /dev/null @@ -1,78 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'tokumx', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-tokumx', - version=ABOUT['__version__'], - description='The TokuMX check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent tokumx check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - ], - # The package we're going to ship - packages=['datadog_checks.tokumx'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/tomcat/changelog.d/18580.removed b/tomcat/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/tomcat/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/tomcat/pyproject.toml b/tomcat/pyproject.toml index 03a4f50211b94..0066e14daf1b0 100644 --- a/tomcat/pyproject.toml +++ b/tomcat/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/tomcat/setup.py b/tomcat/setup.py deleted file mode 100644 index 186904746e46f..0000000000000 --- a/tomcat/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'tomcat', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-tomcat', - version=ABOUT['__version__'], - description='The Tomcat check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent tomcat check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.tomcat'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/twemproxy/changelog.d/18580.removed b/twemproxy/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/twemproxy/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/twemproxy/pyproject.toml b/twemproxy/pyproject.toml index 808384ccf037d..5dd08ee893c7d 100644 --- a/twemproxy/pyproject.toml +++ b/twemproxy/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/twemproxy/setup.py b/twemproxy/setup.py deleted file mode 100644 index 3685cab1fcf3d..0000000000000 --- a/twemproxy/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'twemproxy', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-twemproxy', - version=ABOUT['__version__'], - description='The Twemproxy check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent twemproxy check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.twemproxy'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/twistlock/changelog.d/18580.removed b/twistlock/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/twistlock/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/twistlock/pyproject.toml b/twistlock/pyproject.toml index 4232973e6f50a..fd3d22e710ac1 100644 --- a/twistlock/pyproject.toml +++ b/twistlock/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/twistlock/setup.py b/twistlock/setup.py deleted file mode 100644 index b48af270f1b60..0000000000000 --- a/twistlock/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'twistlock', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-twistlock', - version=ABOUT['__version__'], - description='The Twistlock check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent twistlock check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.twistlock'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/varnish/changelog.d/18580.removed b/varnish/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/varnish/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/varnish/pyproject.toml b/varnish/pyproject.toml index 4c97d27bc9b2c..cecf1e2183a9f 100644 --- a/varnish/pyproject.toml +++ b/varnish/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/varnish/setup.py b/varnish/setup.py deleted file mode 100644 index 4cf478a2bf48a..0000000000000 --- a/varnish/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "varnish", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-varnish', - version=ABOUT["__version__"], - description='The Varnish check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent varnish check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.varnish'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/vault/changelog.d/18580.removed b/vault/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/vault/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/vault/pyproject.toml b/vault/pyproject.toml index 56fbd39f18cf2..4decac34151a4 100644 --- a/vault/pyproject.toml +++ b/vault/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/vault/setup.py b/vault/setup.py deleted file mode 100644 index bb63644dfbcf7..0000000000000 --- a/vault/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'vault', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-vault', - version=ABOUT['__version__'], - description='The Vault check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent vault check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.vault'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/vertica/changelog.d/18580.removed b/vertica/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/vertica/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/vertica/pyproject.toml b/vertica/pyproject.toml index 93111de7773ee..c54b67d12edc5 100644 --- a/vertica/pyproject.toml +++ b/vertica/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "vertica-python==1.2.0; python_version < '3.0'", - "vertica-python==1.4.0; python_version > '3.0'", + "vertica-python==1.4.0", ] [project.urls] diff --git a/vertica/setup.py b/vertica/setup.py deleted file mode 100644 index c8aa339923c51..0000000000000 --- a/vertica/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2019-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'vertica', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-vertica', - version=ABOUT['__version__'], - description='The Vertica check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent vertica check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.vertica'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/voltdb/changelog.d/18580.removed b/voltdb/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/voltdb/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/voltdb/pyproject.toml b/voltdb/pyproject.toml index f3e44c69e421c..e14e6759a5607 100644 --- a/voltdb/pyproject.toml +++ b/voltdb/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/voltdb/setup.py b/voltdb/setup.py deleted file mode 100644 index 09d491ba074e6..0000000000000 --- a/voltdb/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# (C) Datadog, Inc. 
2020-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'voltdb', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-voltdb', - version=ABOUT['__version__'], - description='The VoltDB check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent voltdb check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.voltdb'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/vsphere/changelog.d/18580.removed b/vsphere/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/vsphere/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/vsphere/pyproject.toml b/vsphere/pyproject.toml index 8726007cd4fa0..d60958d76573a 100644 --- a/vsphere/pyproject.toml +++ b/vsphere/pyproject.toml @@ -24,7 +24,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,9 +38,7 @@ license = "BSD-3-Clause" [project.optional-dependencies] deps = [ - "futures==3.4.0; python_version < '3.0'", - "pyvmomi==8.0.0.1; python_version < '3.0'", - "pyvmomi==8.0.3.0.1; python_version > '3.0'", + "pyvmomi==8.0.3.0.1", ] [project.urls] diff --git a/vsphere/setup.py b/vsphere/setup.py deleted file mode 100644 index 2ed1840881bbc..0000000000000 --- a/vsphere/setup.py +++ /dev/null @@ -1,76 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open -from os import path - -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "vsphere", "__about__.py")) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-vsphere', - version=ABOUT["__version__"], - description='The vSphere check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent vSphere check', - url='https://github.com/DataDog/integrations-core', - author='Datadog', - author_email='packages@datadoghq.com', - license='New BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - packages=['datadog_checks.vsphere'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/weblogic/changelog.d/18580.removed b/weblogic/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/weblogic/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/weblogic/pyproject.toml b/weblogic/pyproject.toml index 881889e4916bd..0ecbb9d3d9d0e 100644 --- a/weblogic/pyproject.toml +++ b/weblogic/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/weblogic/setup.py b/weblogic/setup.py deleted file mode 100644 index 32ed1abc45d0c..0000000000000 --- a/weblogic/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -# (C) Datadog, Inc. 
2021-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'weblogic', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - - -setup( - name='datadog-weblogic', - version=ABOUT['__version__'], - description='The weblogic check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent weblogic check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD-3-Clause', - # See https://pypi.org/classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.weblogic'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/win32_event_log/changelog.d/18580.removed b/win32_event_log/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/win32_event_log/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/win32_event_log/pyproject.toml b/win32_event_log/pyproject.toml index 54afc55303880..6ddc69694ebfc 100644 --- a/win32_event_log/pyproject.toml +++ b/win32_event_log/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", "uptime==3.0.1", ] diff --git a/win32_event_log/setup.py b/win32_event_log/setup.py deleted file mode 100644 index b4fe57d826e71..0000000000000 --- a/win32_event_log/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'win32_event_log', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-win32_event_log', - version=ABOUT['__version__'], - description='The Win32 Event Log check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent win32_event_log check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.win32_event_log'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/windows_service/changelog.d/18580.removed b/windows_service/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/windows_service/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. 
diff --git a/windows_service/pyproject.toml b/windows_service/pyproject.toml index f0f9f4bf0257c..c7bcbe2d7e58e 100644 --- a/windows_service/pyproject.toml +++ b/windows_service/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/windows_service/setup.py b/windows_service/setup.py deleted file mode 100644 index 133999d0b7ca9..0000000000000 --- a/windows_service/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'windows_service', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-windows_service', - version=ABOUT['__version__'], - description='The Windows Service check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent windows_service check', - # The project's main homepage. 
- url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.windows_service'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/wmi_check/changelog.d/18580.removed b/wmi_check/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/wmi_check/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/wmi_check/pyproject.toml b/wmi_check/pyproject.toml index 7eaad6009a281..b53c3a3091a41 100644 --- a/wmi_check/pyproject.toml +++ b/wmi_check/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", @@ -39,8 +38,7 @@ dynamic = [ [project.optional-dependencies] deps = [ - "pywin32==228; sys_platform == 'win32' and python_version < '3.0'", - "pywin32==306; sys_platform == 'win32' and python_version > '3.0'", + "pywin32==306; sys_platform == 'win32'", ] [project.urls] diff --git a/wmi_check/setup.py b/wmi_check/setup.py deleted file mode 100644 index 37cbafeec2fbe..0000000000000 --- a/wmi_check/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'wmi_check', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-wmi_check', - version=ABOUT['__version__'], - description='The WMI check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent wmi_check check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.wmi_check'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/yarn/changelog.d/18580.removed b/yarn/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/yarn/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/yarn/pyproject.toml b/yarn/pyproject.toml index 7332e17f11d65..e4a4d85c2365c 100644 --- a/yarn/pyproject.toml +++ b/yarn/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/yarn/setup.py b/yarn/setup.py deleted file mode 100644 index 14bd1f1994374..0000000000000 --- a/yarn/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -# To use a consistent encoding -from codecs import open -from os import path - -# Always prefer setuptools over distutils -from setuptools import setup - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# Get version info -ABOUT = {} -with open(path.join(HERE, "datadog_checks", "yarn", "__about__.py")) as f: - exec(f.read(), ABOUT) - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-yarn', - version=ABOUT['__version__'], - description='The Yarn check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent yarn check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.yarn'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) diff --git a/zk/changelog.d/18580.removed b/zk/changelog.d/18580.removed new file mode 100644 index 0000000000000..1029637dfcead --- /dev/null +++ b/zk/changelog.d/18580.removed @@ -0,0 +1 @@ +Remove support for Python 2. diff --git a/zk/pyproject.toml b/zk/pyproject.toml index 000b4fb312d4a..1474dab090bf5 100644 --- a/zk/pyproject.toml +++ b/zk/pyproject.toml @@ -25,7 +25,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.11", "Topic :: System :: Monitoring", "Private :: Do Not Upload", diff --git a/zk/setup.py b/zk/setup.py deleted file mode 100644 index 53e1d344b1d4a..0000000000000 --- a/zk/setup.py +++ /dev/null @@ -1,80 +0,0 @@ -# (C) Datadog, Inc. 
2018-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from codecs import open # To use a consistent encoding -from os import path - -from setuptools import setup - -HERE = path.dirname(path.abspath(__file__)) - -# Get version info -ABOUT = {} -with open(path.join(HERE, 'datadog_checks', 'zk', '__about__.py')) as f: - exec(f.read(), ABOUT) - -# Get the long description from the README file -with open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - - -def get_dependencies(): - dep_file = path.join(HERE, 'requirements.in') - if not path.isfile(dep_file): - return [] - - with open(dep_file, encoding='utf-8') as f: - return f.readlines() - - -def parse_pyproject_array(name): - import os - import re - from ast import literal_eval - - pattern = r'^{} = (\[.*?\])$'.format(name) - - with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f: - # Windows \r\n prevents match - contents = '\n'.join(line.rstrip() for line in f.readlines()) - - array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1) - return literal_eval(array) - - -CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0] - -setup( - name='datadog-zk', - version=ABOUT['__version__'], - description='The ZooKeeper check', - long_description=long_description, - long_description_content_type='text/markdown', - keywords='datadog agent zk check', - # The project's main homepage. - url='https://github.com/DataDog/integrations-core', - # Author details - author='Datadog', - author_email='packages@datadoghq.com', - # License - license='BSD', - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'Topic :: System :: Monitoring', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.11', - ], - # The package we're going to ship - packages=['datadog_checks.zk'], - # Run-time dependencies - install_requires=[CHECKS_BASE_REQ], - extras_require={'deps': parse_pyproject_array('deps')}, - # Extra files to ship with the wheel package - include_package_data=True, -) From 35be56123abb02ef0ab7daccd42083da40f10197 Mon Sep 17 00:00:00 2001 From: "agent-platform-auto-pr[bot]" <153269286+agent-platform-auto-pr[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 07:57:51 +0200 Subject: [PATCH 05/23] Update dependency resolution (#18588) Co-authored-by: iliakur --- .deps/metadata.json | 2 +- .deps/resolved/linux-aarch64_py3.txt | 2 +- .deps/resolved/linux-x86_64_py3.txt | 2 +- .deps/resolved/macos-x86_64_py3.txt | 10 +++++----- .deps/resolved/windows-x86_64_py3.txt | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.deps/metadata.json b/.deps/metadata.json index bd1d69c120dfd..4a7fbd9a84d6b 100644 --- a/.deps/metadata.json +++ b/.deps/metadata.json @@ -1,3 +1,3 @@ { - "sha256": "c6a2b5cce003888652d1b2f4c18f4fc40942bf4132af84a1931e81f37b5fdb17" + "sha256": "2566080e6db100ff1cb0b3b98c011fa1d70fed5bfd9042fc4fcf29a10491ca84" } diff --git a/.deps/resolved/linux-aarch64_py3.txt b/.deps/resolved/linux-aarch64_py3.txt index c26dbcccd23e3..d57a4776d3828 100644 --- a/.deps/resolved/linux-aarch64_py3.txt +++ b/.deps/resolved/linux-aarch64_py3.txt @@ -124,4 +124,4 @@ vertica-python @ 
https://agent-int-packages.datadoghq.com/external/vertica-pytho websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 -zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.1-py3-none-any.whl#sha256=9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/linux-x86_64_py3.txt b/.deps/resolved/linux-x86_64_py3.txt index c8db799a83736..38521ee454c8d 100644 --- a/.deps/resolved/linux-x86_64_py3.txt +++ b/.deps/resolved/linux-x86_64_py3.txt @@ -125,4 +125,4 @@ vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-pytho websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 -zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.1-py3-none-any.whl#sha256=9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/macos-x86_64_py3.txt b/.deps/resolved/macos-x86_64_py3.txt index 76a662eb6e418..57b4c962314f6 100644 --- a/.deps/resolved/macos-x86_64_py3.txt +++ b/.deps/resolved/macos-x86_64_py3.txt @@ -18,7 +18,7 @@ charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-n clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp311-cp311-macosx_10_9_x86_64.whl#sha256=bb3401bb08d0e27166faac7ff277ff3482e096d0455152a6b7c38b3a632a007b clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl#sha256=5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154932-py3-none-macosx_10_12_universal2.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 -confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240913120822-cp311-cp311-macosx_10_12_universal2.whl#sha256=d2357529d6ade99cfb5bf70a3b42ab3bcd96b35c0ae05a8ed44ac343724528d6 +confluent-kafka @ 
https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240913180920-cp311-cp311-macosx_10_12_universal2.whl#sha256=67f13c1ac69d0d5699cc34f4636ae92a448c3ab3080e2caa26b964bdbe72e75e cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl#sha256=7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp311-cp311-macosx_12_0_x86_64.whl#sha256=86f209aa4dc544368b593aa56e1f9e302d9329410664f03dbaaf16afb6501b0e @@ -49,7 +49,7 @@ lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp311-cp31 mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896 msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d -netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240913120822-cp311-cp311-macosx_10_12_universal2.whl#sha256=af7088e06f7b24ab704694f9cdacd4582da7ec503771b487ffe5430e936e5284 +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240913180920-cp311-cp311-macosx_10_12_universal2.whl#sha256=3e9b77aac6fa2a039c19286967b00885b3c5cbc5fe48c37778d69ddda5ac7614 oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 @@ -74,7 +74,7 @@ pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2 pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl#sha256=d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312 pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=6b50040d9767197b77ed420ada29b3bf18a638f9552d80f2da817b7c4a4c9c68 -pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240913120823-cp311-cp311-macosx_10_12_universal2.whl#sha256=d921f583750fe092c18cbc0a5e0a5439da8b2294c27a8a6f92cb6f9ccac6fd78 +pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240913180921-cp311-cp311-macosx_10_12_universal2.whl#sha256=d1af9136b3b214223de266a00c251ac72c515b3086272c1665dd62ac92b8a927 pymysql @ 
https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl#sha256=401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 pyodbc @ https://agent-int-packages.datadoghq.com/external/pyodbc/pyodbc-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=aa6f46377da303bf79bcb4b559899507df4b2559f30dcfdf191358ee4b99f3ab @@ -118,10 +118,10 @@ tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-a typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 -uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240913120824-cp311-cp311-macosx_10_12_universal2.whl#sha256=5022f7754d6ed018d050ec08831d1d0f93b0e48f6a4e4a276e95989b4ec6db9d +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240913180921-cp311-cp311-macosx_10_12_universal2.whl#sha256=ffca93294211e108577b0c3dbd9b1a5b4797712ccce51aa657540e9d4c23c875 urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl#sha256=1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 -zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.1-py3-none-any.whl#sha256=9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/windows-x86_64_py3.txt b/.deps/resolved/windows-x86_64_py3.txt index 410ca5c6eddd1..7fbe88717374f 100644 --- a/.deps/resolved/windows-x86_64_py3.txt +++ b/.deps/resolved/windows-x86_64_py3.txt @@ -122,4 +122,4 @@ vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-pytho websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp311-cp311-win_amd64.whl#sha256=aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 xmltodict @ 
https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 -zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.1-py3-none-any.whl#sha256=9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 From 333a5ec2d859eaa35787914a09a961d185515b9b Mon Sep 17 00:00:00 2001 From: aquiladayc <56868556+aquiladayc@users.noreply.github.com> Date: Mon, 16 Sep 2024 21:16:29 +0900 Subject: [PATCH 06/23] Add service account instruction (#18575) Add instruction to set service account when using Sidecar Injection by Admission Controller --- eks_fargate/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eks_fargate/README.md b/eks_fargate/README.md index e151d693d5ed3..e30ed44ae4b00 100644 --- a/eks_fargate/README.md +++ b/eks_fargate/README.md @@ -120,6 +120,7 @@ The setup below configures the Cluster Agent to communicate with the Agent sidec **Prerequisites** * Set up RBAC in the application namespace(s). See the [AWS EKS Fargate RBAC](#aws-eks-fargate-rbac) section on this page. +* Bind above RBAC to application pod by setting Service Account name. * Create a Kubernetes secret containing your Datadog API key and Cluster Agent token in the Datadog installation and application namespaces: ```shell @@ -291,6 +292,7 @@ The setup below configures the Cluster Agent to communicate with the Agent sidec **Prerequisites** * Set up RBAC in the application namespace(s). See the [AWS EKS Fargate RBAC](#aws-eks-fargate-rbac) section on this page. +* Bind above RBAC to application pod by setting Service Account name. 
* Create a Kubernetes secret containing your Datadog API key and Cluster Agent token in the Datadog installation and application namespaces: ```shell From db6993ca7c7098bfd909a0a6d31d86294a4d90cf Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Mon, 16 Sep 2024 17:13:04 +0200 Subject: [PATCH 07/23] Remove use of 'six' package from most tests (and haproxy and istio) (#18593) * Remove use of 'six' package from most tests * fix redisdb test --- amazon_msk/tests/conftest.py | 2 +- clickhouse/tests/test_unit.py | 5 +-- consul/tests/consul_mocks.py | 4 +- datadog_checks_base/tests/test_metadata.py | 9 +---- datadog_checks_dev/tests/test_conditions.py | 2 +- disk/tests/test_unit.py | 13 +++---- dns_check/tests/mocks.py | 3 +- elastic/tests/test_integration.py | 7 ++-- esxi/tests/ssh_tunnel.py | 7 +--- go_expvar/tests/test_integration.py | 3 +- go_expvar/tests/test_unit.py | 5 +-- .../datadog_checks/haproxy/legacy/haproxy.py | 39 ++++++++----------- hdfs_datanode/tests/test_hdfs_datanode.py | 3 +- hdfs_namenode/tests/test_hdfs_namenode.py | 7 ++-- ibm_mq/tests/conftest.py | 1 - ibm_mq/tests/test_ibm_mq_int.py | 5 +-- ibm_mq/tests/test_ibm_mq_unit.py | 5 +-- istio/datadog_checks/istio/istio.py | 12 +----- kubelet/tests/test_kubelet.py | 3 +- lighttpd/tests/conftest.py | 4 +- mapr/tests/test_unit.py | 3 +- mapreduce/tests/test_unit.py | 25 ++++++------ mesos_master/tests/test_check.py | 7 ++-- mesos_master/tests/test_integration_e2e.py | 3 +- mesos_slave/tests/test_integration_e2e.py | 3 +- mesos_slave/tests/test_unit.py | 5 +-- network/tests/common.py | 17 +------- network/tests/test_ethtool.py | 17 ++++---- network/tests/test_linux.py | 16 ++++---- network/tests/test_network.py | 4 -- network/tests/test_windows.py | 14 +++---- openstack/tests/test_openstack.py | 3 +- openstack_controller/tests/ssh_tunnel.py | 7 +--- postfix/tests/test_e2e.py | 3 +- postfix/tests/test_integration.py | 3 +- postgres/tests/test_statements.py | 5 +-- postgres/tests/test_unit.py | 3 +- process/tests/test_process.py | 3 +- redisdb/tests/test_unit.py | 7 ++-- snmp/tests/common.py | 3 +- system_core/tests/test_system_core.py | 3 +- tls/tests/conftest.py | 5 +-- tls/tests/utils.py | 3 +- vault/tests/test_vault.py | 2 +- vsphere/tests/common.py | 2 +- vsphere/tests/mocked_api.py | 7 ++-- vsphere/tests/test_cache.py | 9 ++--- yarn/tests/conftest.py | 2 +- yarn/tests/test_yarn.py | 21 +++++----- zk/tests/conftest.py | 2 +- 50 files changed, 127 insertions(+), 219 deletions(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 5c3e6dc629c52..1298134d93e65 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -2,10 +2,10 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import json +from urllib.parse import urlparse import mock import pytest -from six.moves.urllib.parse import urlparse from datadog_checks.dev import docker_run from datadog_checks.dev.http import MockResponse diff --git a/clickhouse/tests/test_unit.py b/clickhouse/tests/test_unit.py index 8d191056e3e22..3734bfa6c04bc 100644 --- a/clickhouse/tests/test_unit.py +++ b/clickhouse/tests/test_unit.py @@ -4,7 +4,6 @@ import mock import pytest from clickhouse_driver.errors import Error, NetworkError -from six import PY3 from datadog_checks.clickhouse import ClickhouseCheck, queries @@ -65,9 +64,7 @@ def test_error_query(instance, dd_run_check): ids=['SystemMetrics', 'SystemEvents'], ) def test_latest_metrics_supported(metrics, ignored_columns, metric_source_url): - # While we're 
here, also check key order - if PY3: - assert list(metrics) == sorted(metrics) + assert list(metrics) == sorted(metrics) described_metrics = parse_described_metrics(metric_source_url) diff --git a/consul/tests/consul_mocks.py b/consul/tests/consul_mocks.py index e9f1ad98f813d..be6cd803f221b 100644 --- a/consul/tests/consul_mocks.py +++ b/consul/tests/consul_mocks.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import random -from six import iteritems - MOCK_CONFIG = {'url': 'http://localhost:8500', 'catalog_checks': True} MOCK_CONFIG_DISABLE_SERVICE_TAG = { 'url': 'http://localhost:8500', @@ -30,7 +28,7 @@ def mock_check(check, mocks): - for f_name, m in iteritems(mocks): + for f_name, m in mocks.items(): if not hasattr(check, f_name): continue else: diff --git a/datadog_checks_base/tests/test_metadata.py b/datadog_checks_base/tests/test_metadata.py index 727669d164fbb..f2de6cd886172 100644 --- a/datadog_checks_base/tests/test_metadata.py +++ b/datadog_checks_base/tests/test_metadata.py @@ -8,7 +8,6 @@ import mock import pytest -from six import PY3 from datadog_checks.base import AgentCheck, ensure_bytes, ensure_unicode @@ -57,12 +56,8 @@ class NewAgentCheck(AgentCheck): def test_encoding(self): check = AgentCheck('test', {}, [{}]) check.check_id = 'test:123' - if PY3: - constructor = ensure_bytes - finalizer = ensure_unicode - else: - constructor = ensure_unicode - finalizer = ensure_bytes + constructor = ensure_bytes + finalizer = ensure_unicode name = constructor(u'nam\u00E9') value = constructor(u'valu\u00E9') diff --git a/datadog_checks_dev/tests/test_conditions.py b/datadog_checks_dev/tests/test_conditions.py index cc32c46bfac6a..a4e96660e7fb3 100644 --- a/datadog_checks_dev/tests/test_conditions.py +++ b/datadog_checks_dev/tests/test_conditions.py @@ -3,9 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os import sys +from urllib.response import addinfourl import pytest -from six.moves.urllib.response import addinfourl from datadog_checks.dev.conditions import CheckCommandOutput, CheckDockerLogs, CheckEndpoints, WaitFor from datadog_checks.dev.errors import RetryError diff --git a/disk/tests/test_unit.py b/disk/tests/test_unit.py index fa92719850075..ed896a4d1672b 100644 --- a/disk/tests/test_unit.py +++ b/disk/tests/test_unit.py @@ -6,7 +6,6 @@ import mock import pytest -from six import iteritems from datadog_checks.base.utils.platform import Platform from datadog_checks.base.utils.timeout import TimeoutException @@ -65,10 +64,10 @@ def test_default(aggregator, gauge_metrics, rate_metrics, count_metrics, dd_run_ else: tags = [] - for name, value in iteritems(gauge_metrics): + for name, value in gauge_metrics.items(): aggregator.assert_metric(name, value=value, count=1, metric_type=aggregator.GAUGE, tags=tags) - for name, value in iteritems(rate_metrics): + for name, value in rate_metrics.items(): aggregator.assert_metric( name, value=value, @@ -77,7 +76,7 @@ def test_default(aggregator, gauge_metrics, rate_metrics, count_metrics, dd_run_ tags=['device:{}'.format(DEFAULT_DEVICE_NAME), 'device_name:{}'.format(DEFAULT_DEVICE_BASE_NAME)], ) - for name, value in iteritems(count_metrics): + for name, value in count_metrics.items(): aggregator.assert_metric( name, value=value, @@ -110,14 +109,14 @@ def test_use_mount(aggregator, instance_basic_mount, gauge_metrics, rate_metrics c = Disk('disk', {}, [instance_basic_mount]) dd_run_check(c) - for name, value in iteritems(gauge_metrics): + for name, value in gauge_metrics.items(): 
aggregator.assert_metric( name, value=value, tags=['device:{}'.format(DEFAULT_MOUNT_POINT), 'device_name:{}'.format(DEFAULT_DEVICE_BASE_NAME)], ) - for name, value in chain(iteritems(rate_metrics), iteritems(count_metrics)): + for name, value in chain(rate_metrics.items(), count_metrics.items()): aggregator.assert_metric( name, value=value, @@ -155,7 +154,7 @@ def test_device_tagging(aggregator, gauge_metrics, rate_metrics, count_metrics, 'device_label:mylab', ] - for name, value in chain(iteritems(gauge_metrics), iteritems(rate_metrics), iteritems(count_metrics)): + for name, value in chain(gauge_metrics.items(), rate_metrics.items(), count_metrics.items()): aggregator.assert_metric( name, value=value, diff --git a/dns_check/tests/mocks.py b/dns_check/tests/mocks.py index e4f9974fb4ceb..4d33ed376366f 100644 --- a/dns_check/tests/mocks.py +++ b/dns_check/tests/mocks.py @@ -3,7 +3,6 @@ # Licensed under Simplified BSD License (see LICENSE) from dns.resolver import NXDOMAIN -from six import PY3 class MockDNSAnswer: @@ -18,7 +17,7 @@ def __init__(self, address): else: items = [MockDNSAnswer.MockItem(address)] - self.items = {item: None for item in items} if PY3 else items + self.items = {item: None for item in items} class MockItem: def __init__(self, address): diff --git a/elastic/tests/test_integration.py b/elastic/tests/test_integration.py index c02bfb0c91e4c..77e7bba3c7263 100644 --- a/elastic/tests/test_integration.py +++ b/elastic/tests/test_integration.py @@ -6,7 +6,6 @@ import pytest import requests from packaging import version -from six import iteritems from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.elastic import ESCheck @@ -276,7 +275,7 @@ def test_node_name_as_host(dd_environment, instance_normalize_hostname, aggregat elastic_check.check(None) node_name = node_tags[-1].split(':')[1] - for m_name, _ in iteritems(STATS_METRICS): + for m_name in STATS_METRICS: aggregator.assert_metric(m_name, count=1, tags=node_tags, hostname=node_name) @@ -288,7 +287,7 @@ def test_pshard_metrics(dd_environment, aggregator): elastic_check.check(None) pshard_stats_metrics = pshard_stats_for_version(es_version) - for m_name, desc in iteritems(pshard_stats_metrics): + for m_name, desc in pshard_stats_metrics.items(): if desc[0] == 'gauge': aggregator.assert_metric(m_name) @@ -310,7 +309,7 @@ def test_detailed_index_stats(dd_environment, aggregator): es_version = elastic_check._get_es_version() elastic_check.check(None) pshard_stats_metrics = pshard_stats_for_version(es_version) - for m_name, desc in iteritems(pshard_stats_metrics): + for m_name, desc in pshard_stats_metrics.items(): if desc[0] == 'gauge' and desc[1].startswith('_all.'): aggregator.assert_metric(m_name) diff --git a/esxi/tests/ssh_tunnel.py b/esxi/tests/ssh_tunnel.py index 75fbab232fa66..e456444087fd0 100644 --- a/esxi/tests/ssh_tunnel.py +++ b/esxi/tests/ssh_tunnel.py @@ -4,21 +4,16 @@ from __future__ import absolute_import import os +import subprocess from contextlib import contextmanager import psutil -from six import PY3 from datadog_checks.dev.conditions import WaitForPortListening from datadog_checks.dev.env import environment_run from datadog_checks.dev.structures import LazyFunction, TempDir from datadog_checks.dev.utils import ON_WINDOWS, find_free_port, get_ip -if PY3: - import subprocess -else: - import subprocess32 as subprocess - PID_FILE = 'ssh.pid' diff --git a/go_expvar/tests/test_integration.py b/go_expvar/tests/test_integration.py index 19dbd7ae95b96..d1ba30869feb8 100644 --- 
a/go_expvar/tests/test_integration.py +++ b/go_expvar/tests/test_integration.py @@ -5,7 +5,6 @@ import logging import pytest -from six import iteritems from . import common @@ -25,7 +24,7 @@ def test_go_expvar(check, aggregator): aggregator.assert_metric(gauge, count=1, tags=shared_tags) for rate in common.CHECK_RATES: aggregator.assert_metric(rate, count=1, tags=shared_tags) - for rate, value in iteritems(CHECK_RATES_CUSTOM): + for rate, value in CHECK_RATES_CUSTOM.items(): aggregator.assert_metric(rate, count=1, value=value, tags=shared_tags) for count in common.CHECK_COUNT: aggregator.assert_metric(count, count=1, metric_type=3, tags=shared_tags) diff --git a/go_expvar/tests/test_unit.py b/go_expvar/tests/test_unit.py index d235da0b9aade..12047947dd59d 100644 --- a/go_expvar/tests/test_unit.py +++ b/go_expvar/tests/test_unit.py @@ -6,7 +6,6 @@ import logging import pytest -from six import iteritems from . import common @@ -83,7 +82,7 @@ def test_go_expvar_mocked(go_expvar_mock, check, aggregator): aggregator.assert_metric( gauge.format(common.CHECK_NAME), metric_type=aggregator.GAUGE, count=1, tags=shared_tags ) - for gauge, tags in iteritems(CHECK_GAUGES_CUSTOM_MOCK): + for gauge, tags in CHECK_GAUGES_CUSTOM_MOCK.items(): aggregator.assert_metric( gauge.format(common.CHECK_NAME), metric_type=aggregator.GAUGE, count=1, tags=shared_tags + tags ) @@ -145,7 +144,7 @@ def test_go_expvar_mocked_namespace(go_expvar_mock, check, aggregator): for gauge in CHECK_GAUGES: aggregator.assert_metric(gauge.format(metric_namespace), count=1, tags=shared_tags) - for gauge, tags in iteritems(CHECK_GAUGES_CUSTOM_MOCK): + for gauge, tags in CHECK_GAUGES_CUSTOM_MOCK.items(): aggregator.assert_metric(gauge.format(metric_namespace), count=1, tags=shared_tags + tags) for rate in CHECK_RATES: diff --git a/haproxy/datadog_checks/haproxy/legacy/haproxy.py b/haproxy/datadog_checks/haproxy/legacy/haproxy.py index 96c7a000cbdd3..f34bd9de9dd90 100644 --- a/haproxy/datadog_checks/haproxy/legacy/haproxy.py +++ b/haproxy/datadog_checks/haproxy/legacy/haproxy.py @@ -9,9 +9,7 @@ import socket import time from collections import defaultdict, namedtuple - -from six import PY2, iteritems -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import AgentCheck, is_affirmative, to_string from datadog_checks.base.errors import CheckException @@ -115,20 +113,15 @@ def _fetch_url_data(self): @staticmethod def _decode_response(response): - # it only needs additional decoding in py3, so skip it if it's py2 - if PY2: - return response.content.splitlines() - else: - content = response.content - - # If the content is a string, it can't be decoded again - # But if it's bytes, it can be decoded. - # So, check if it has the decode method - decode_fn = getattr(content, "decode", None) - if callable(decode_fn): - content = content.decode('utf-8') + content = response.content + # If the content is a string, it can't be decoded again + # But if it's bytes, it can be decoded. 
+ # So, check if it has the decode method + decode_fn = getattr(content, "decode", None) + if callable(decode_fn): + content = content.decode('utf-8') - return content.splitlines() + return content.splitlines() @staticmethod def _parse_uptime(uptime): @@ -443,7 +436,7 @@ def _tag_from_regex(self, service_name): # match.groupdict() returns tags dictionary in the form of {'name': 'value'} # convert it to Datadog tag LIST: ['name:value'] - return ["%s:%s" % (name, value) for name, value in iteritems(match.groupdict())] + return ["%s:%s" % (name, value) for name, value in match.groupdict().items()] @staticmethod def _normalize_status(status): @@ -463,7 +456,7 @@ def _process_backend_hosts_metric(self, active_tag=None): agg_statuses = defaultdict(lambda: {status: 0 for status in Services.COLLATED_STATUSES}) active_tag = [] if active_tag is None else active_tag - for host_status, count in iteritems(self.hosts_statuses): + for host_status, count in self.hosts_statuses.items(): try: service, back_or_front, hostname, status = host_status except ValueError: @@ -512,7 +505,7 @@ def _process_status_metric( reported_statuses_dict[reported_status] = 0 statuses_counter = defaultdict(lambda: copy.copy(reported_statuses_dict)) - for host_status, count in iteritems(self.hosts_statuses): + for host_status, count in self.hosts_statuses.items(): hostname = None try: service, _, hostname, status = host_status @@ -555,13 +548,13 @@ def _process_status_metric( status_key = Services.STATUS_TO_COLLATED.get(status, Services.UNAVAILABLE) agg_statuses_counter[tuple(agg_tags)][status_key] += count - for tags, count_per_status in iteritems(statuses_counter): - for status, count in iteritems(count_per_status): + for tags, count_per_status in statuses_counter.items(): + for status, count in count_per_status.items(): self.gauge('haproxy.count_per_status', count, tags=tags + ('status:%s' % status,)) # Send aggregates - for service_tags, service_agg_statuses in iteritems(agg_statuses_counter): - for status, count in iteritems(service_agg_statuses): + for service_tags, service_agg_statuses in agg_statuses_counter.items(): + for status, count in service_agg_statuses.items(): self.gauge("haproxy.count_per_status", count, tags=service_tags + ('status:%s' % status,)) def _process_metrics(self, data, custom_tags=None, active_tag=None): diff --git a/hdfs_datanode/tests/test_hdfs_datanode.py b/hdfs_datanode/tests/test_hdfs_datanode.py index 7bc755316619b..a444f95c9fa45 100644 --- a/hdfs_datanode/tests/test_hdfs_datanode.py +++ b/hdfs_datanode/tests/test_hdfs_datanode.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import mock import pytest -from six import iteritems from datadog_checks.hdfs_datanode import HDFSDataNode @@ -38,7 +37,7 @@ def test_check(aggregator, mocked_request): HDFSDataNode.JMX_SERVICE_CHECK, status=HDFSDataNode.OK, tags=HDFS_DATANODE_METRIC_TAGS + CUSTOM_TAGS, count=1 ) - for metric, value in iteritems(HDFS_DATANODE_METRICS_VALUES): + for metric, value in HDFS_DATANODE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=HDFS_DATANODE_METRIC_TAGS + CUSTOM_TAGS, count=1) diff --git a/hdfs_namenode/tests/test_hdfs_namenode.py b/hdfs_namenode/tests/test_hdfs_namenode.py index 0112dabc49222..ade519bbe47f4 100644 --- a/hdfs_namenode/tests/test_hdfs_namenode.py +++ b/hdfs_namenode/tests/test_hdfs_namenode.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import mock import pytest -from six import iteritems from 
datadog_checks.hdfs_namenode import HDFSNameNode @@ -34,13 +33,13 @@ def test_check(aggregator, dd_run_check, mocked_request): HDFSNameNode.JMX_SERVICE_CHECK, HDFSNameNode.OK, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1 ) - for metric, value in iteritems(HDFS_NAMESYSTEM_STATE_METRICS_VALUES): + for metric, value in HDFS_NAMESYSTEM_STATE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1) - for metric, value in iteritems(HDFS_NAMESYSTEM_METRICS_VALUES): + for metric, value in HDFS_NAMESYSTEM_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=1) - for metric, value in iteritems(HDFS_NAMESYSTEM_MUTUAL_METRICS_VALUES): + for metric, value in HDFS_NAMESYSTEM_MUTUAL_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=HDFS_NAMESYSTEM_METRIC_TAGS + CUSTOM_TAGS, count=2) aggregator.assert_all_metrics_covered() diff --git a/ibm_mq/tests/conftest.py b/ibm_mq/tests/conftest.py index 8e9936f47a48e..824795fc3e70b 100644 --- a/ibm_mq/tests/conftest.py +++ b/ibm_mq/tests/conftest.py @@ -7,7 +7,6 @@ import re import pytest -from six.moves import range from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, WaitFor diff --git a/ibm_mq/tests/test_ibm_mq_int.py b/ibm_mq/tests/test_ibm_mq_int.py index 0892df56c511e..72c2655eebc8e 100644 --- a/ibm_mq/tests/test_ibm_mq_int.py +++ b/ibm_mq/tests/test_ibm_mq_int.py @@ -7,7 +7,6 @@ import mock import pytest -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.utils.time import ensure_aware_datetime @@ -280,7 +279,7 @@ def test_check_channel_count(aggregator, get_check, instance_queue_regex_tag, se 'my_channel', pymqi.CMQCFC.MQCHS_RUNNING, ["channel:my_channel"] ) - for status, expected_value in iteritems(metrics_to_assert): + for status, expected_value in metrics_to_assert.items(): aggregator.assert_metric( 'ibm_mq.channel.count', expected_value, tags=["channel:my_channel", "status:" + status] ) @@ -305,7 +304,7 @@ def test_check_channel_count_status_unknown(aggregator, get_check, instance_queu check = get_check(instance_queue_regex_tag) check.channel_metric_collector._submit_channel_count('my_channel', 123, ["channel:my_channel"]) - for status, expected_value in iteritems(metrics_to_assert): + for status, expected_value in metrics_to_assert.items(): aggregator.assert_metric( 'ibm_mq.channel.count', expected_value, tags=["channel:my_channel", "status:" + status] ) diff --git a/ibm_mq/tests/test_ibm_mq_unit.py b/ibm_mq/tests/test_ibm_mq_unit.py index 7a62d19c199d2..ac9f4ef87437c 100644 --- a/ibm_mq/tests/test_ibm_mq_unit.py +++ b/ibm_mq/tests/test_ibm_mq_unit.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import mock import pytest -from six import iteritems from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.dev.testing import requires_py3 @@ -37,7 +36,7 @@ def test_channel_status_service_check_default_mapping(aggregator, get_check, ins 'my_channel', status, ["channel:my_channel_{}".format(status)] ) - for status, service_check_status in iteritems(service_check_map): + for status, service_check_status in service_check_map.items(): aggregator.assert_service_check( 'ibm_mq.channel.status', service_check_status, tags=["channel:my_channel_{}".format(status)] ) @@ -80,7 +79,7 @@ def test_channel_status_service_check_custom_mapping(aggregator, 
get_check, inst 'my_channel', status, ["channel:my_channel_{}".format(status)] ) - for status, service_check_status in iteritems(service_check_map): + for status, service_check_status in service_check_map.items(): aggregator.assert_service_check( 'ibm_mq.channel.status', service_check_status, tags=["channel:my_channel_{}".format(status)] ) diff --git a/istio/datadog_checks/istio/istio.py b/istio/datadog_checks/istio/istio.py index 26528d51fd0d6..5bccdbf84fc0e 100644 --- a/istio/datadog_checks/istio/istio.py +++ b/istio/datadog_checks/istio/istio.py @@ -1,10 +1,9 @@ # (C) Datadog, Inc. 2018-Present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) -from six import PY2 - from datadog_checks.base import ConfigurationError, OpenMetricsBaseCheck, is_affirmative +from .check import IstioCheckV2 from .constants import BLACKLIST_LABELS from .legacy_1_4 import LegacyIstioCheck_1_4 from .metrics import ISTIOD_METRICS @@ -46,15 +45,6 @@ def __new__(cls, name, init_config, instances): instance = instances[0] if is_affirmative(instance.get('use_openmetrics', False)): - if PY2: - raise ConfigurationError( - "Openmetrics on this integration is only available when using py3. " - "Check https://docs.datadoghq.com/agent/guide/agent-v6-python-3 " - "for more information" - ) - # TODO: when we drop Python 2 move this import up top - from .check import IstioCheckV2 - return IstioCheckV2(name, init_config, instances) else: if instance.get('istiod_endpoint'): diff --git a/kubelet/tests/test_kubelet.py b/kubelet/tests/test_kubelet.py index fcc6c88b78271..4328df2671443 100644 --- a/kubelet/tests/test_kubelet.py +++ b/kubelet/tests/test_kubelet.py @@ -11,7 +11,6 @@ import pytest import requests import requests_mock -from six import iteritems from datadog_checks.base.checks.kubelet_base.base import KubeletCredentials from datadog_checks.base.errors import SkipInstanceError @@ -466,7 +465,7 @@ def test_bad_config(): def test_parse_quantity(): - for raw, res in iteritems(QUANTITIES): + for raw, res in QUANTITIES.items(): assert KubeletCheck.parse_quantity(raw) == res diff --git a/lighttpd/tests/conftest.py b/lighttpd/tests/conftest.py index 0338a98b7ea33..bf086334e83fe 100644 --- a/lighttpd/tests/conftest.py +++ b/lighttpd/tests/conftest.py @@ -2,10 +2,10 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from copy import deepcopy +from urllib import error +from urllib.request import urlopen import pytest -from six.moves.urllib import error -from six.moves.urllib.request import urlopen from datadog_checks.dev import WaitFor, docker_run from datadog_checks.lighttpd import Lighttpd diff --git a/mapr/tests/test_unit.py b/mapr/tests/test_unit.py index a4595c6d03416..08364463296ff 100644 --- a/mapr/tests/test_unit.py +++ b/mapr/tests/test_unit.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest -from six import iteritems from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.mapr import MaprCheck @@ -29,7 +28,7 @@ def test_metrics_constants(): @pytest.mark.unit def test_get_stream_id(): - for (text, rng), value in iteritems(STREAM_ID_FIXTURE): + for (text, rng), value in STREAM_ID_FIXTURE.items(): assert get_stream_id_for_topic(text, rng=rng) == value diff --git a/mapreduce/tests/test_unit.py b/mapreduce/tests/test_unit.py index 7f701b8c683f1..5d7c7b333086c 100644 --- a/mapreduce/tests/test_unit.py +++ b/mapreduce/tests/test_unit.py @@ -1,7 +1,6 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six import iteritems from datadog_checks.mapreduce import MapReduceCheck @@ -41,19 +40,19 @@ def test_check(aggregator, dd_run_check, mocked_request): expected_tags = COMMON_TAGS + CLUSTER_TAGS # Check the MapReduce job metrics - for metric, value in iteritems(MAPREDUCE_JOB_METRIC_VALUES): + for metric, value in MAPREDUCE_JOB_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=expected_tags, count=1) # Check the map task metrics - for metric, value in iteritems(MAPREDUCE_MAP_TASK_METRIC_VALUES): + for metric, value in MAPREDUCE_MAP_TASK_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_MAP_TASK_METRIC_TAGS + expected_tags, count=1) # Check the reduce task metrics - for metric, value in iteritems(MAPREDUCE_REDUCE_TASK_METRIC_VALUES): + for metric, value in MAPREDUCE_REDUCE_TASK_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_REDUCE_TASK_METRIC_TAGS + expected_tags, count=1) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ.items(): aggregator.assert_metric( metric, value=attributes["value"], @@ -62,7 +61,7 @@ def test_check(aggregator, dd_run_check, mocked_request): ) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN.items(): aggregator.assert_metric( metric, value=attributes["value"], @@ -71,7 +70,7 @@ def test_check(aggregator, dd_run_check, mocked_request): ) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS.items(): aggregator.assert_metric( metric, value=attributes["value"], @@ -131,19 +130,19 @@ def test_disable_legacy_cluster_tag(aggregator, dd_run_check, mocked_request): expected_tags.append(MAPREDUCE_CLUSTER_TAG) # Check the MapReduce job metrics - for metric, value in iteritems(MAPREDUCE_JOB_METRIC_VALUES): + for metric, value in MAPREDUCE_JOB_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=expected_tags, count=1) # Check the map task metrics - for metric, value in iteritems(MAPREDUCE_MAP_TASK_METRIC_VALUES): + for metric, value in MAPREDUCE_MAP_TASK_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_MAP_TASK_METRIC_TAGS + expected_tags, count=1) # Check the reduce task metrics - for metric, value in iteritems(MAPREDUCE_REDUCE_TASK_METRIC_VALUES): + for metric, value in MAPREDUCE_REDUCE_TASK_METRIC_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=MAPREDUCE_REDUCE_TASK_METRIC_TAGS + expected_tags, count=1) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_READ.items(): aggregator.assert_metric( metric, value=attributes["value"], @@ -152,7 +151,7 @@ def test_disable_legacy_cluster_tag(aggregator, dd_run_check, mocked_request): ) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_WRITTEN.items(): 
aggregator.assert_metric( metric, value=attributes["value"], @@ -161,7 +160,7 @@ def test_disable_legacy_cluster_tag(aggregator, dd_run_check, mocked_request): ) # Check the MapReduce job counter metrics - for metric, attributes in iteritems(MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS): + for metric, attributes in MAPREDUCE_JOB_COUNTER_METRIC_VALUES_RECORDS.items(): aggregator.assert_metric( metric, value=attributes["value"], diff --git a/mesos_master/tests/test_check.py b/mesos_master/tests/test_check.py index 4707201f31fd2..721acce7a3613 100644 --- a/mesos_master/tests/test_check.py +++ b/mesos_master/tests/test_check.py @@ -4,7 +4,6 @@ import mock import pytest import requests -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException @@ -26,11 +25,11 @@ def test_check(check, instance, aggregator): ): metrics.update(d) - for _, v in iteritems(check.FRAMEWORK_METRICS): + for v in check.FRAMEWORK_METRICS.values(): aggregator.assert_metric(v[0]) - for _, v in iteritems(metrics): + for v in metrics.values(): aggregator.assert_metric(v[0]) - for _, v in iteritems(check.ROLE_RESOURCES_METRICS): + for v in check.ROLE_RESOURCES_METRICS.values(): aggregator.assert_metric(v[0]) aggregator.assert_metric('mesos.cluster.total_frameworks') diff --git a/mesos_master/tests/test_integration_e2e.py b/mesos_master/tests/test_integration_e2e.py index 88334b1a16ebe..482d985e8f0ed 100644 --- a/mesos_master/tests/test_integration_e2e.py +++ b/mesos_master/tests/test_integration_e2e.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest -from six import iteritems from datadog_checks.mesos_master import MesosMaster @@ -37,7 +36,7 @@ def assert_metric_coverage(aggregator): check.CLUSTER_FRAMEWORK_METRICS, check.STATS_METRICS, ): - for _, m in iteritems(d): + for m in d.values(): metrics.append(m[0]) for m in metrics: diff --git a/mesos_slave/tests/test_integration_e2e.py b/mesos_slave/tests/test_integration_e2e.py index 43ca56c798f27..564d06f099103 100644 --- a/mesos_slave/tests/test_integration_e2e.py +++ b/mesos_slave/tests/test_integration_e2e.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.mesos_slave import MesosSlave @@ -41,7 +40,7 @@ def assert_metrics_covered(aggregator): expected_tags = ["instance:mytag1", "url:{}/metrics/snapshot".format(URL), "mesos_node:slave"] - for _, v in iteritems(metrics): + for v in metrics.values(): aggregator.assert_metric(v[0]) for tag in expected_tags: aggregator.assert_metric_has_tag(v[0], tag) diff --git a/mesos_slave/tests/test_unit.py b/mesos_slave/tests/test_unit.py index 9a62af2830111..0cc7b699b6d96 100644 --- a/mesos_slave/tests/test_unit.py +++ b/mesos_slave/tests/test_unit.py @@ -5,7 +5,6 @@ import mock import pytest -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.mesos_slave import MesosSlave @@ -26,9 +25,9 @@ def test_fixtures(check, instance, aggregator): ): metrics.update(d) - for _, v in iteritems(check.TASK_METRICS): + for v in check.TASK_METRICS.values(): aggregator.assert_metric(v[0]) - for _, v in iteritems(metrics): + for v in metrics.values(): aggregator.assert_metric(v[0]) service_check_tags = [ diff --git a/network/tests/common.py b/network/tests/common.py index 812e06cedf64d..eeffd58363045 100644 --- a/network/tests/common.py +++ 
b/network/tests/common.py @@ -3,8 +3,6 @@ # Licensed under Simplified BSD License (see LICENSE) import os -from six import PY3 - from datadog_checks.dev import get_here HERE = get_here() @@ -159,16 +157,5 @@ } -if PY3: - long = int - ESCAPE_ENCODING = 'unicode-escape' - - def decode_string(s): - return s.decode(ESCAPE_ENCODING) - -else: - ESCAPE_ENCODING = 'string-escape' - - def decode_string(s): - s.decode(ESCAPE_ENCODING) - return s.decode("utf-8") +def decode_string(s): + return s.decode('unicode-escape') diff --git a/network/tests/test_ethtool.py b/network/tests/test_ethtool.py index c9d314cc0ea91..3b5e8b5a446bb 100644 --- a/network/tests/test_ethtool.py +++ b/network/tests/test_ethtool.py @@ -9,7 +9,6 @@ import mock import pytest -from six import PY3, iteritems from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.network import ethtool @@ -445,9 +444,9 @@ def send_ethtool_ioctl_mock(iface, sckt, data): for input, result in common.ETHTOOL_IOCTL_INPUTS_OUTPUTS.items(): - if input == (iface, data.tobytes() if PY3 else data.tostring()): + if input == (iface, data.tobytes()): data[:] = array.array('B', []) - data.frombytes(result) if PY3 else data.fromstring(result) + data.frombytes(result) return raise ValueError("Couldn't match any iface/data combination in the test data") @@ -558,8 +557,8 @@ def test_submit_ena_ethtool_metrics(is_linux, is_bsd, send_ethtool_ioctl, check, send_ethtool_ioctl.side_effect = send_ethtool_ioctl_mock check_instance._handle_ethtool_stats('eth0', []) - for tag, metrics in iteritems(ENA_ETHTOOL_VALUES): - for metric_suffix, value in iteritems(metrics): + for tag, metrics in ENA_ETHTOOL_VALUES.items(): + for metric_suffix, value in metrics.items(): aggregator.assert_metric( 'system.net.' + metric_suffix, count=1, @@ -581,8 +580,8 @@ def test_submit_hv_netvsc_ethtool_metrics(is_linux, is_bsd, send_ethtool_ioctl, send_ethtool_ioctl.side_effect = send_ethtool_ioctl_mock check_instance._handle_ethtool_stats('hv_netvsc', []) - for tag, metrics in iteritems(HV_NETVSC_ETHTOOL_VALUES): - for metric_suffix, value in iteritems(metrics): + for tag, metrics in HV_NETVSC_ETHTOOL_VALUES.items(): + for metric_suffix, value in metrics.items(): aggregator.assert_metric( 'system.net.' + metric_suffix, count=1, @@ -604,8 +603,8 @@ def test_submit_gve_ethtool_metrics(is_linux, is_bsd, send_ethtool_ioctl, check, send_ethtool_ioctl.side_effect = send_ethtool_ioctl_mock check_instance._handle_ethtool_stats('gve', []) - for tag, metrics in iteritems(GVE_ETHTOOL_VALUES): - for metric_suffix, value in iteritems(metrics): + for tag, metrics in GVE_ETHTOOL_VALUES.items(): + for metric_suffix, value in metrics.items(): aggregator.assert_metric( 'system.net.' 
+ metric_suffix, count=1, diff --git a/network/tests/test_linux.py b/network/tests/test_linux.py index aec259dfb9a98..7ebd5e148daf6 100644 --- a/network/tests/test_linux.py +++ b/network/tests/test_linux.py @@ -7,7 +7,6 @@ import mock import pytest -from six import PY3, iteritems from datadog_checks.base.utils.platform import Platform from datadog_checks.base.utils.subprocess_output import get_subprocess_output @@ -221,13 +220,13 @@ def test_cx_state(aggregator): with mock.patch('datadog_checks.network.check_linux.get_subprocess_output') as out: out.side_effect = ss_subprocess_mock check_instance.check(instance) - for metric, value in iteritems(CX_STATE_GAUGES_VALUES): + for metric, value in CX_STATE_GAUGES_VALUES.items(): aggregator.assert_metric(metric, value=value) aggregator.reset() out.side_effect = netstat_subprocess_mock check_instance.check(instance) - for metric, value in iteritems(CX_STATE_GAUGES_VALUES): + for metric, value in CX_STATE_GAUGES_VALUES.items(): aggregator.assert_metric(metric, value=value) aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True) @@ -242,7 +241,7 @@ def test_linux_sys_net(listdir, read_int_file, aggregator): check_instance.check({}) - for metric, value in iteritems(LINUX_SYS_NET_STATS): + for metric, value in LINUX_SYS_NET_STATS.items(): aggregator.assert_metric(metric, value=value[0], tags=['iface:lo']) aggregator.assert_metric(metric, value=value[1], tags=['iface:ens5']) @@ -259,13 +258,13 @@ def test_cx_state_mocked(aggregator): check_instance.get_net_proc_base_location = lambda x: FIXTURE_DIR check_instance.check({}) - for metric, value in iteritems(CX_STATE_GAUGES_VALUES): + for metric, value in CX_STATE_GAUGES_VALUES.items(): aggregator.assert_metric(metric, value=value) aggregator.reset() out.side_effect = netstat_subprocess_mock check_instance.check({}) - for metric, value in iteritems(CX_STATE_GAUGES_VALUES): + for metric, value in CX_STATE_GAUGES_VALUES.items(): aggregator.assert_metric(metric, value=value) aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True) @@ -283,14 +282,13 @@ def test_add_conntrack_stats_metrics(aggregator): subprocess.return_value = mocked_conntrack_stats, None, None check_instance._add_conntrack_stats_metrics(None, None, ['foo:bar']) - for metric, value in iteritems(CONNTRACK_STATS): + for metric, value in CONNTRACK_STATS.items(): aggregator.assert_metric(metric, value=value[0], tags=['foo:bar', 'cpu:0']) aggregator.assert_metric(metric, value=value[1], tags=['foo:bar', 'cpu:1']) aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True) -@pytest.mark.skipif(not PY3, reason="mock builtins only works on Python 3") def test_proc_permissions_error(aggregator, caplog): instance = copy.deepcopy(common.INSTANCE) instance['collect_connection_state'] = False @@ -330,7 +328,7 @@ def test_proc_net_metrics(aggregator): check_instance.get_net_proc_base_location = lambda x: FIXTURE_DIR check_instance.check({}) - for metric, value in iteritems(PROC_NET_STATS): + for metric, value in PROC_NET_STATS.items(): aggregator.assert_metric(metric, value=value) aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True) diff --git a/network/tests/test_network.py b/network/tests/test_network.py index 5061413fe763c..c27fe30b3171d 100644 --- a/network/tests/test_network.py +++ b/network/tests/test_network.py @@ -5,15 +5,11 @@ import mock import pytest -from six import PY3 from datadog_checks.dev import 
EnvVars from . import common -if PY3: - long = int - @pytest.mark.parametrize( "proc_location, envs, expected_net_proc_base_location", diff --git a/network/tests/test_windows.py b/network/tests/test_windows.py index 8762ac25cc66e..88c448c99ab25 100644 --- a/network/tests/test_windows.py +++ b/network/tests/test_windows.py @@ -16,15 +16,11 @@ from collections import namedtuple import mock -from six import PY3, iteritems from datadog_checks.network.check_windows import TCPSTATS, WindowsNetwork from . import common -if PY3: - long = int - @mock.patch('datadog_checks.network.network.Platform.is_linux', return_value=False) @mock.patch('datadog_checks.network.network.Platform.is_bsd', return_value=False) @@ -107,7 +103,7 @@ def test_get_tcp_stats(aggregator): with mock.patch('datadog_checks.network.check_windows.WindowsNetwork._get_tcp_stats') as mock_get_tcp_stats: mock_get_tcp_stats.return_value = mock_stats # Make _get_tcp_stats return my mock object check_instance.check({}) - for name, value in iteritems(expected_mets): + for name, value in expected_mets.items(): aggregator.assert_metric(name, value=value) @@ -230,7 +226,7 @@ def test_cx_state_psutil(aggregator): mock_psutil.net_connections.return_value = conn check_instance._setup_metrics({}) check_instance._cx_state_psutil() - for _, m in iteritems(aggregator._metrics): + for m in aggregator._metrics.values(): assert results[m[0].name] == m[0].value @@ -240,8 +236,8 @@ def test_cx_counters_psutil(aggregator): ) counters = { 'Ethernet': snetio( - bytes_sent=long(3096403230), - bytes_recv=long(3280598526), + bytes_sent=int(3096403230), + bytes_recv=int(3280598526), packets_sent=6777924, packets_recv=32888147, errin=0, @@ -262,7 +258,7 @@ def test_cx_counters_psutil(aggregator): with mock.patch('datadog_checks.network.check_windows.psutil') as mock_psutil: mock_psutil.net_io_counters.return_value = counters check_instance._cx_counters_psutil() - for _, m in iteritems(aggregator._metrics): + for m in aggregator._metrics.values(): assert 'device:Ethernet' in m[0].tags if 'bytes_rcvd' in m[0].name: assert m[0].value == 3280598526 diff --git a/openstack/tests/test_openstack.py b/openstack/tests/test_openstack.py index b25c5d49cc2a2..b44a1dea155b4 100644 --- a/openstack/tests/test_openstack.py +++ b/openstack/tests/test_openstack.py @@ -7,7 +7,6 @@ import mock import pytest -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.dev.testing import requires_py3 @@ -139,7 +138,7 @@ def test_unscoped_from_config(): assert scope.auth_token == 'fake_token' assert len(scope.project_scope_map) == 1 - for _, project_scope in iteritems(scope.project_scope_map): + for project_scope in scope.project_scope_map.values(): assert isinstance(project_scope, OpenStackProjectScope) assert project_scope.auth_token == 'fake_token' assert project_scope.tenant_id == '263fd9' diff --git a/openstack_controller/tests/ssh_tunnel.py b/openstack_controller/tests/ssh_tunnel.py index 1f9b3a0f359c0..38639a1bc8163 100644 --- a/openstack_controller/tests/ssh_tunnel.py +++ b/openstack_controller/tests/ssh_tunnel.py @@ -4,21 +4,16 @@ from __future__ import absolute_import import os +import subprocess from contextlib import contextmanager import psutil -from six import PY3 from datadog_checks.dev.conditions import WaitForPortListening from datadog_checks.dev.env import environment_run from datadog_checks.dev.structures import LazyFunction, TempDir from datadog_checks.dev.utils import ON_WINDOWS, find_free_port, get_ip -if PY3: - import subprocess 
-else: - import subprocess32 as subprocess - PID_FILE = 'ssh.pid' diff --git a/postfix/tests/test_e2e.py b/postfix/tests/test_e2e.py index 3f0aa9e94b7e9..0d890175895ce 100644 --- a/postfix/tests/test_e2e.py +++ b/postfix/tests/test_e2e.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest -from six import iteritems from .common import get_e2e_instance, get_e2e_instance_postqueue, get_queue_counts @@ -11,7 +10,7 @@ def test_check_default(dd_agent_check): aggregator = dd_agent_check(get_e2e_instance()) - for queue, count in iteritems(get_queue_counts()): + for queue, count in get_queue_counts().items(): tags = ['instance:postfix_data', 'queue:{}'.format(queue)] aggregator.assert_metric('postfix.queue.size', value=count[0], tags=tags) diff --git a/postfix/tests/test_integration.py b/postfix/tests/test_integration.py index 8e808df1036a3..a797ad3a34fd9 100644 --- a/postfix/tests/test_integration.py +++ b/postfix/tests/test_integration.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest -from six import iteritems from datadog_checks.postfix import PostfixCheck @@ -15,6 +14,6 @@ def test_check(aggregator): check = PostfixCheck('postfix', {}, [instance]) check.check(instance) - for queue, count in iteritems(get_queue_counts()): + for queue, count in get_queue_counts().items(): tags = ['instance:postfix_data', 'queue:{}'.format(queue)] aggregator.assert_metric('postfix.queue.size', value=count[0], tags=tags) diff --git a/postgres/tests/test_statements.py b/postgres/tests/test_statements.py index 66c5c774a5cb8..f486cbf57f3ba 100644 --- a/postgres/tests/test_statements.py +++ b/postgres/tests/test_statements.py @@ -13,7 +13,6 @@ import pytest from dateutil import parser from semver import VersionInfo -from six import string_types from datadog_checks.base.utils.db.sql import compute_sql_signature from datadog_checks.base.utils.db.utils import DBMAsyncJob @@ -798,7 +797,7 @@ def test_statement_samples_collect( dbm_samples = aggregator.get_event_platform_events("dbm-samples") - expected_query = query % ('\'' + arg + '\'' if isinstance(arg, string_types) else arg) + expected_query = query % ('\'' + arg + '\'' if isinstance(arg, str) else arg) # Find matching events by checking if the expected query starts with the event statement. 
Using this # instead of a direct equality check covers cases of truncated statements @@ -1479,7 +1478,7 @@ def test_statement_samples_dbstrict(aggregator, integration_check, dbm_instance, dbm_samples = aggregator.get_event_platform_events("dbm-samples") for _, _, dbname, query, arg in SAMPLE_QUERIES: - expected_query = query % ('\'' + arg + '\'' if isinstance(arg, string_types) else arg) + expected_query = query % ('\'' + arg + '\'' if isinstance(arg, str) else arg) matching = [e for e in dbm_samples if e['db']['statement'] == expected_query] if not dbstrict or dbname == dbm_instance['dbname']: # when dbstrict=True we expect to only capture those queries for the initial database to which the diff --git a/postgres/tests/test_unit.py b/postgres/tests/test_unit.py index 8143c6b46a5c5..1fa3164fa1f22 100644 --- a/postgres/tests/test_unit.py +++ b/postgres/tests/test_unit.py @@ -8,7 +8,6 @@ import pytest from pytest import fail from semver import VersionInfo -from six import iteritems from datadog_checks.postgres import PostgreSql, util @@ -100,7 +99,7 @@ def test_version_metadata(check, test_case, params): check.check_id = 'test:123' with mock.patch('datadog_checks.base.stubs.datadog_agent.set_check_metadata') as m: check.set_metadata('version', test_case) - for name, value in iteritems(params): + for name, value in params.items(): m.assert_any_call('test:123', name, value) m.assert_any_call('test:123', 'version.scheme', 'semver') m.assert_any_call('test:123', 'version.raw', test_case) diff --git a/process/tests/test_process.py b/process/tests/test_process.py index a1dbd67184c13..8eed74076d967 100644 --- a/process/tests/test_process.py +++ b/process/tests/test_process.py @@ -7,7 +7,6 @@ import psutil import pytest from mock import patch -from six import iteritems from datadog_checks.process import ProcessCheck @@ -329,7 +328,7 @@ def test_relocated_procfs(aggregator, dd_run_check): my_procfs = tempfile.mkdtemp() def _fake_procfs(arg, root=my_procfs): - for key, val in iteritems(arg): + for key, val in arg.items(): path = os.path.join(root, key) if isinstance(val, dict): os.mkdir(path) diff --git a/redisdb/tests/test_unit.py b/redisdb/tests/test_unit.py index 2666d2edb8a5e..83c9666812709 100644 --- a/redisdb/tests/test_unit.py +++ b/redisdb/tests/test_unit.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import mock import pytest -from six import iteritems from datadog_checks.dev.utils import get_metadata_metrics @@ -22,18 +21,18 @@ def test__get_conn(check, redis_instance): # create a connection check._get_conn(instance) - key1, conn1 = next(iteritems(check.connections)) + key1, conn1 = next(iter(check.connections.items())) # assert connection is cached check._get_conn(instance) - key2, conn2 = next(iteritems(check.connections)) + key2, conn2 = next(iter(check.connections.items())) assert key2 == key1 assert conn2 == conn1 # disable cache and assert connection has changed instance['disable_connection_cache'] = True check._get_conn(instance) - key2, conn2 = next(iteritems(check.connections)) + key2, conn2 = next(iter(check.connections.items())) assert key2 == key1 assert conn2 != conn1 diff --git a/snmp/tests/common.py b/snmp/tests/common.py index 289f7e4f19a0e..bb7073a6af2b8 100644 --- a/snmp/tests/common.py +++ b/snmp/tests/common.py @@ -11,7 +11,6 @@ from collections import defaultdict import pytest -from six import iteritems from datadog_checks.base.stubs.aggregator import AggregatorStub from datadog_checks.base.utils.common import get_docker_hostname, 
to_native_string @@ -336,7 +335,7 @@ def dd_agent_check_wrapper(dd_agent_check, *args, **kwargs): """ aggregator = dd_agent_check(*args, **kwargs) new_agg_metrics = defaultdict(list) - for metric_name, metric_list in iteritems(aggregator._metrics): + for metric_name, metric_list in aggregator._metrics.items(): new_metrics = [] for metric in metric_list: # metric is a Namedtuple, to modify namedtuple fields we need to use `._replace()` diff --git a/system_core/tests/test_system_core.py b/system_core/tests/test_system_core.py index 64a259b60e9ab..fcee3f0da6302 100644 --- a/system_core/tests/test_system_core.py +++ b/system_core/tests/test_system_core.py @@ -4,7 +4,6 @@ from collections import defaultdict import mock -from six import iteritems from datadog_checks.base.utils.platform import Platform from datadog_checks.system_core import SystemCore @@ -57,7 +56,7 @@ def fake_cpu_times(percpu=False): sum_dict = defaultdict(float) for cputimes in common.MOCK_PSUTIL_CPU_TIMES: - for key, value in iteritems(cputimes._asdict()): + for key, value in cputimes._asdict().items(): sum_dict[key] += value / len(common.MOCK_PSUTIL_CPU_TIMES) return common.MOCK_PSUTIL_CPU_TIMES[0].__class__(**sum_dict) diff --git a/tls/tests/conftest.py b/tls/tests/conftest.py index a657e7fb3e986..12483f28b2dab 100644 --- a/tls/tests/conftest.py +++ b/tls/tests/conftest.py @@ -5,7 +5,6 @@ import pytest from datadog_test_libs.utils.mock_dns import mock_local -from six import iteritems from datadog_checks.dev import TempDir, docker_run from datadog_checks.tls.utils import days_to_seconds @@ -51,11 +50,11 @@ def certs(dd_environment): } certs = {} with TempDir('certs') as tmp_dir: - for address, name in iteritems(downloads): + for address, name in downloads.items(): filepath = os.path.join(tmp_dir, name) download_cert(filepath, address) certs[name] = filepath - for address, name in iteritems(raw_downloads): + for address, name in raw_downloads.items(): filepath = os.path.join(tmp_dir, name) certs[name] = download_cert(filepath, address, raw=True) certs[name] = filepath diff --git a/tls/tests/utils.py b/tls/tests/utils.py index 91ef75cf8f97d..6aa4702e28112 100644 --- a/tls/tests/utils.py +++ b/tls/tests/utils.py @@ -6,8 +6,7 @@ import ssl import time from contextlib import contextmanager - -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.dev import TempDir from datadog_checks.tls.utils import closing diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index a262414801478..81c37dceff840 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -2,11 +2,11 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import re +from urllib.parse import urlparse import mock import pytest import requests -from six.moves.urllib.parse import urlparse from datadog_checks.dev.http import MockResponse from datadog_checks.vault import Vault diff --git a/vsphere/tests/common.py b/vsphere/tests/common.py index 4f9390cb82f00..562b990e34dbb 100644 --- a/vsphere/tests/common.py +++ b/vsphere/tests/common.py @@ -3,10 +3,10 @@ # Licensed under Simplified BSD License (see LICENSE) import os import re +from urllib.parse import urlparse import mock from pyVmomi import vim, vmodl -from six.moves.urllib.parse import urlparse from datadog_checks.base.utils.time import get_current_datetime from datadog_checks.dev.http import MockResponse diff --git a/vsphere/tests/mocked_api.py b/vsphere/tests/mocked_api.py index d193ccc480f58..dcd053fe78b96 100644 
--- a/vsphere/tests/mocked_api.py +++ b/vsphere/tests/mocked_api.py @@ -9,7 +9,6 @@ from mock import MagicMock from pyVmomi import vim from requests import Response -from six import iteritems from datadog_checks.vsphere.api import VersionInfo from tests.common import HERE, VSPHERE_VERSION @@ -57,7 +56,7 @@ def recursive_parse_topology(self, subtree, parent=None): self.infrastructure_data[current_mor]['guest.hostName'] = subtree['guest.hostName'] if self.config.should_collect_attributes and 'customValue' in subtree: mor_attr = [] - for key_name, value in iteritems(subtree['customValue']): + for key_name, value in subtree['customValue'].items(): mor_attr.append('{}{}:{}'.format(self.config.attr_prefix, key_name, value)) self.infrastructure_data[current_mor]['attributes'] = mor_attr @@ -68,9 +67,9 @@ def recursive_parse_topology(self, subtree, parent=None): return # Resolve the runtime.host_moId into pointers to the mocked mors. - for _, props in iteritems(self.infrastructure_data): + for props in self.infrastructure_data.values(): if 'runtime.host_moid' in props: - hosts = [m for m, p in iteritems(self.infrastructure_data) if p['name'] == props['runtime.host_moid']] + hosts = [m for m, p in self.infrastructure_data.items() if p['name'] == props['runtime.host_moid']] props['runtime.host'] = hosts[0] if hosts else object() del props['runtime.host_moid'] diff --git a/vsphere/tests/test_cache.py b/vsphere/tests/test_cache.py index 64155a06271a9..c74a2a5bf413b 100644 --- a/vsphere/tests/test_cache.py +++ b/vsphere/tests/test_cache.py @@ -6,7 +6,6 @@ import pytest from mock import MagicMock, patch from pyVmomi import vim -from six import iteritems from datadog_checks.vsphere.cache import InfrastructureCache, MetricsMetadataCache, VSphereCache from datadog_checks.vsphere.config import VSphereConfig @@ -68,10 +67,10 @@ def test_metrics_metadata_cache(): data = {k: object() for k in ALL_RESOURCES_WITH_METRICS} with cache.update(): - for k, v in iteritems(data): + for k, v in data.items(): cache.set_metadata(k, v) - for k, v in iteritems(data): + for k, v in data.items(): assert cache.get_metadata(k) == v @@ -83,14 +82,14 @@ def test_infrastructure_cache(realtime_instance): mors = {MagicMock(spec=k, _moId="foo"): object() for k in ALL_RESOURCES_WITH_METRICS * 2} with cache.update(): - for k, v in iteritems(mors): + for k, v in mors.items(): cache.set_mor_props(k, v) cache.set_all_tags(mock_api.get_resource_tags_for_mors(mors)) for r in ALL_RESOURCES_WITH_METRICS: assert len(list(cache.get_mors(r))) == 2 - for k, v in iteritems(mors): + for k, v in mors.items(): assert cache.get_mor_props(k) == v vm_mor = vim.VirtualMachine(moId='VM4-4-1') diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index dedeb382b67f0..ac6efe6588ee8 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -4,11 +4,11 @@ import os from copy import deepcopy +from urllib.parse import urljoin import pytest from mock import patch from requests.exceptions import SSLError -from six.moves.urllib.parse import urljoin from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckEndpoints diff --git a/yarn/tests/test_yarn.py b/yarn/tests/test_yarn.py index 63686ee4213c5..11eee87969ded 100644 --- a/yarn/tests/test_yarn.py +++ b/yarn/tests/test_yarn.py @@ -7,7 +7,6 @@ import pytest from requests.exceptions import SSLError -from six import iteritems from datadog_checks.yarn import YarnCheck from datadog_checks.yarn.yarn import ( @@ -84,25 +83,25 @@ def test_check(aggregator, mocked_request): ) 
# Check the YARN Cluster Metrics - for metric, value in iteritems(YARN_CLUSTER_METRICS_VALUES): + for metric, value in YARN_CLUSTER_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=EXPECTED_TAGS, count=1) # Check the YARN App Metrics - for metric, value in iteritems(YARN_APP_METRICS_VALUES): + for metric, value in YARN_APP_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_APP_METRICS_TAGS + CUSTOM_TAGS, count=1) - for metric, value in iteritems(DEPRECATED_YARN_APP_METRICS_VALUES): + for metric, value in DEPRECATED_YARN_APP_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_APP_METRICS_TAGS + CUSTOM_TAGS, count=1) # Check the YARN Node Metrics - for metric, value in iteritems(YARN_NODE_METRICS_VALUES): + for metric, value in YARN_NODE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_NODE_METRICS_TAGS + CUSTOM_TAGS, count=1) # Check the YARN Root Queue Metrics - for metric, value in iteritems(YARN_ROOT_QUEUE_METRICS_VALUES): + for metric, value in YARN_ROOT_QUEUE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_ROOT_QUEUE_METRICS_TAGS + CUSTOM_TAGS, count=1) # Check the YARN Custom Queue Metrics - for metric, value in iteritems(YARN_QUEUE_METRICS_VALUES): + for metric, value in YARN_QUEUE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_QUEUE_METRICS_TAGS + CUSTOM_TAGS, count=1) # Check the YARN Queue Metrics from excluded queues are absent @@ -110,7 +109,7 @@ def test_check(aggregator, mocked_request): aggregator.assert_metric(metric, tags=YARN_QUEUE_NOFOLLOW_METRICS_TAGS + CUSTOM_TAGS, count=0) # Check the YARN Subqueue Metrics - for metric, value in iteritems(YARN_SUBQUEUE_METRICS_VALUES): + for metric, value in YARN_SUBQUEUE_METRICS_VALUES.items(): aggregator.assert_metric(metric, value=value, tags=YARN_SUBQUEUE_METRICS_TAGS + CUSTOM_TAGS, count=1) aggregator.assert_all_metrics_covered() @@ -329,7 +328,7 @@ def test_collect_apps_all_states(dd_run_check, aggregator, mocked_request): dd_run_check(yarn) for app in YARN_APPS_ALL_STATES: - for metric, value in iteritems(app['metric_values']): + for metric, value in app['metric_values'].items(): aggregator.assert_metric(metric, value=value, tags=app['tags'] + EXPECTED_TAGS, count=1) @@ -351,7 +350,7 @@ def test_collect_apps_states_list(dd_run_check, aggregator, mocked_request, conf state_tag_re = re.compile(r'state:.*') for app in YARN_APPS_ALL_STATES: - for metric, value in iteritems(app['metric_values']): + for metric, value in app['metric_values'].items(): m = re.search(state_tag_re, app['tags'][2]) if m: state_tag = m.group(0) @@ -368,7 +367,7 @@ def test_collect_apps_killed_instance_state(dd_run_check, aggregator, mocked_req dd_run_check(yarn) for app in YARN_APPS_ALL_STATES: - for metric, value in iteritems(app['metric_values']): + for metric, value in app['metric_values'].items(): if app['tags'] == "KILLED": aggregator.assert_metric(metric, value=value, tags=app['tags'] + EXPECTED_TAGS, count=1) else: diff --git a/zk/tests/conftest.py b/zk/tests/conftest.py index 929e7f781b764..9cf0fa6fe6f1a 100644 --- a/zk/tests/conftest.py +++ b/zk/tests/conftest.py @@ -5,9 +5,9 @@ import sys import time from copy import deepcopy +from io import StringIO import pytest -from six import StringIO from datadog_checks.base.utils.common import get_docker_hostname from datadog_checks.dev import RetryError, docker_run From ec078464423c5d4581b2ad350ec5927a55d359bf Mon Sep 17 00:00:00 
2001 From: Ilia Kurenkov Date: Mon, 16 Sep 2024 17:44:23 +0200 Subject: [PATCH 08/23] Fix test for license validation (#18591) --- ddev/tests/cli/validate/test_licenses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ddev/tests/cli/validate/test_licenses.py b/ddev/tests/cli/validate/test_licenses.py index e3198b9e22a19..ef445d6f25abc 100644 --- a/ddev/tests/cli/validate/test_licenses.py +++ b/ddev/tests/cli/validate/test_licenses.py @@ -81,7 +81,7 @@ def test_invalid_requirement(repository, ddev, helpers): with agent_requirements_path.open(encoding='utf-8') as file: requirements = file.readlines() - requirements[0] = requirements[0].replace('==', '==^') + requirements[0] = "aerospike==^4.0.0; sys_platform != 'win32' and sys_platform != 'darwin'\n" with agent_requirements_path.open(mode='w', encoding='utf-8') as file: file.writelines(requirements[:3]) From 93a779a8751a461094d2a3e6e4684a4b0e3d962a Mon Sep 17 00:00:00 2001 From: Nenad Noveljic <18366081+nenadnoveljic@users.noreply.github.com> Date: Mon, 16 Sep 2024 19:18:31 +0200 Subject: [PATCH 09/23] [sqlserver] Fix odbc.ini config handling for Linux (SDBM-1171) (#18586) * func for get embed dir * [sqlserver] Fix ODBC config handling for Linux * changelog * linter * test only for Linux * linter * import error * non-atomic getsize * refactoring file test * linter * fix os * fix tests * fix test * test case for odbc.inst creation * linter * linter * fix test_get_unixodbc_sysconfig * test_linux_connection * linter * unused imports * declare test as non windows test * is_non_empty_file always return boolean --- sqlserver/changelog.d/18586.fixed | 1 + sqlserver/datadog_checks/sqlserver/utils.py | 35 +++++++++++--- sqlserver/tests/test_unit.py | 51 +++++++++++++++------ 3 files changed, 67 insertions(+), 20 deletions(-) create mode 100644 sqlserver/changelog.d/18586.fixed diff --git a/sqlserver/changelog.d/18586.fixed b/sqlserver/changelog.d/18586.fixed new file mode 100644 index 0000000000000..e6177056b7c48 --- /dev/null +++ b/sqlserver/changelog.d/18586.fixed @@ -0,0 +1 @@ +[sqlserver] Fix ODBC config handling for Linux diff --git a/sqlserver/datadog_checks/sqlserver/utils.py b/sqlserver/datadog_checks/sqlserver/utils.py index c9b207320de2a..98825e804c316 100644 --- a/sqlserver/datadog_checks/sqlserver/utils.py +++ b/sqlserver/datadog_checks/sqlserver/utils.py @@ -3,6 +3,7 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import os import re +import shutil import sys from typing import Dict @@ -11,6 +12,7 @@ CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) DRIVER_CONFIG_DIR = os.path.join(CURRENT_DIR, 'data', 'driver_config') +ODBC_INST_INI = 'odbcinst.ini' # Database is used to store both the name and physical_database_name @@ -32,6 +34,22 @@ def __str__(self): return "name:{}, physical_db_name:{}".format(self.name, self.physical_db_name) +def get_unixodbc_sysconfig(python_executable): + return os.path.join(os.path.dirname(os.path.dirname(python_executable)), "etc") + + +def is_non_empty_file(path): + if not os.path.exists(path): + return False + try: + if os.path.getsize(path) > 0: + return True + # exists and getsize aren't atomic + except FileNotFoundError: + return False + return False + + def set_default_driver_conf(): if Platform.is_containerized(): # Use default `./driver_config/odbcinst.ini` when Agent is running in docker. 
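For context on the hunk that follows: unixODBC resolves drivers and data sources from the directory pointed to by `ODBCSYSINI` — `odbcinst.ini` declares the installed drivers and `odbc.ini` declares data sources that reference them by name. That is why, when a user has dropped a non-empty `odbc.ini` into the Agent's embedded `etc` directory, the check also needs a matching `odbcinst.ini` there and copies the bundled one over if it is missing. A minimal pair might look like the sketch below; the driver name, library path, and DSN name are illustrative and depend on the msodbcsql package actually installed on the host.

```ini
; odbcinst.ini -- declares the driver (name and .so path are illustrative)
[ODBC Driver 18 for SQL Server]
Description = Microsoft ODBC Driver 18 for SQL Server
Driver      = /opt/microsoft/msodbcsql18/lib64/libmsodbcsql-18.so

; odbc.ini -- declares a data source that references the driver above
[datadog-sqlserver]
Driver   = ODBC Driver 18 for SQL Server
Server   = localhost,1433
Database = master
```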
@@ -54,13 +72,16 @@ def set_default_driver_conf(): # linux_unixodbc_sysconfig is set to the agent embedded /etc directory # this is a hacky way to get the path to the etc directory # by getting the path to the python executable and get the directory above /bin/python - linux_unixodbc_sysconfig = os.path.dirname(os.path.dirname(sys.executable)) - if os.path.exists(os.path.join(linux_unixodbc_sysconfig, 'odbcinst.ini')) or os.path.exists( - os.path.join(linux_unixodbc_sysconfig, 'odbc.ini') - ): - # If there are already drivers or dataSources installed, don't override the ODBCSYSINI - # This means user has copied odbcinst.ini and odbc.ini to the unixODBC sysconfig location - return + linux_unixodbc_sysconfig = get_unixodbc_sysconfig(sys.executable) + odbc_ini = os.path.join(linux_unixodbc_sysconfig, 'odbc.ini') + if is_non_empty_file(odbc_ini): + os.environ.setdefault('ODBCSYSINI', linux_unixodbc_sysconfig) + odbc_inst_ini_sysconfig = os.path.join(linux_unixodbc_sysconfig, ODBC_INST_INI) + if not is_non_empty_file(odbc_inst_ini_sysconfig): + shutil.copy(os.path.join(DRIVER_CONFIG_DIR, ODBC_INST_INI), odbc_inst_ini_sysconfig) + # If there are already drivers or dataSources installed, don't override the ODBCSYSINI + # This means user has copied odbcinst.ini and odbc.ini to the unixODBC sysconfig location + return # Use default `./driver_config/odbcinst.ini` to let the integration use agent embedded odbc driver. os.environ.setdefault('ODBCSYSINI', DRIVER_CONFIG_DIR) diff --git a/sqlserver/tests/test_unit.py b/sqlserver/tests/test_unit.py index 9d46734f1d95f..4aadfa462af2f 100644 --- a/sqlserver/tests/test_unit.py +++ b/sqlserver/tests/test_unit.py @@ -20,12 +20,14 @@ from datadog_checks.sqlserver.utils import ( Database, extract_sql_comments_and_procedure_name, + get_unixodbc_sysconfig, + is_non_empty_file, parse_sqlserver_major_version, set_default_driver_conf, ) from .common import CHECK_NAME, DOCKER_SERVER, assert_metrics -from .utils import deep_compare, windows_ci +from .utils import deep_compare, not_windows_ci, windows_ci try: import pyodbc @@ -435,6 +437,12 @@ def test_set_default_driver_conf(): set_default_driver_conf() assert os.environ['ODBCSYSINI'].endswith(os.path.join('data', 'driver_config')) + with mock.patch("datadog_checks.base.utils.platform.Platform.is_linux", return_value=True): + with EnvVars({}, ignore=['ODBCSYSINI']): + set_default_driver_conf() + assert 'ODBCSYSINI' in os.environ, "ODBCSYSINI should be set" + assert os.environ['ODBCSYSINI'].endswith(os.path.join('data', 'driver_config')) + # `set_default_driver_conf` have no effect on the cases below with EnvVars({'ODBCSYSINI': 'ABC', 'DOCKER_DD_AGENT': 'true'}): set_default_driver_conf() @@ -446,23 +454,27 @@ def test_set_default_driver_conf(): assert 'ODBCSYSINI' in os.environ assert os.environ['ODBCSYSINI'].endswith(os.path.join('tests', 'odbc')) - with EnvVars({}, ignore=['ODBCSYSINI']): - with mock.patch("os.path.exists", return_value=True): - # odbcinst.ini or odbc.ini exists in agent embedded directory - set_default_driver_conf() - assert 'ODBCSYSINI' not in os.environ - - with EnvVars({}, ignore=['ODBCSYSINI']): - set_default_driver_conf() - assert 'ODBCSYSINI' in os.environ # ODBCSYSINI is set by the integration - if pyodbc is not None: - assert pyodbc.drivers() is not None - with EnvVars({'ODBCSYSINI': 'ABC'}): set_default_driver_conf() assert os.environ['ODBCSYSINI'] == 'ABC' +@not_windows_ci +def test_set_default_driver_conf_linux(): + odbc_config_dir = os.path.expanduser('~') + with 
mock.patch("datadog_checks.sqlserver.utils.get_unixodbc_sysconfig", return_value=odbc_config_dir): + with EnvVars({}, ignore=['ODBCSYSINI']): + odbc_inst = os.path.join(odbc_config_dir, "odbcinst.ini") + odbc_ini = os.path.join(odbc_config_dir, "odbc.ini") + for file in [odbc_inst, odbc_ini]: + if os.path.exists(file): + os.remove(file) + with open(odbc_ini, "x") as file: + file.write("dummy-content") + set_default_driver_conf() + assert is_non_empty_file(odbc_inst), "odbc_inst should have been created when a non empty odbc.ini exists" + + @windows_ci def test_check_local(aggregator, dd_run_check, init_config, instance_docker): sqlserver_check = SQLServer(CHECK_NAME, init_config, [instance_docker]) @@ -866,3 +878,16 @@ def test_exception_handling_by_do_for_dbs(instance_docker): 'datadog_checks.sqlserver.utils.is_azure_sql_database', return_value={} ): schemas._fetch_for_databases() + + +def test_get_unixodbc_sysconfig(): + etc_dir = os.path.sep + for dir in ["opt", "datadog-agent", "embedded", "bin", "python"]: + etc_dir = os.path.join(etc_dir, dir) + assert get_unixodbc_sysconfig(etc_dir).split(os.path.sep) == [ + "", + "opt", + "datadog-agent", + "embedded", + "etc", + ], "incorrect unix odbc config dir" From 426d18b0acfed61cc632b32d3173bd1900d8f8b7 Mon Sep 17 00:00:00 2001 From: HadhemiDD <43783545+HadhemiDD@users.noreply.github.com> Date: Tue, 17 Sep 2024 10:30:34 +0200 Subject: [PATCH 10/23] Add Kubeflow process signature (#18594) * kubeflow proc * remove cmd --- kubeflow/manifest.json | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/kubeflow/manifest.json b/kubeflow/manifest.json index 4bd141401f483..2252c0462db06 100644 --- a/kubeflow/manifest.json +++ b/kubeflow/manifest.json @@ -40,7 +40,16 @@ }, "service_checks": { "metadata_path": "assets/service_checks.json" - } + }, + "process_signatures": [ + "katib-controller", + "katib-db-manager", + "katib-ui", + "kserve-controller-manager", + "ml-pipeline-api-server", + "ml-pipeline-scheduledworkflow", + "ml-pipeline-persistenceagent" + ] }, "monitors": { "Kubeflow Monitor": "assets/monitors/kubeflow.json" From fcfc8f13fb11075007a416220a37e6ca80295c88 Mon Sep 17 00:00:00 2001 From: HadhemiDD <43783545+HadhemiDD@users.noreply.github.com> Date: Tue, 17 Sep 2024 15:43:26 +0200 Subject: [PATCH 11/23] Kubeflow update dashboard and readme (#18600) * update readme * update readme * [Release] Bumped kubeflow version to 1.0.0 * revert release --- kubeflow/README.md | 46 +++++++++++++++++++ kubeflow/assets/dashboards/overview.json | 43 ++++++++++++----- .../changelog.d/18391.added | 0 3 files changed, 78 insertions(+), 11 deletions(-) rename kubeflow/{ => datadog_checks}/changelog.d/18391.added (100%) diff --git a/kubeflow/README.md b/kubeflow/README.md index 0d9df6cb33b3b..634017875995c 100644 --- a/kubeflow/README.md +++ b/kubeflow/README.md @@ -27,6 +27,52 @@ For the Agent to start collecting metrics, the `kubeflow` pods need to be annota Kubeflow has metrics endpoints that can be accessed on port `9090`. +To enable metrics exposure in kubeflow through prometheus, you might need to enable the prometheus service monitoring for the component in question. + +You can use Kube-Prometheus-Stack or a custom Prometheus installation. + +##### How to install Kube-Prometheus-Stack: +1. Add Helm Repository: +``` +helm repo add prometheus-community https://prometheus-community.github.io/helm-charts +helm repo update +``` + +2. 
Install the Chart: +``` +helm install prometheus-stack prometheus-community/kube-prometheus-stack +``` + +3. Expose Prometheus service externally: +``` +kubectl port-forward prometheus-stack 9090:9090 +``` +##### Set Up ServiceMonitors for Kubeflow Components: + +You need to configure ServiceMonitors for Kubeflow components to expose their Prometheus metrics. +If your Kubeflow component exposes Prometheus metrics by default. You'll just need to configure Prometheus to scrape these metrics. + +The ServiceMonitor would look like this: + +```yaml +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + name: -monitor + labels: + release: prometheus-stack +spec: + selector: + matchLabels: + app: + endpoints: + - port: http + path: /metrics +``` + +Where `` is to be replaced by `pipelines`, `kserve` or `katib` and `` is to be replaced by `ml-pipeline`, `kserve` or `katib`. + + **Note**: The listed metrics can only be collected if they are available(depending on the version). Some metrics are generated only when certain actions are performed. The only parameter required for configuring the `kubeflow` check is `openmetrics_endpoint`. This parameter should be set to the location where the Prometheus-formatted metrics are exposed. The default port is `9090`. In containerized environments, `%%host%%` should be used for [host autodetection][3]. diff --git a/kubeflow/assets/dashboards/overview.json b/kubeflow/assets/dashboards/overview.json index 8b77b0122d965..b573e5f477156 100644 --- a/kubeflow/assets/dashboards/overview.json +++ b/kubeflow/assets/dashboards/overview.json @@ -46,7 +46,7 @@ }, "id": 4610707819074916, "layout": { - "height": 3, + "height": 4, "width": 3, "x": 0, "y": 3 @@ -67,7 +67,7 @@ }, "id": 8366490141273904, "layout": { - "height": 3, + "height": 4, "width": 2, "x": 3, "y": 3 @@ -77,7 +77,7 @@ }, "id": 3375620455700908, "layout": { - "height": 7, + "height": 8, "width": 5, "x": 0, "y": 0 @@ -93,8 +93,8 @@ "widgets": [ { "definition": { - "background_color": "blue", - "content": "The service checks show the Kubeflow OpenMetrics/Prometheus endpoint status.\n\nThe monitor summary shows you any active alerts for the most crucial Metrics. ", + "background_color": "pink", + "content": "If many widgets are empty, you are using a version of Kubeflow that does not expose certain metrics. Refer to the metadata.csv file for metrics list. \n\nReach out to support to indicate version incompatibilities.", "font_size": "14", "has_padding": true, "show_tick": true, @@ -112,6 +112,27 @@ "y": 0 } }, + { + "definition": { + "background_color": "blue", + "content": "The service checks show the Kubeflow OpenMetrics/Prometheus endpoint status.\n\nThe monitor summary shows you any active alerts for the most crucial Metrics. 
", + "font_size": "14", + "has_padding": true, + "show_tick": true, + "text_align": "left", + "tick_edge": "left", + "tick_pos": "50%", + "type": "note", + "vertical_align": "center" + }, + "id": 6145599891700518, + "layout": { + "height": 1, + "width": 7, + "x": 0, + "y": 2 + } + }, { "definition": { "color_preference": "text", @@ -134,14 +155,14 @@ "height": 4, "width": 7, "x": 0, - "y": 2 + "y": 3 } } ] }, "id": 3510698085005998, "layout": { - "height": 7, + "height": 8, "width": 7, "x": 5, "y": 0 @@ -273,7 +294,7 @@ "height": 5, "width": 12, "x": 0, - "y": 7 + "y": 8 } }, { @@ -988,7 +1009,7 @@ "is_column_break": true, "width": 12, "x": 0, - "y": 12 + "y": 13 } }, { @@ -1224,7 +1245,7 @@ "height": 8, "width": 12, "x": 0, - "y": 28 + "y": 29 } }, { @@ -1424,7 +1445,7 @@ "height": 7, "width": 12, "x": 0, - "y": 36 + "y": 37 } } ] diff --git a/kubeflow/changelog.d/18391.added b/kubeflow/datadog_checks/changelog.d/18391.added similarity index 100% rename from kubeflow/changelog.d/18391.added rename to kubeflow/datadog_checks/changelog.d/18391.added From 69cffd9e02ffbdc3015294a2c39c9215c0df637a Mon Sep 17 00:00:00 2001 From: Kyle Neale Date: Tue, 17 Sep 2024 10:43:07 -0400 Subject: [PATCH 12/23] Bump Python 3 to 3.12 (#18212) * bump py version to 3.12 * update dbm dev container * update builders and build deps * fix python string * Build lxml dependencies for linux x64 (#18467) * fix rmq version * fix gunicorn tests * Fix lock file generation * comment out if clause to force dep resolution * bump python to 3.12.6 * fix python on Windows sha * remove DD_PYTHON2 unbound variable * fix hatch errors * remove setup.py files * fix CI * revert datadog_checks_test_helper --------- Co-authored-by: Alex Lopez Co-authored-by: Ilia Kurenkov Co-authored-by: Alex Lopez --- .builders/images/linux-aarch64/Dockerfile | 4 +-- .builders/images/linux-x86_64/Dockerfile | 34 +++++++++++++++++-- .builders/images/windows-x86_64/Dockerfile | 12 +++---- .builders/lock.py | 25 +++++++++++--- .devcontainer/Dockerfile | 2 +- .devcontainer/dbm/Dockerfile | 2 +- .devcontainer/dbm/devcontainer.json | 2 +- .devcontainer/devcontainer.json | 2 +- .github/workflows/build-ddev.yml | 2 +- .github/workflows/build-deps.yml | 8 ++--- .github/workflows/cache-shared-deps.yml | 2 +- .github/workflows/compute-matrix.yml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/pr-quick-check.yml | 2 +- .github/workflows/release-base.yml | 2 +- .github/workflows/release-dev.yml | 2 +- .github/workflows/release-hash-check.yml | 2 +- .github/workflows/run-validations.yml | 2 +- .github/workflows/test-target.yml | 4 +-- .github/workflows/update-agent-changelog.yml | 2 +- .github/workflows/update-dependencies.yml | 2 +- .gitlab/tagger/Dockerfile | 2 +- active_directory/changelog.d/18207.added | 1 + active_directory/hatch.toml | 2 +- active_directory/pyproject.toml | 2 +- activemq/changelog.d/18207.added | 1 + activemq/hatch.toml | 4 +-- activemq/pyproject.toml | 2 +- activemq_xml/changelog.d/18207.added | 1 + activemq_xml/hatch.toml | 2 +- activemq_xml/pyproject.toml | 2 +- aerospike/changelog.d/18207.added | 1 + aerospike/hatch.toml | 2 +- aerospike/pyproject.toml | 2 +- airflow/changelog.d/18207.added | 1 + airflow/hatch.toml | 2 +- airflow/pyproject.toml | 2 +- amazon_msk/changelog.d/18207.added | 1 + amazon_msk/hatch.toml | 2 +- amazon_msk/pyproject.toml | 2 +- ambari/changelog.d/18207.added | 1 + ambari/hatch.toml | 2 +- ambari/pyproject.toml | 2 +- apache/changelog.d/18207.added | 1 + apache/hatch.toml | 2 +- apache/pyproject.toml | 
2 +- arangodb/changelog.d/18207.added | 1 + arangodb/hatch.toml | 2 +- arangodb/pyproject.toml | 2 +- argo_rollouts/changelog.d/18207.added | 1 + argo_rollouts/hatch.toml | 2 +- argo_rollouts/pyproject.toml | 2 +- argo_workflows/changelog.d/18207.added | 1 + argo_workflows/hatch.toml | 2 +- argo_workflows/pyproject.toml | 2 +- argocd/changelog.d/18207.added | 1 + argocd/hatch.toml | 2 +- argocd/pyproject.toml | 2 +- aspdotnet/changelog.d/18207.added | 1 + aspdotnet/hatch.toml | 2 +- aspdotnet/pyproject.toml | 2 +- avi_vantage/changelog.d/18207.added | 1 + avi_vantage/hatch.toml | 2 +- avi_vantage/pyproject.toml | 2 +- aws_neuron/hatch.toml | 2 +- aws_neuron/pyproject.toml | 2 +- azure_iot_edge/changelog.d/18207.added | 1 + azure_iot_edge/hatch.toml | 2 +- azure_iot_edge/pyproject.toml | 2 +- boundary/changelog.d/18207.added | 1 + boundary/hatch.toml | 2 +- boundary/pyproject.toml | 2 +- btrfs/changelog.d/18207.added | 1 + btrfs/hatch.toml | 2 +- btrfs/pyproject.toml | 2 +- cacti/changelog.d/18207.added | 1 + cacti/hatch.toml | 2 +- cacti/pyproject.toml | 2 +- calico/changelog.d/18207.added | 1 + calico/hatch.toml | 2 +- calico/pyproject.toml | 2 +- cassandra/changelog.d/18207.added | 1 + cassandra/hatch.toml | 2 +- cassandra/pyproject.toml | 2 +- cassandra_nodetool/changelog.d/18207.added | 1 + cassandra_nodetool/hatch.toml | 2 +- cassandra_nodetool/pyproject.toml | 2 +- ceph/changelog.d/18207.added | 1 + ceph/hatch.toml | 2 +- ceph/pyproject.toml | 2 +- cert_manager/changelog.d/18207.added | 1 + cert_manager/hatch.toml | 2 +- cert_manager/pyproject.toml | 2 +- cilium/changelog.d/18207.added | 1 + cilium/hatch.toml | 4 +-- cilium/pyproject.toml | 2 +- cisco_aci/changelog.d/18207.added | 1 + cisco_aci/hatch.toml | 2 +- cisco_aci/pyproject.toml | 2 +- citrix_hypervisor/changelog.d/18207.added | 1 + citrix_hypervisor/hatch.toml | 2 +- citrix_hypervisor/pyproject.toml | 2 +- clickhouse/changelog.d/18207.added | 1 + clickhouse/hatch.toml | 2 +- clickhouse/pyproject.toml | 2 +- cloud_foundry_api/changelog.d/18207.added | 1 + cloud_foundry_api/hatch.toml | 2 +- cloud_foundry_api/pyproject.toml | 2 +- cloudera/changelog.d/18207.added | 1 + cloudera/hatch.toml | 2 +- cloudera/pyproject.toml | 2 +- cockroachdb/changelog.d/18207.added | 1 + cockroachdb/hatch.toml | 2 +- cockroachdb/pyproject.toml | 2 +- confluent_platform/changelog.d/18207.added | 1 + confluent_platform/hatch.toml | 2 +- confluent_platform/pyproject.toml | 2 +- consul/changelog.d/18207.added | 1 + consul/hatch.toml | 2 +- consul/pyproject.toml | 2 +- coredns/changelog.d/18207.added | 1 + coredns/hatch.toml | 2 +- coredns/pyproject.toml | 2 +- couch/changelog.d/18207.added | 1 + couch/hatch.toml | 2 +- couch/pyproject.toml | 2 +- couchbase/changelog.d/18207.added | 1 + couchbase/hatch.toml | 2 +- couchbase/pyproject.toml | 2 +- crio/changelog.d/18207.added | 1 + crio/hatch.toml | 2 +- crio/pyproject.toml | 2 +- datadog_checks_base/changelog.d/18207.added | 1 + datadog_checks_base/hatch.toml | 2 +- datadog_checks_base/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + .../pyproject.toml | 2 +- datadog_checks_dev/changelog.d/18207.added | 1 + .../integration/check/{check_name}/hatch.toml | 2 +- .../check/{check_name}/pyproject.toml | 4 +-- .../integration/jmx/{check_name}/hatch.toml | 2 +- .../jmx/{check_name}/pyproject.toml | 4 +-- .../logs/{check_name}/pyproject.toml | 2 +- datadog_checks_dev/hatch.toml | 6 ++-- datadog_checks_dev/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + datadog_checks_downloader/hatch.toml | 2 +- 
datadog_checks_downloader/pyproject.toml | 2 +- datadog_cluster_agent/changelog.d/18207.added | 1 + datadog_cluster_agent/hatch.toml | 2 +- datadog_cluster_agent/pyproject.toml | 2 +- dcgm/changelog.d/18207.added | 1 + dcgm/hatch.toml | 2 +- dcgm/pyproject.toml | 2 +- ddev/changelog.d/18207.added | 1 + ddev/hatch.toml | 2 +- ddev/pyproject.toml | 6 ++-- ddev/src/ddev/repo/constants.py | 2 +- directory/changelog.d/18207.added | 1 + directory/hatch.toml | 2 +- directory/pyproject.toml | 2 +- disk/changelog.d/18207.added | 1 + disk/hatch.toml | 2 +- disk/pyproject.toml | 2 +- dns_check/changelog.d/18207.added | 1 + dns_check/hatch.toml | 2 +- dns_check/pyproject.toml | 2 +- docs/developer/setup.md | 12 +++---- dotnetclr/changelog.d/18207.added | 1 + dotnetclr/hatch.toml | 2 +- dotnetclr/pyproject.toml | 2 +- druid/changelog.d/18207.added | 1 + druid/hatch.toml | 2 +- druid/pyproject.toml | 2 +- ecs_fargate/changelog.d/18207.added | 1 + ecs_fargate/hatch.toml | 2 +- ecs_fargate/pyproject.toml | 2 +- eks_fargate/changelog.d/18207.added | 1 + eks_fargate/hatch.toml | 2 +- eks_fargate/pyproject.toml | 2 +- elastic/changelog.d/18207.added | 1 + elastic/hatch.toml | 4 +-- elastic/pyproject.toml | 2 +- envoy/changelog.d/18207.added | 1 + envoy/hatch.toml | 2 +- envoy/pyproject.toml | 2 +- esxi/changelog.d/18207.added | 1 + esxi/hatch.toml | 4 +-- esxi/pyproject.toml | 2 +- etcd/changelog.d/18207.added | 1 + etcd/hatch.toml | 2 +- etcd/pyproject.toml | 2 +- exchange_server/changelog.d/18207.added | 1 + exchange_server/hatch.toml | 2 +- exchange_server/pyproject.toml | 2 +- external_dns/changelog.d/18207.added | 1 + external_dns/hatch.toml | 2 +- external_dns/pyproject.toml | 2 +- flink/changelog.d/18207.added | 1 + flink/pyproject.toml | 2 +- fluentd/changelog.d/18207.added | 1 + fluentd/hatch.toml | 2 +- fluentd/pyproject.toml | 2 +- fluxcd/changelog.d/18207.added | 1 + fluxcd/hatch.toml | 2 +- fluxcd/pyproject.toml | 2 +- fly_io/hatch.toml | 4 +-- fly_io/pyproject.toml | 2 +- foundationdb/changelog.d/18207.added | 1 + foundationdb/hatch.toml | 4 +-- foundationdb/pyproject.toml | 2 +- gearmand/changelog.d/18207.added | 1 + gearmand/hatch.toml | 2 +- gearmand/pyproject.toml | 2 +- gitlab/changelog.d/18207.added | 1 + gitlab/hatch.toml | 2 +- gitlab/pyproject.toml | 2 +- gitlab_runner/changelog.d/18207.added | 1 + gitlab_runner/hatch.toml | 2 +- gitlab_runner/pyproject.toml | 2 +- glusterfs/changelog.d/18207.added | 1 + glusterfs/hatch.toml | 2 +- glusterfs/pyproject.toml | 2 +- go_expvar/changelog.d/18207.added | 1 + go_expvar/hatch.toml | 2 +- go_expvar/pyproject.toml | 2 +- gunicorn/changelog.d/18207.added | 1 + gunicorn/hatch.toml | 9 ++--- gunicorn/pyproject.toml | 2 +- haproxy/changelog.d/18207.added | 1 + haproxy/hatch.toml | 4 +-- haproxy/pyproject.toml | 2 +- harbor/changelog.d/18207.added | 1 + harbor/hatch.toml | 2 +- harbor/pyproject.toml | 2 +- hazelcast/changelog.d/18207.added | 1 + hazelcast/hatch.toml | 2 +- hdfs_datanode/changelog.d/18207.added | 1 + hdfs_datanode/hatch.toml | 2 +- hdfs_datanode/pyproject.toml | 2 +- hdfs_namenode/changelog.d/18207.added | 1 + hdfs_namenode/hatch.toml | 2 +- hdfs_namenode/pyproject.toml | 2 +- hive/changelog.d/18207.added | 1 + hive/hatch.toml | 2 +- hive/pyproject.toml | 2 +- hivemq/changelog.d/18207.added | 1 + hivemq/hatch.toml | 2 +- hivemq/pyproject.toml | 2 +- http_check/changelog.d/18207.added | 1 + http_check/hatch.toml | 2 +- http_check/pyproject.toml | 2 +- hudi/changelog.d/18207.added | 1 + hudi/hatch.toml | 2 +- hudi/pyproject.toml | 2 +- 
hyperv/changelog.d/18207.added | 1 + hyperv/hatch.toml | 2 +- hyperv/pyproject.toml | 2 +- ibm_ace/changelog.d/18207.added | 1 + ibm_ace/hatch.toml | 2 +- ibm_ace/pyproject.toml | 2 +- ibm_db2/changelog.d/18207.added | 1 + ibm_db2/hatch.toml | 2 +- ibm_db2/pyproject.toml | 2 +- ibm_i/changelog.d/18207.added | 1 + ibm_i/hatch.toml | 2 +- ibm_i/pyproject.toml | 2 +- ibm_mq/changelog.d/18207.added | 1 + ibm_mq/hatch.toml | 4 +-- ibm_mq/pyproject.toml | 2 +- ibm_was/changelog.d/18207.added | 1 + ibm_was/hatch.toml | 2 +- ibm_was/pyproject.toml | 2 +- ignite/changelog.d/18207.added | 1 + ignite/hatch.toml | 2 +- ignite/pyproject.toml | 2 +- iis/changelog.d/18207.added | 1 + iis/hatch.toml | 2 +- iis/pyproject.toml | 2 +- impala/changelog.d/18207.added | 1 + impala/hatch.toml | 2 +- impala/pyproject.toml | 2 +- istio/changelog.d/18207.added | 1 + istio/hatch.toml | 2 +- istio/pyproject.toml | 2 +- jboss_wildfly/changelog.d/18207.added | 1 + jboss_wildfly/hatch.toml | 2 +- jboss_wildfly/pyproject.toml | 2 +- journald/changelog.d/18207.added | 1 + journald/pyproject.toml | 2 +- kafka/changelog.d/18207.added | 1 + kafka/hatch.toml | 2 +- kafka/pyproject.toml | 2 +- kafka_consumer/changelog.d/18207.added | 1 + kafka_consumer/hatch.toml | 4 +-- kafka_consumer/pyproject.toml | 2 +- karpenter/changelog.d/18207.added | 1 + karpenter/hatch.toml | 2 +- karpenter/pyproject.toml | 2 +- kong/changelog.d/18207.added | 1 + kong/hatch.toml | 2 +- kong/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + kube_apiserver_metrics/hatch.toml | 2 +- kube_apiserver_metrics/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + kube_controller_manager/hatch.toml | 2 +- kube_controller_manager/pyproject.toml | 2 +- kube_dns/changelog.d/18207.added | 1 + kube_dns/hatch.toml | 2 +- kube_dns/pyproject.toml | 2 +- kube_metrics_server/changelog.d/18207.added | 1 + kube_metrics_server/hatch.toml | 2 +- kube_metrics_server/pyproject.toml | 2 +- kube_proxy/changelog.d/18207.added | 1 + kube_proxy/hatch.toml | 2 +- kube_proxy/pyproject.toml | 2 +- kube_scheduler/changelog.d/18207.added | 1 + kube_scheduler/hatch.toml | 2 +- kube_scheduler/pyproject.toml | 2 +- kubeflow/pyproject.toml | 2 +- kubelet/changelog.d/18207.added | 1 + kubelet/hatch.toml | 2 +- kubelet/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + kubernetes_cluster_autoscaler/hatch.toml | 2 +- kubernetes_cluster_autoscaler/pyproject.toml | 2 +- kubernetes_state/changelog.d/18207.added | 1 + kubernetes_state/hatch.toml | 2 +- kubernetes_state/pyproject.toml | 2 +- kubevirt_api/hatch.toml | 2 +- kubevirt_api/pyproject.toml | 2 +- kubevirt_controller/hatch.toml | 2 +- kubevirt_controller/pyproject.toml | 2 +- kyototycoon/changelog.d/18207.added | 1 + kyototycoon/hatch.toml | 2 +- kyototycoon/pyproject.toml | 2 +- kyverno/changelog.d/18207.added | 1 + kyverno/hatch.toml | 2 +- kyverno/pyproject.toml | 2 +- lighttpd/changelog.d/18207.added | 1 + lighttpd/hatch.toml | 2 +- lighttpd/pyproject.toml | 2 +- linkerd/changelog.d/18207.added | 1 + linkerd/hatch.toml | 2 +- linkerd/pyproject.toml | 2 +- linux_proc_extras/changelog.d/18207.added | 1 + linux_proc_extras/hatch.toml | 2 +- linux_proc_extras/pyproject.toml | 2 +- mapr/changelog.d/18207.added | 1 + mapr/hatch.toml | 2 +- mapr/pyproject.toml | 2 +- mapreduce/changelog.d/18207.added | 1 + mapreduce/hatch.toml | 2 +- mapreduce/pyproject.toml | 2 +- marathon/changelog.d/18207.added | 1 + marathon/hatch.toml | 2 +- marathon/pyproject.toml | 2 +- marklogic/changelog.d/18207.added | 1 + marklogic/hatch.toml | 2 +- 
marklogic/pyproject.toml | 2 +- mcache/changelog.d/18207.added | 1 + mcache/hatch.toml | 2 +- mcache/pyproject.toml | 2 +- mesos_master/changelog.d/18207.added | 1 + mesos_master/hatch.toml | 2 +- mesos_master/pyproject.toml | 2 +- mesos_slave/changelog.d/18207.added | 1 + mesos_slave/hatch.toml | 2 +- mesos_slave/pyproject.toml | 2 +- mongo/changelog.d/18207.added | 1 + mongo/hatch.toml | 2 +- mongo/pyproject.toml | 2 +- mysql/changelog.d/18207.added | 1 + mysql/hatch.toml | 6 ++-- mysql/pyproject.toml | 2 +- nagios/changelog.d/18207.added | 1 + nagios/hatch.toml | 2 +- nagios/pyproject.toml | 2 +- network/changelog.d/18207.added | 1 + network/hatch.toml | 2 +- network/pyproject.toml | 2 +- nfsstat/changelog.d/18207.added | 1 + nfsstat/hatch.toml | 2 +- nfsstat/pyproject.toml | 2 +- nginx/changelog.d/18207.added | 1 + nginx/hatch.toml | 2 +- nginx/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + nginx_ingress_controller/hatch.toml | 2 +- nginx_ingress_controller/pyproject.toml | 2 +- nvidia_triton/changelog.d/18207.added | 1 + nvidia_triton/hatch.toml | 2 +- nvidia_triton/pyproject.toml | 2 +- openldap/changelog.d/18207.added | 1 + openldap/hatch.toml | 2 +- openldap/pyproject.toml | 2 +- openmetrics/changelog.d/18207.added | 1 + openmetrics/hatch.toml | 2 +- openmetrics/pyproject.toml | 2 +- openstack/changelog.d/18207.added | 1 + openstack/hatch.toml | 2 +- openstack/pyproject.toml | 2 +- openstack_controller/changelog.d/18207.added | 1 + openstack_controller/hatch.toml | 10 +++--- openstack_controller/pyproject.toml | 2 +- oracle/changelog.d/18207.added | 1 + oracle/pyproject.toml | 2 +- ossec_security/pyproject.toml | 2 +- pan_firewall/changelog.d/18207.added | 1 + pan_firewall/pyproject.toml | 2 +- pdh_check/changelog.d/18207.added | 1 + pdh_check/hatch.toml | 2 +- pdh_check/pyproject.toml | 2 +- pgbouncer/changelog.d/18207.added | 1 + pgbouncer/hatch.toml | 2 +- pgbouncer/pyproject.toml | 2 +- php_fpm/changelog.d/18207.added | 1 + php_fpm/hatch.toml | 2 +- php_fpm/pyproject.toml | 2 +- ping_federate/pyproject.toml | 2 +- postfix/changelog.d/18207.added | 1 + postfix/hatch.toml | 2 +- postfix/pyproject.toml | 2 +- postgres/changelog.d/18207.added | 1 + postgres/hatch.toml | 2 +- postgres/pyproject.toml | 2 +- powerdns_recursor/changelog.d/18207.added | 1 + powerdns_recursor/hatch.toml | 2 +- powerdns_recursor/pyproject.toml | 2 +- presto/changelog.d/18207.added | 1 + presto/hatch.toml | 2 +- presto/pyproject.toml | 2 +- process/changelog.d/18207.added | 1 + process/hatch.toml | 2 +- process/pyproject.toml | 2 +- prometheus/changelog.d/18207.added | 1 + prometheus/hatch.toml | 2 +- prometheus/pyproject.toml | 2 +- proxysql/changelog.d/18207.added | 1 + proxysql/hatch.toml | 2 +- proxysql/pyproject.toml | 2 +- pulsar/changelog.d/18207.added | 1 + pulsar/hatch.toml | 2 +- pulsar/pyproject.toml | 2 +- rabbitmq/changelog.d/18207.added | 1 + rabbitmq/hatch.toml | 2 +- rabbitmq/pyproject.toml | 2 +- ray/changelog.d/18207.added | 1 + ray/hatch.toml | 2 +- ray/pyproject.toml | 2 +- redisdb/changelog.d/18207.added | 1 + redisdb/hatch.toml | 2 +- redisdb/pyproject.toml | 2 +- rethinkdb/changelog.d/18207.added | 1 + rethinkdb/hatch.toml | 2 +- rethinkdb/pyproject.toml | 2 +- riak/changelog.d/18207.added | 1 + riak/hatch.toml | 2 +- riak/pyproject.toml | 2 +- riakcs/changelog.d/18207.added | 1 + riakcs/hatch.toml | 2 +- riakcs/pyproject.toml | 2 +- sap_hana/changelog.d/18207.added | 1 + sap_hana/hatch.toml | 2 +- sap_hana/pyproject.toml | 2 +- scylla/changelog.d/18207.added | 1 + 
scylla/hatch.toml | 2 +- scylla/pyproject.toml | 2 +- sidekiq/changelog.d/18207.added | 1 + sidekiq/pyproject.toml | 2 +- silk/changelog.d/18207.added | 1 + silk/hatch.toml | 2 +- silk/pyproject.toml | 2 +- singlestore/changelog.d/18207.added | 1 + singlestore/hatch.toml | 2 +- singlestore/pyproject.toml | 2 +- snmp/changelog.d/18207.added | 1 + snmp/hatch.toml | 2 +- snmp/pyproject.toml | 2 +- snowflake/changelog.d/18207.added | 1 + snowflake/hatch.toml | 2 +- snowflake/pyproject.toml | 2 +- solr/changelog.d/18207.added | 1 + solr/hatch.toml | 2 +- solr/pyproject.toml | 2 +- sonarqube/changelog.d/18207.added | 1 + sonarqube/hatch.toml | 2 +- sonarqube/pyproject.toml | 2 +- spark/changelog.d/18207.added | 1 + spark/hatch.toml | 2 +- spark/pyproject.toml | 2 +- sqlserver/changelog.d/18207.added | 1 + sqlserver/hatch.toml | 4 +-- sqlserver/pyproject.toml | 2 +- squid/changelog.d/18207.added | 1 + squid/hatch.toml | 2 +- squid/pyproject.toml | 2 +- ssh_check/changelog.d/18207.added | 1 + ssh_check/hatch.toml | 2 +- ssh_check/pyproject.toml | 2 +- statsd/changelog.d/18207.added | 1 + statsd/hatch.toml | 2 +- statsd/pyproject.toml | 2 +- strimzi/changelog.d/18207.added | 1 + strimzi/hatch.toml | 2 +- strimzi/pyproject.toml | 2 +- supervisord/changelog.d/18207.added | 1 + supervisord/hatch.toml | 2 +- supervisord/pyproject.toml | 2 +- suricata/pyproject.toml | 2 +- system_core/changelog.d/18207.added | 1 + system_core/hatch.toml | 2 +- system_core/pyproject.toml | 2 +- system_swap/changelog.d/18207.added | 1 + system_swap/hatch.toml | 2 +- system_swap/pyproject.toml | 2 +- tcp_check/changelog.d/18207.added | 1 + tcp_check/hatch.toml | 2 +- tcp_check/pyproject.toml | 2 +- teamcity/changelog.d/18207.added | 1 + teamcity/hatch.toml | 2 +- teamcity/pyproject.toml | 2 +- tekton/changelog.d/18207.added | 1 + tekton/hatch.toml | 2 +- tekton/pyproject.toml | 2 +- teleport/changelog.d/18207.added | 1 + teleport/hatch.toml | 2 +- teleport/pyproject.toml | 2 +- temporal/changelog.d/18207.added | 1 + temporal/hatch.toml | 2 +- temporal/pyproject.toml | 2 +- tenable/changelog.d/18207.added | 1 + tenable/pyproject.toml | 2 +- teradata/changelog.d/18207.added | 1 + teradata/hatch.toml | 6 ++-- teradata/pyproject.toml | 2 +- tibco_ems/hatch.toml | 2 +- tibco_ems/pyproject.toml | 2 +- tls/changelog.d/18207.added | 1 + tls/hatch.toml | 2 +- tls/pyproject.toml | 2 +- tomcat/changelog.d/18207.added | 1 + tomcat/hatch.toml | 2 +- tomcat/pyproject.toml | 2 +- torchserve/changelog.d/18207.added | 1 + torchserve/hatch.toml | 2 +- torchserve/pyproject.toml | 2 +- traefik_mesh/changelog.d/18207.added | 1 + traefik_mesh/hatch.toml | 2 +- traefik_mesh/pyproject.toml | 2 +- traffic_server/changelog.d/18207.added | 1 + traffic_server/hatch.toml | 2 +- traffic_server/pyproject.toml | 2 +- twemproxy/changelog.d/18207.added | 1 + twemproxy/hatch.toml | 2 +- twemproxy/pyproject.toml | 2 +- twistlock/changelog.d/18207.added | 1 + twistlock/hatch.toml | 2 +- twistlock/pyproject.toml | 2 +- varnish/changelog.d/18207.added | 1 + varnish/hatch.toml | 2 +- varnish/pyproject.toml | 2 +- vault/changelog.d/18207.added | 1 + vault/hatch.toml | 2 +- vault/pyproject.toml | 2 +- vertica/changelog.d/18207.added | 1 + vertica/hatch.toml | 2 +- vertica/pyproject.toml | 2 +- vllm/changelog.d/18207.added | 1 + vllm/hatch.toml | 2 +- vllm/pyproject.toml | 2 +- voltdb/changelog.d/18207.added | 1 + voltdb/hatch.toml | 2 +- voltdb/pyproject.toml | 2 +- vsphere/changelog.d/18207.added | 1 + vsphere/hatch.toml | 2 +- vsphere/pyproject.toml | 2 +- 
weaviate/changelog.d/18207.added | 1 + weaviate/hatch.toml | 2 +- weaviate/pyproject.toml | 2 +- weblogic/changelog.d/18207.added | 1 + weblogic/hatch.toml | 2 +- weblogic/pyproject.toml | 2 +- win32_event_log/changelog.d/18207.added | 1 + win32_event_log/hatch.toml | 2 +- win32_event_log/pyproject.toml | 2 +- .../changelog.d/18207.added | 1 + windows_performance_counters/hatch.toml | 2 +- windows_performance_counters/pyproject.toml | 2 +- windows_service/changelog.d/18207.added | 1 + windows_service/hatch.toml | 2 +- windows_service/pyproject.toml | 2 +- wmi_check/changelog.d/18207.added | 1 + wmi_check/hatch.toml | 2 +- wmi_check/pyproject.toml | 2 +- yarn/changelog.d/18207.added | 1 + yarn/hatch.toml | 2 +- yarn/pyproject.toml | 2 +- zk/changelog.d/18207.added | 1 + zk/hatch.toml | 2 +- zk/pyproject.toml | 2 +- 602 files changed, 696 insertions(+), 459 deletions(-) create mode 100644 active_directory/changelog.d/18207.added create mode 100644 activemq/changelog.d/18207.added create mode 100644 activemq_xml/changelog.d/18207.added create mode 100644 aerospike/changelog.d/18207.added create mode 100644 airflow/changelog.d/18207.added create mode 100644 amazon_msk/changelog.d/18207.added create mode 100644 ambari/changelog.d/18207.added create mode 100644 apache/changelog.d/18207.added create mode 100644 arangodb/changelog.d/18207.added create mode 100644 argo_rollouts/changelog.d/18207.added create mode 100644 argo_workflows/changelog.d/18207.added create mode 100644 argocd/changelog.d/18207.added create mode 100644 aspdotnet/changelog.d/18207.added create mode 100644 avi_vantage/changelog.d/18207.added create mode 100644 azure_iot_edge/changelog.d/18207.added create mode 100644 boundary/changelog.d/18207.added create mode 100644 btrfs/changelog.d/18207.added create mode 100644 cacti/changelog.d/18207.added create mode 100644 calico/changelog.d/18207.added create mode 100644 cassandra/changelog.d/18207.added create mode 100644 cassandra_nodetool/changelog.d/18207.added create mode 100644 ceph/changelog.d/18207.added create mode 100644 cert_manager/changelog.d/18207.added create mode 100644 cilium/changelog.d/18207.added create mode 100644 cisco_aci/changelog.d/18207.added create mode 100644 citrix_hypervisor/changelog.d/18207.added create mode 100644 clickhouse/changelog.d/18207.added create mode 100644 cloud_foundry_api/changelog.d/18207.added create mode 100644 cloudera/changelog.d/18207.added create mode 100644 cockroachdb/changelog.d/18207.added create mode 100644 confluent_platform/changelog.d/18207.added create mode 100644 consul/changelog.d/18207.added create mode 100644 coredns/changelog.d/18207.added create mode 100644 couch/changelog.d/18207.added create mode 100644 couchbase/changelog.d/18207.added create mode 100644 crio/changelog.d/18207.added create mode 100644 datadog_checks_base/changelog.d/18207.added create mode 100644 datadog_checks_dependency_provider/changelog.d/18207.added create mode 100644 datadog_checks_dev/changelog.d/18207.added create mode 100644 datadog_checks_downloader/changelog.d/18207.added create mode 100644 datadog_cluster_agent/changelog.d/18207.added create mode 100644 dcgm/changelog.d/18207.added create mode 100644 ddev/changelog.d/18207.added create mode 100644 directory/changelog.d/18207.added create mode 100644 disk/changelog.d/18207.added create mode 100644 dns_check/changelog.d/18207.added create mode 100644 dotnetclr/changelog.d/18207.added create mode 100644 druid/changelog.d/18207.added create mode 100644 ecs_fargate/changelog.d/18207.added 
create mode 100644 eks_fargate/changelog.d/18207.added create mode 100644 elastic/changelog.d/18207.added create mode 100644 envoy/changelog.d/18207.added create mode 100644 esxi/changelog.d/18207.added create mode 100644 etcd/changelog.d/18207.added create mode 100644 exchange_server/changelog.d/18207.added create mode 100644 external_dns/changelog.d/18207.added create mode 100644 flink/changelog.d/18207.added create mode 100644 fluentd/changelog.d/18207.added create mode 100644 fluxcd/changelog.d/18207.added create mode 100644 foundationdb/changelog.d/18207.added create mode 100644 gearmand/changelog.d/18207.added create mode 100644 gitlab/changelog.d/18207.added create mode 100644 gitlab_runner/changelog.d/18207.added create mode 100644 glusterfs/changelog.d/18207.added create mode 100644 go_expvar/changelog.d/18207.added create mode 100644 gunicorn/changelog.d/18207.added create mode 100644 haproxy/changelog.d/18207.added create mode 100644 harbor/changelog.d/18207.added create mode 100644 hazelcast/changelog.d/18207.added create mode 100644 hdfs_datanode/changelog.d/18207.added create mode 100644 hdfs_namenode/changelog.d/18207.added create mode 100644 hive/changelog.d/18207.added create mode 100644 hivemq/changelog.d/18207.added create mode 100644 http_check/changelog.d/18207.added create mode 100644 hudi/changelog.d/18207.added create mode 100644 hyperv/changelog.d/18207.added create mode 100644 ibm_ace/changelog.d/18207.added create mode 100644 ibm_db2/changelog.d/18207.added create mode 100644 ibm_i/changelog.d/18207.added create mode 100644 ibm_mq/changelog.d/18207.added create mode 100644 ibm_was/changelog.d/18207.added create mode 100644 ignite/changelog.d/18207.added create mode 100644 iis/changelog.d/18207.added create mode 100644 impala/changelog.d/18207.added create mode 100644 istio/changelog.d/18207.added create mode 100644 jboss_wildfly/changelog.d/18207.added create mode 100644 journald/changelog.d/18207.added create mode 100644 kafka/changelog.d/18207.added create mode 100644 kafka_consumer/changelog.d/18207.added create mode 100644 karpenter/changelog.d/18207.added create mode 100644 kong/changelog.d/18207.added create mode 100644 kube_apiserver_metrics/changelog.d/18207.added create mode 100644 kube_controller_manager/changelog.d/18207.added create mode 100644 kube_dns/changelog.d/18207.added create mode 100644 kube_metrics_server/changelog.d/18207.added create mode 100644 kube_proxy/changelog.d/18207.added create mode 100644 kube_scheduler/changelog.d/18207.added create mode 100644 kubelet/changelog.d/18207.added create mode 100644 kubernetes_cluster_autoscaler/changelog.d/18207.added create mode 100644 kubernetes_state/changelog.d/18207.added create mode 100644 kyototycoon/changelog.d/18207.added create mode 100644 kyverno/changelog.d/18207.added create mode 100644 lighttpd/changelog.d/18207.added create mode 100644 linkerd/changelog.d/18207.added create mode 100644 linux_proc_extras/changelog.d/18207.added create mode 100644 mapr/changelog.d/18207.added create mode 100644 mapreduce/changelog.d/18207.added create mode 100644 marathon/changelog.d/18207.added create mode 100644 marklogic/changelog.d/18207.added create mode 100644 mcache/changelog.d/18207.added create mode 100644 mesos_master/changelog.d/18207.added create mode 100644 mesos_slave/changelog.d/18207.added create mode 100644 mongo/changelog.d/18207.added create mode 100644 mysql/changelog.d/18207.added create mode 100644 nagios/changelog.d/18207.added create mode 100644 network/changelog.d/18207.added 
create mode 100644 nfsstat/changelog.d/18207.added create mode 100644 nginx/changelog.d/18207.added create mode 100644 nginx_ingress_controller/changelog.d/18207.added create mode 100644 nvidia_triton/changelog.d/18207.added create mode 100644 openldap/changelog.d/18207.added create mode 100644 openmetrics/changelog.d/18207.added create mode 100644 openstack/changelog.d/18207.added create mode 100644 openstack_controller/changelog.d/18207.added create mode 100644 oracle/changelog.d/18207.added create mode 100644 pan_firewall/changelog.d/18207.added create mode 100644 pdh_check/changelog.d/18207.added create mode 100644 pgbouncer/changelog.d/18207.added create mode 100644 php_fpm/changelog.d/18207.added create mode 100644 postfix/changelog.d/18207.added create mode 100644 postgres/changelog.d/18207.added create mode 100644 powerdns_recursor/changelog.d/18207.added create mode 100644 presto/changelog.d/18207.added create mode 100644 process/changelog.d/18207.added create mode 100644 prometheus/changelog.d/18207.added create mode 100644 proxysql/changelog.d/18207.added create mode 100644 pulsar/changelog.d/18207.added create mode 100644 rabbitmq/changelog.d/18207.added create mode 100644 ray/changelog.d/18207.added create mode 100644 redisdb/changelog.d/18207.added create mode 100644 rethinkdb/changelog.d/18207.added create mode 100644 riak/changelog.d/18207.added create mode 100644 riakcs/changelog.d/18207.added create mode 100644 sap_hana/changelog.d/18207.added create mode 100644 scylla/changelog.d/18207.added create mode 100644 sidekiq/changelog.d/18207.added create mode 100644 silk/changelog.d/18207.added create mode 100644 singlestore/changelog.d/18207.added create mode 100644 snmp/changelog.d/18207.added create mode 100644 snowflake/changelog.d/18207.added create mode 100644 solr/changelog.d/18207.added create mode 100644 sonarqube/changelog.d/18207.added create mode 100644 spark/changelog.d/18207.added create mode 100644 sqlserver/changelog.d/18207.added create mode 100644 squid/changelog.d/18207.added create mode 100644 ssh_check/changelog.d/18207.added create mode 100644 statsd/changelog.d/18207.added create mode 100644 strimzi/changelog.d/18207.added create mode 100644 supervisord/changelog.d/18207.added create mode 100644 system_core/changelog.d/18207.added create mode 100644 system_swap/changelog.d/18207.added create mode 100644 tcp_check/changelog.d/18207.added create mode 100644 teamcity/changelog.d/18207.added create mode 100644 tekton/changelog.d/18207.added create mode 100644 teleport/changelog.d/18207.added create mode 100644 temporal/changelog.d/18207.added create mode 100644 tenable/changelog.d/18207.added create mode 100644 teradata/changelog.d/18207.added create mode 100644 tls/changelog.d/18207.added create mode 100644 tomcat/changelog.d/18207.added create mode 100644 torchserve/changelog.d/18207.added create mode 100644 traefik_mesh/changelog.d/18207.added create mode 100644 traffic_server/changelog.d/18207.added create mode 100644 twemproxy/changelog.d/18207.added create mode 100644 twistlock/changelog.d/18207.added create mode 100644 varnish/changelog.d/18207.added create mode 100644 vault/changelog.d/18207.added create mode 100644 vertica/changelog.d/18207.added create mode 100644 vllm/changelog.d/18207.added create mode 100644 voltdb/changelog.d/18207.added create mode 100644 vsphere/changelog.d/18207.added create mode 100644 weaviate/changelog.d/18207.added create mode 100644 weblogic/changelog.d/18207.added create mode 100644 
win32_event_log/changelog.d/18207.added
 create mode 100644 windows_performance_counters/changelog.d/18207.added
 create mode 100644 windows_service/changelog.d/18207.added
 create mode 100644 wmi_check/changelog.d/18207.added
 create mode 100644 yarn/changelog.d/18207.added
 create mode 100644 zk/changelog.d/18207.added

diff --git a/.builders/images/linux-aarch64/Dockerfile b/.builders/images/linux-aarch64/Dockerfile
index eca90a702cc87..29a7795e968a6 100644
--- a/.builders/images/linux-aarch64/Dockerfile
+++ b/.builders/images/linux-aarch64/Dockerfile
@@ -53,11 +53,11 @@ RUN yum install -y perl-IPC-Cmd && \
     ldconfig
 
 # Compile and install Python 3
-ENV PYTHON3_VERSION=3.11.8
+ENV PYTHON3_VERSION=3.12.6
 RUN yum install -y libffi-devel && \
   DOWNLOAD_URL="https://python.org/ftp/python/{{version}}/Python-{{version}}.tgz" \
   VERSION="${PYTHON3_VERSION}" \
-  SHA256="d3019a613b9e8761d260d9ebe3bd4df63976de30464e5c0189566e1ae3f61889" \
+  SHA256="85a4c1be906d20e5c5a69f2466b00da769c221d6a684acfd3a514dbf5bf10a66" \
   RELATIVE_PATH="Python-{{version}}" \
   bash install-from-source.sh \
     --prefix=/opt/python/${PYTHON_VERSION} \
diff --git a/.builders/images/linux-x86_64/Dockerfile b/.builders/images/linux-x86_64/Dockerfile
index 0ea7bf99315d6..6c87ac897dc5e 100644
--- a/.builders/images/linux-x86_64/Dockerfile
+++ b/.builders/images/linux-x86_64/Dockerfile
@@ -51,11 +51,11 @@ RUN yum install -y perl-IPC-Cmd && \
     ldconfig
 
 # Compile and install Python 3
-ENV PYTHON3_VERSION=3.11.8
+ENV PYTHON3_VERSION=3.12.6
 RUN yum install -y libffi-devel && \
   DOWNLOAD_URL="https://python.org/ftp/python/{{version}}/Python-{{version}}.tgz" \
   VERSION="${PYTHON3_VERSION}" \
-  SHA256="d3019a613b9e8761d260d9ebe3bd4df63976de30464e5c0189566e1ae3f61889" \
+  SHA256="85a4c1be906d20e5c5a69f2466b00da769c221d6a684acfd3a514dbf5bf10a66" \
   RELATIVE_PATH="Python-{{version}}" \
   bash install-from-source.sh --prefix=/opt/python/${PYTHON_VERSION} --with-ensurepip=yes --enable-ipv6 --with-dbmliborder=
 ENV PATH="/opt/python/${PYTHON_VERSION}/bin:${PATH}"
@@ -81,6 +81,36 @@ RUN \
   RELATIVE_PATH="krb5-{{version}}/src" \
   bash install-from-source.sh --without-keyutils --without-system-verto --without-libedit --disable-static
 
+# libxml & libxslt for lxml
+RUN \
+  DOWNLOAD_URL="https://download.gnome.org/sources/libxml2/2.12/libxml2-{{version}}.tar.xz" \
+  VERSION="2.12.6" \
+  SHA256="889c593a881a3db5fdd96cc9318c87df34eb648edfc458272ad46fd607353fbb" \
+  RELATIVE_PATH="libxml2-{{version}}" \
+  bash install-from-source.sh \
+    --without-iconv \
+    --without-python \
+    --without-icu \
+    --without-debug \
+    --without-mem-debug \
+    --without-run-debug \
+    --without-legacy \
+    --without-catalog \
+    --without-docbook \
+    --disable-static
+
+RUN \
+  DOWNLOAD_URL="https://download.gnome.org/sources/libxslt/1.1/libxslt-{{version}}.tar.xz" \
+  VERSION="1.1.39" \
+  SHA256="2a20ad621148339b0759c4d4e96719362dee64c9a096dbba625ba053846349f0" \
+  RELATIVE_PATH="libxslt-{{version}}" \
+  bash install-from-source.sh \
+    --without-python \
+    --without-crypto \
+    --without-profiler \
+    --without-debugger \
+    --disable-static
+
 # libpq and pg_config as needed by psycopg2
 RUN \
   DOWNLOAD_URL="https://ftp.postgresql.org/pub/source/v{{version}}/postgresql-{{version}}.tar.bz2" \
diff --git a/.builders/images/windows-x86_64/Dockerfile b/.builders/images/windows-x86_64/Dockerfile
index 8838692733e27..03c3c80da91fb 100644
--- a/.builders/images/windows-x86_64/Dockerfile
+++ b/.builders/images/windows-x86_64/Dockerfile
@@ -73,17 +73,17 @@ RUN Get-RemoteFile `
     Approve-File -Path $($Env:USERPROFILE + '\.cargo\bin\rustc.exe') -Hash $Env:RUSTC_HASH
 
 # Install Python 3
-ENV PYTHON_VERSION="3.11.7"
+ENV PYTHON_VERSION="3.12.6"
 RUN Get-RemoteFile `
         -Uri https://www.python.org/ftp/python/$Env:PYTHON_VERSION/python-$Env:PYTHON_VERSION-amd64.exe `
         -Path python-$Env:PYTHON_VERSION-amd64.exe `
-        -Hash 'c117c6444494bbe4cc937e8a5a61899d53f7f5c5bc573c5d130304e457d54024'; `
+        -Hash '5914748e6580e70bedeb7c537a0832b3071de9e09a2e4e7e3d28060616045e0a'; `
     Start-Process -Wait python-$Env:PYTHON_VERSION-amd64.exe -ArgumentList '/quiet', 'InstallAllUsers=1'; `
     Remove-Item python-$Env:PYTHON_VERSION-amd64.exe; `
-    & 'C:\Program Files\Python311\python.exe' -m pip install --no-warn-script-location --upgrade pip; `
-    & 'C:\Program Files\Python311\python.exe' -m pip install --no-warn-script-location virtualenv; `
-    & 'C:\Program Files\Python311\python.exe' -m virtualenv 'C:\py3'; `
-    Add-ToPath -Append 'C:\Program Files\Python311'
+    & 'C:\Program Files\Python312\python.exe' -m pip install --no-warn-script-location --upgrade pip; `
+    & 'C:\Program Files\Python312\python.exe' -m pip install --no-warn-script-location virtualenv; `
+    & 'C:\Program Files\Python312\python.exe' -m virtualenv 'C:\py3'; `
+    Add-ToPath -Append 'C:\Program Files\Python312'
 
 # Install Python 2
 ENV PYTHON_VERSION="2.7.18"
diff --git a/.builders/lock.py b/.builders/lock.py
index 6899054065121..52d5b1f731605 100644
--- a/.builders/lock.py
+++ b/.builders/lock.py
@@ -36,6 +36,11 @@ def default_python_version() -> str:
     return match.group(1)
 
 
+@cache
+def target_python_for_major(python_major: str):
+    return '2.7' if python_major == '2' else default_python_version()
+
+
 def is_compatible_wheel(
     target_name: str,
     target_python_major: str,
@@ -44,7 +49,7 @@ def is_compatible_wheel(
     platform: str,
 ) -> bool:
     if interpreter.startswith('cp'):
-        target_python = '2.7' if target_python_major == '2' else default_python_version()
+        target_python = target_python_for_major(target_python_major)
         expected_tag = f'cp{target_python_major}' if abi == 'abi3' else f'cp{target_python}'.replace('.', '')
         if expected_tag not in interpreter:
             return False
@@ -59,8 +64,17 @@ def is_compatible_wheel(
     return True
 
 
-def generate_lock_file(requirements_file: Path, lock_file: Path) -> None:
-    target, _, python_version = lock_file.stem.rpartition('_')
+def generate_lock_file(
+    requirements_file: Path,
+    lock_file_folder: Path,
+    target: str,
+    python_version: str,
+) -> None:
+    python_target = target_python_for_major(python_version)
+    # The lockfiles contain the major.minor Python version
+    # so that the Agent can transition safely
+    lock_file = lock_file_folder / f'{target}_{python_target}.txt'
+
     python_major = python_version[-1]
 
     dependencies: dict[str, str] = {}
@@ -135,7 +149,10 @@ def main():
         for python_version in target.iterdir():
             if python_version.name.startswith('py'):
                 generate_lock_file(
-                    python_version / 'frozen.txt', LOCK_FILE_DIR / f'{target.name}_{python_version.name}.txt'
+                    python_version / 'frozen.txt',
+                    LOCK_FILE_DIR,
+                    target.name,
+                    python_version.name.strip('py'),
                 )
 
     if (image_digest_file := target / 'image_digest').is_file():
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 46b5d17f958be..01960e1f9e162 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -1,6 +1,6 @@
 # This dockerfile is used to build the devcontainer environment.
 # more info about vscode devcontainer: https://code.visualstudio.com/docs/devcontainers/containers
-FROM mcr.microsoft.com/devcontainers/python:1-3.11-bullseye
+FROM mcr.microsoft.com/devcontainers/python:1-3.12-bullseye
 RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y liblz4-dev libunwind-dev ca-certificates curl gnupg
 # Docker and docker-compose installation
 RUN install -m 0755 -d /etc/apt/keyrings
diff --git a/.devcontainer/dbm/Dockerfile b/.devcontainer/dbm/Dockerfile
index 9fc50f45142b9..98a347726cfc2 100644
--- a/.devcontainer/dbm/Dockerfile
+++ b/.devcontainer/dbm/Dockerfile
@@ -1,6 +1,6 @@
 # This dockerfile is used to build the devcontainer environment.
 # more info about vscode devcontainer: https://code.visualstudio.com/docs/devcontainers/containers
-FROM mcr.microsoft.com/devcontainers/python:1-3.11-bullseye
+FROM mcr.microsoft.com/devcontainers/python:1-3.12-bullseye
 RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y liblz4-dev libunwind-dev ca-certificates curl gnupg
 # Docker and docker-compose installation
 RUN install -m 0755 -d /etc/apt/keyrings
diff --git a/.devcontainer/dbm/devcontainer.json b/.devcontainer/dbm/devcontainer.json
index 2e841ce8f3231..f136629acad0e 100644
--- a/.devcontainer/dbm/devcontainer.json
+++ b/.devcontainer/dbm/devcontainer.json
@@ -8,7 +8,7 @@
   "privileged": true,
   "features": {
     "ghcr.io/devcontainers/features/python:1": {
-      "version": "3.11"
+      "version": "3.12"
     }
   },
   "customizations": {
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 935588a034939..c3e36bc9a4802 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -16,7 +16,7 @@
     // When updating it, modify both the base Dockerfile and the devcontainer.json reference.
    // Ref: https://github.com/devcontainers/features/blob/562305d37b97d47331d96306ffc2a0a3cce55e64/src/python/install.sh#L10
    "ghcr.io/devcontainers/features/python:1": {
-      "version": "3.11"
+      "version": "3.12"
    }
  },
 
diff --git a/.github/workflows/build-ddev.yml b/.github/workflows/build-ddev.yml
index 1019ec3bdd0b3..420238b467ecb 100644
--- a/.github/workflows/build-ddev.yml
+++ b/.github/workflows/build-ddev.yml
@@ -24,7 +24,7 @@ defaults:
 
 env:
   APP_NAME: ddev
-  PYTHON_VERSION: "3.11"
+  PYTHON_VERSION: "3.12"
   PYOXIDIZER_VERSION: "0.24.0"
 
 jobs:
diff --git a/.github/workflows/build-deps.yml b/.github/workflows/build-deps.yml
index 874989502850f..0052055a900e8 100644
--- a/.github/workflows/build-deps.yml
+++ b/.github/workflows/build-deps.yml
@@ -29,7 +29,7 @@ defaults:
 
 env:
   PYTHONUNBUFFERED: "1"
-  PYTHON_VERSION: "3.11"
+  PYTHON_VERSION: "3.12"
   DIRECT_DEPENDENCY_FILE: agent_requirements.in
   # https://reproducible-builds.org/specs/source-date-epoch/
   SOURCE_DATE_EPOCH: "1580601600"
@@ -109,7 +109,7 @@ jobs:
         if: matrix.job.image == 'linux-aarch64'
         run: |
           mkdir -p ~/miniconda3
-          wget https://repo.anaconda.com/miniconda/Miniconda3-py311_24.1.2-0-Linux-aarch64.sh -O ~/miniconda3/miniconda.sh
+          wget https://repo.anaconda.com/miniconda/Miniconda3-py312_24.5.0-0-Linux-aarch64.sh -O ~/miniconda3/miniconda.sh
           bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3
           rm -rf ~/miniconda3/miniconda.sh
           ~/miniconda3/bin/conda init bash
@@ -201,7 +201,7 @@ jobs:
     env:
       TARGET_NAME: macos-x86_64
       OUT_DIR: output/macos-x86_64
-      DD_PYTHON3: "/Library/Frameworks/Python.framework/Versions/3.11/bin/python"
+      DD_PYTHON3: "/Library/Frameworks/Python.framework/Versions/3.12/bin/python"
 
     steps:
     - name: Set up environment
@@ -214,7 +214,7 @@
     - name: Set up Python
       env:
         # Despite the name, this is built for the macOS 11 SDK on arm64 and 10.9+ on intel
-        PYTHON3_DOWNLOAD_URL: "https://www.python.org/ftp/python/3.11.5/python-3.11.5-macos11.pkg"
+        PYTHON3_DOWNLOAD_URL: "https://www.python.org/ftp/python/3.12.6/python-3.12.6-macos11.pkg"
       run: |-
         curl "$PYTHON3_DOWNLOAD_URL" -o python3.pkg
         sudo installer -pkg python3.pkg -target /
diff --git a/.github/workflows/cache-shared-deps.yml b/.github/workflows/cache-shared-deps.yml
index 05379e5a5ee52..c3dc212dd9fc7 100644
--- a/.github/workflows/cache-shared-deps.yml
+++ b/.github/workflows/cache-shared-deps.yml
@@ -16,7 +16,7 @@ jobs:
         os: [ubuntu-22.04, windows-2022]
 
     env:
-      PYTHON_VERSION: "3.11"
+      PYTHON_VERSION: "3.12"
 
     steps:
     - uses: actions/checkout@v4
diff --git a/.github/workflows/compute-matrix.yml b/.github/workflows/compute-matrix.yml
index d2ca1a5625563..a85759012ab2d 100644
--- a/.github/workflows/compute-matrix.yml
+++ b/.github/workflows/compute-matrix.yml
@@ -15,7 +15,7 @@ defaults:
     shell: bash
 
 env:
-  PYTHON_VERSION: "3.11"
+  PYTHON_VERSION: "3.12"
   MATRIX_SCRIPT: "ddev/src/ddev/utils/scripts/ci_matrix.py"
 
 jobs:
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index b79a8b0d046b2..4113adce459bf 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -32,7 +32,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
+        python-version: '3.12'
         cache: 'pip'
 
     - name: Upgrade Python packaging tools
diff --git a/.github/workflows/pr-quick-check.yml b/.github/workflows/pr-quick-check.yml
index 4165b491ac5c8..40bf4f8e07cf4 100644
--- a/.github/workflows/pr-quick-check.yml
+++ b/.github/workflows/pr-quick-check.yml
@@ -12,7 +12,7 @@ defaults:
   shell: bash
 
 env:
-  PYTHON_VERSION: "3.11"
+  PYTHON_VERSION: "3.12"
   CHECK_SCRIPT: "ddev/src/ddev/utils/scripts/check_pr.py"
 
 jobs:
diff --git a/.github/workflows/release-base.yml b/.github/workflows/release-base.yml
index ad61dc4c7e715..3d32a198fdfb8 100644
--- a/.github/workflows/release-base.yml
+++ b/.github/workflows/release-base.yml
@@ -15,7 +15,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
+        python-version: '3.12'
 
     - name: Upgrade Python packaging tools
       run: pip install --disable-pip-version-check --upgrade pip setuptools wheel
diff --git a/.github/workflows/release-dev.yml b/.github/workflows/release-dev.yml
index 409c27bd412a0..59d77c92432dc 100644
--- a/.github/workflows/release-dev.yml
+++ b/.github/workflows/release-dev.yml
@@ -15,7 +15,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
+        python-version: '3.12'
 
     - name: Upgrade Python packaging tools
       run: pip install --disable-pip-version-check --upgrade pip setuptools wheel
diff --git a/.github/workflows/release-hash-check.yml b/.github/workflows/release-hash-check.yml
index 8eefbdf48b256..9bc603a3ed1d4 100644
--- a/.github/workflows/release-hash-check.yml
+++ b/.github/workflows/release-hash-check.yml
@@ -19,7 +19,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
-        python-version: '3.11'
+        python-version: '3.12'
 
     - id: files
       run: |
diff --git a/.github/workflows/run-validations.yml b/.github/workflows/run-validations.yml
index ca52dc1d5cda1..0d825da22215c 100644
--- a/.github/workflows/run-validations.yml
+++ b/.github/workflows/run-validations.yml
@@ -117,7 +117,7 @@ jobs:
     runs-on: ubuntu-22.04
 
     env:
-      PYTHON_VERSION: "3.11"
+      PYTHON_VERSION: "3.12"
       TARGET: ${{ github.event_name == 'pull_request' && 'changed' || '' }}
 
     steps:
diff --git a/.github/workflows/test-target.yml b/.github/workflows/test-target.yml
index e126b9b00b720..b527a7ee878df 100644
--- a/.github/workflows/test-target.yml
+++ b/.github/workflows/test-target.yml
@@ -94,8 +94,8 @@ jobs:
 
     env:
       FORCE_COLOR: "1"
-      PYTHON_VERSION: "${{ inputs.python-version || '3.11' }}"
-      PYTHON_FILTER: "${{ (inputs.test-py2 && !inputs.test-py3) && '2.7' || (!inputs.test-py2 && inputs.test-py3) && (inputs.python-version || '3.11') || '' }}"
+      PYTHON_VERSION: "${{ inputs.python-version || '3.12' }}"
+      PYTHON_FILTER: "${{ (inputs.test-py2 && !inputs.test-py3) && '2.7' || (!inputs.test-py2 && inputs.test-py3) && (inputs.python-version || '3.12') || '' }}"
       SKIP_ENV_NAME: "${{ (inputs.test-py2 && !inputs.test-py3) && 'py3.*' || (!inputs.test-py2 && inputs.test-py3) && 'py2.*' || '' }}"
       # Windows E2E requires Windows containers
       DDEV_E2E_AGENT: "${{ inputs.platform == 'windows' && (inputs.agent-image-windows || 'datadog/agent-dev:master-py3-win-servercore') || inputs.agent-image }}"
diff --git a/.github/workflows/update-agent-changelog.yml b/.github/workflows/update-agent-changelog.yml
index eeafbd061ae3a..d426d183ca7ea 100644
--- a/.github/workflows/update-agent-changelog.yml
+++ b/.github/workflows/update-agent-changelog.yml
@@ -18,7 +18,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
-        python-version: "3.11"
+        python-version: 3.12
     - name: Install ddev from local folder
       run: |-
         pip install -e ./datadog_checks_dev[cli]
diff --git a/.github/workflows/update-dependencies.yml b/.github/workflows/update-dependencies.yml
index a06135cc8c54c..78a78aa87a69e 100644
--- a/.github/workflows/update-dependencies.yml
+++ b/.github/workflows/update-dependencies.yml
@@ -18,7 +18,7 @@ jobs:
     - name: Set up Python
       uses: actions/setup-python@v5
       with:
- python-version: "3.11" + python-version: 3.12 - name: Install ddev from local folder run: |- pip install -e ./datadog_checks_dev[cli] diff --git a/.gitlab/tagger/Dockerfile b/.gitlab/tagger/Dockerfile index b0835e5b71c38..e6cc27674f73b 100644 --- a/.gitlab/tagger/Dockerfile +++ b/.gitlab/tagger/Dockerfile @@ -1,5 +1,5 @@ # Use a separate image to build ddev from source so the final image doesn't need git -FROM python:3.11 AS ddev-python +FROM python:3.12 AS ddev-python COPY . /home RUN python -m pip install build RUN python -m build -s ./home/ddev diff --git a/active_directory/changelog.d/18207.added b/active_directory/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/active_directory/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/active_directory/hatch.toml b/active_directory/hatch.toml index 63c94c51afff5..f9d790e2603e8 100644 --- a/active_directory/hatch.toml +++ b/active_directory/hatch.toml @@ -10,4 +10,4 @@ platforms = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/active_directory/pyproject.toml b/active_directory/pyproject.toml index 4c890ddf12e26..e3a3d3611e863 100644 --- a/active_directory/pyproject.toml +++ b/active_directory/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/activemq/changelog.d/18207.added b/activemq/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/activemq/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/activemq/hatch.toml b/activemq/hatch.toml index 4dfa61b7f1519..2194aa0cbf91c 100644 --- a/activemq/hatch.toml +++ b/activemq/hatch.toml @@ -1,12 +1,12 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.15.0"] compose-file = ["artemis.yaml"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["5.15.9"] compose-file = ["activemq.yaml"] diff --git a/activemq/pyproject.toml b/activemq/pyproject.toml index b42281225f9dc..d2aad008b6ca4 100644 --- a/activemq/pyproject.toml +++ b/activemq/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/activemq_xml/changelog.d/18207.added b/activemq_xml/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/activemq_xml/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/activemq_xml/hatch.toml b/activemq_xml/hatch.toml index cde2e9940b124..15ccd88f34a1e 100644 --- a/activemq_xml/hatch.toml +++ b/activemq_xml/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["5.11.1"] [envs.default.overrides] diff --git a/activemq_xml/pyproject.toml b/activemq_xml/pyproject.toml index 6280e20a75d14..3c23903fb08c9 100644 --- 
a/activemq_xml/pyproject.toml +++ b/activemq_xml/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/aerospike/changelog.d/18207.added b/aerospike/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/aerospike/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/aerospike/hatch.toml b/aerospike/hatch.toml index 2e8eaf335531e..addb780e2e411 100644 --- a/aerospike/hatch.toml +++ b/aerospike/hatch.toml @@ -3,7 +3,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4", "5.0", "5.3", "5.6"] [envs.default.overrides] diff --git a/aerospike/pyproject.toml b/aerospike/pyproject.toml index ff847c5883619..78d14d3e5d834 100644 --- a/aerospike/pyproject.toml +++ b/aerospike/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/airflow/changelog.d/18207.added b/airflow/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/airflow/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/airflow/hatch.toml b/airflow/hatch.toml index 84ee67e30748c..31074a4c6ccf7 100644 --- a/airflow/hatch.toml +++ b/airflow/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.1", "2.6"] [envs.default.overrides] diff --git a/airflow/pyproject.toml b/airflow/pyproject.toml index 8a7582317bd24..9d883d928e13c 100644 --- a/airflow/pyproject.toml +++ b/airflow/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/amazon_msk/changelog.d/18207.added b/amazon_msk/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/amazon_msk/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/amazon_msk/hatch.toml b/amazon_msk/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/amazon_msk/hatch.toml +++ b/amazon_msk/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/amazon_msk/pyproject.toml b/amazon_msk/pyproject.toml index f47548b11d133..475ce4e7031de 100644 --- a/amazon_msk/pyproject.toml +++ b/amazon_msk/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", 
"Private :: Do Not Upload", ] diff --git a/ambari/changelog.d/18207.added b/ambari/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ambari/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ambari/hatch.toml b/ambari/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/ambari/hatch.toml +++ b/ambari/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/ambari/pyproject.toml b/ambari/pyproject.toml index 77806c16a9596..23c506e9b8a0c 100644 --- a/ambari/pyproject.toml +++ b/ambari/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/apache/changelog.d/18207.added b/apache/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/apache/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/apache/hatch.toml b/apache/hatch.toml index d57e8fb4781a8..9b487f51ed6c4 100644 --- a/apache/hatch.toml +++ b/apache/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.4.27"] [envs.default.overrides] diff --git a/apache/pyproject.toml b/apache/pyproject.toml index 977063e134e8c..c5eb9434f534e 100644 --- a/apache/pyproject.toml +++ b/apache/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/arangodb/changelog.d/18207.added b/arangodb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/arangodb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/arangodb/hatch.toml b/arangodb/hatch.toml index 63797eda67bf6..cf7caf8f2678b 100644 --- a/arangodb/hatch.toml +++ b/arangodb/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.8"] [envs.default.overrides] diff --git a/arangodb/pyproject.toml b/arangodb/pyproject.toml index 77918dd730a3a..58702df689362 100644 --- a/arangodb/pyproject.toml +++ b/arangodb/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/argo_rollouts/changelog.d/18207.added b/argo_rollouts/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/argo_rollouts/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/argo_rollouts/hatch.toml b/argo_rollouts/hatch.toml index 777aac7b7ec9e..e39921cdb93c2 100644 --- a/argo_rollouts/hatch.toml +++ b/argo_rollouts/hatch.toml 
@@ -4,7 +4,7 @@ DDEV_SKIP_GENERIC_TAGS_CHECK = "true" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.6.6"] [envs.default.overrides] diff --git a/argo_rollouts/pyproject.toml b/argo_rollouts/pyproject.toml index 219f13ff22d36..d05e7a5c0c01b 100644 --- a/argo_rollouts/pyproject.toml +++ b/argo_rollouts/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/argo_workflows/changelog.d/18207.added b/argo_workflows/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/argo_workflows/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/argo_workflows/hatch.toml b/argo_workflows/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/argo_workflows/hatch.toml +++ b/argo_workflows/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/argo_workflows/pyproject.toml b/argo_workflows/pyproject.toml index b806ec9b49e37..42bcc168aba65 100644 --- a/argo_workflows/pyproject.toml +++ b/argo_workflows/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/argocd/changelog.d/18207.added b/argocd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/argocd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/argocd/hatch.toml b/argocd/hatch.toml index 6c5cba84584d7..e31ecbafc6d6f 100644 --- a/argocd/hatch.toml +++ b/argocd/hatch.toml @@ -4,7 +4,7 @@ DDEV_SKIP_GENERIC_TAGS_CHECK = "true" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.4.7"] [envs.default.overrides] diff --git a/argocd/pyproject.toml b/argocd/pyproject.toml index 0d6a92f5dcb11..ce1cd506cc3f2 100644 --- a/argocd/pyproject.toml +++ b/argocd/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/aspdotnet/changelog.d/18207.added b/aspdotnet/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/aspdotnet/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/aspdotnet/hatch.toml b/aspdotnet/hatch.toml index 63c94c51afff5..f9d790e2603e8 100644 --- a/aspdotnet/hatch.toml +++ b/aspdotnet/hatch.toml @@ -10,4 +10,4 @@ platforms = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/aspdotnet/pyproject.toml b/aspdotnet/pyproject.toml index ec447d479c201..c73f179dea07f 100644 --- a/aspdotnet/pyproject.toml +++ b/aspdotnet/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: 
BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/avi_vantage/changelog.d/18207.added b/avi_vantage/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/avi_vantage/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/avi_vantage/hatch.toml b/avi_vantage/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/avi_vantage/hatch.toml +++ b/avi_vantage/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/avi_vantage/pyproject.toml b/avi_vantage/pyproject.toml index 1f45ae5136866..39fd1d7798e3b 100644 --- a/avi_vantage/pyproject.toml +++ b/avi_vantage/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/aws_neuron/hatch.toml b/aws_neuron/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/aws_neuron/hatch.toml +++ b/aws_neuron/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/aws_neuron/pyproject.toml b/aws_neuron/pyproject.toml index efd983eb0051d..4a7005002add4 100644 --- a/aws_neuron/pyproject.toml +++ b/aws_neuron/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/azure_iot_edge/changelog.d/18207.added b/azure_iot_edge/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/azure_iot_edge/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/azure_iot_edge/hatch.toml b/azure_iot_edge/hatch.toml index 52b6d241c7bf8..f58f23834bfbe 100644 --- a/azure_iot_edge/hatch.toml +++ b/azure_iot_edge/hatch.toml @@ -3,7 +3,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] protocol = ["tls"] [envs.default.overrides] diff --git a/azure_iot_edge/pyproject.toml b/azure_iot_edge/pyproject.toml index bf2283ba5c6bf..0080f809a7865 100644 --- a/azure_iot_edge/pyproject.toml +++ b/azure_iot_edge/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/boundary/changelog.d/18207.added b/boundary/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/boundary/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/boundary/hatch.toml b/boundary/hatch.toml index e95adb4f00b0b..7187108da4532 100644 --- a/boundary/hatch.toml +++ b/boundary/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] 
+python = ["3.12"] version = ["0.8"] [envs.default.overrides] diff --git a/boundary/pyproject.toml b/boundary/pyproject.toml index 36ebe626ff783..1874f346a8e66 100644 --- a/boundary/pyproject.toml +++ b/boundary/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/btrfs/changelog.d/18207.added b/btrfs/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/btrfs/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/btrfs/hatch.toml b/btrfs/hatch.toml index fb5625a080ed5..93bc6bef22ef8 100644 --- a/btrfs/hatch.toml +++ b/btrfs/hatch.toml @@ -8,4 +8,4 @@ platforms = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/btrfs/pyproject.toml b/btrfs/pyproject.toml index 69ac8f8563694..b357e8e8da0ee 100644 --- a/btrfs/pyproject.toml +++ b/btrfs/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cacti/changelog.d/18207.added b/cacti/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cacti/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cacti/hatch.toml b/cacti/hatch.toml index 282320c2bd3dd..c3be24889bf10 100644 --- a/cacti/hatch.toml +++ b/cacti/hatch.toml @@ -6,4 +6,4 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/cacti/pyproject.toml b/cacti/pyproject.toml index 081135d2a36f7..45f68faa8e788 100644 --- a/cacti/pyproject.toml +++ b/cacti/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/calico/changelog.d/18207.added b/calico/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/calico/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/calico/hatch.toml b/calico/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/calico/hatch.toml +++ b/calico/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/calico/pyproject.toml b/calico/pyproject.toml index 56abfd47d08f3..4854fe0f4590a 100644 --- a/calico/pyproject.toml +++ b/calico/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cassandra/changelog.d/18207.added b/cassandra/changelog.d/18207.added new file mode 100644 index 
0000000000000..624cd9836c9fe --- /dev/null +++ b/cassandra/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cassandra/hatch.toml b/cassandra/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/cassandra/hatch.toml +++ b/cassandra/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/cassandra/pyproject.toml b/cassandra/pyproject.toml index b247564c5d336..1c6714ec2aafb 100644 --- a/cassandra/pyproject.toml +++ b/cassandra/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cassandra_nodetool/changelog.d/18207.added b/cassandra_nodetool/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cassandra_nodetool/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cassandra_nodetool/hatch.toml b/cassandra_nodetool/hatch.toml index c9c6b5d824311..2abdc87cdea11 100644 --- a/cassandra_nodetool/hatch.toml +++ b/cassandra_nodetool/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.1", "3.0"] [envs.default.overrides] diff --git a/cassandra_nodetool/pyproject.toml b/cassandra_nodetool/pyproject.toml index 10d14859d3313..4d41749f33d38 100644 --- a/cassandra_nodetool/pyproject.toml +++ b/cassandra_nodetool/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ceph/changelog.d/18207.added b/ceph/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ceph/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ceph/hatch.toml b/ceph/hatch.toml index c58aa74eaf910..2147947a70f95 100644 --- a/ceph/hatch.toml +++ b/ceph/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.0", "5.0"] [envs.default.overrides] diff --git a/ceph/pyproject.toml b/ceph/pyproject.toml index ecfde2326b5d4..97e3696c4c478 100644 --- a/ceph/pyproject.toml +++ b/ceph/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cert_manager/changelog.d/18207.added b/cert_manager/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cert_manager/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cert_manager/hatch.toml b/cert_manager/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/cert_manager/hatch.toml +++ b/cert_manager/hatch.toml @@ -1,7 +1,7 
@@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/cert_manager/pyproject.toml b/cert_manager/pyproject.toml index 1c698a5ad99b7..3855c0dcf5a30 100644 --- a/cert_manager/pyproject.toml +++ b/cert_manager/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cilium/changelog.d/18207.added b/cilium/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cilium/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cilium/hatch.toml b/cilium/hatch.toml index 80639298c8037..68cc58696dfb3 100644 --- a/cilium/hatch.toml +++ b/cilium/hatch.toml @@ -1,12 +1,12 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] impl = ["legacy"] version = ["1.9"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.9", "1.10", "1.11"] [envs.default.env-vars] diff --git a/cilium/pyproject.toml b/cilium/pyproject.toml index b8a31cc3cc53b..4dd8108d9a079 100644 --- a/cilium/pyproject.toml +++ b/cilium/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cisco_aci/changelog.d/18207.added b/cisco_aci/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cisco_aci/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cisco_aci/hatch.toml b/cisco_aci/hatch.toml index b271cca6d3e45..0169207d8cffb 100644 --- a/cisco_aci/hatch.toml +++ b/cisco_aci/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/cisco_aci/pyproject.toml b/cisco_aci/pyproject.toml index 703e39f823cf4..eb08546ee1fa2 100644 --- a/cisco_aci/pyproject.toml +++ b/cisco_aci/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/citrix_hypervisor/changelog.d/18207.added b/citrix_hypervisor/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/citrix_hypervisor/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/citrix_hypervisor/hatch.toml b/citrix_hypervisor/hatch.toml index 9d33ad72c8eef..0cc6f15224980 100644 --- a/citrix_hypervisor/hatch.toml +++ b/citrix_hypervisor/hatch.toml @@ -3,5 +3,5 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/citrix_hypervisor/pyproject.toml b/citrix_hypervisor/pyproject.toml index b1efb277e4c16..584263112d453 100644 --- 
a/citrix_hypervisor/pyproject.toml +++ b/citrix_hypervisor/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/clickhouse/changelog.d/18207.added b/clickhouse/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/clickhouse/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/clickhouse/hatch.toml b/clickhouse/hatch.toml index 7f7517e6e5e86..fa5265c7e434a 100644 --- a/clickhouse/hatch.toml +++ b/clickhouse/hatch.toml @@ -4,7 +4,7 @@ CLICKHOUSE_REPOSITORY = "clickhouse/clickhouse-server" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["18", "19", "20", "21.8", "22.7"] [envs.default.overrides] diff --git a/clickhouse/pyproject.toml b/clickhouse/pyproject.toml index 1f16c0f0e5523..0944ab13d5d9d 100644 --- a/clickhouse/pyproject.toml +++ b/clickhouse/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cloud_foundry_api/changelog.d/18207.added b/cloud_foundry_api/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cloud_foundry_api/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cloud_foundry_api/hatch.toml b/cloud_foundry_api/hatch.toml index 84290a4e2ebe1..056785192a3b5 100644 --- a/cloud_foundry_api/hatch.toml +++ b/cloud_foundry_api/hatch.toml @@ -9,4 +9,4 @@ mypy-deps = [ e2e-env = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/cloud_foundry_api/pyproject.toml b/cloud_foundry_api/pyproject.toml index 9b7921cbfd05e..d1909b403b19d 100644 --- a/cloud_foundry_api/pyproject.toml +++ b/cloud_foundry_api/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/cloudera/changelog.d/18207.added b/cloudera/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cloudera/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cloudera/hatch.toml b/cloudera/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/cloudera/hatch.toml +++ b/cloudera/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/cloudera/pyproject.toml b/cloudera/pyproject.toml index c7f78be036b73..da6cd2de5037e 100644 --- a/cloudera/pyproject.toml +++ b/cloudera/pyproject.toml @@ -27,7 +27,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: 
System :: Monitoring", ] dependencies = [ diff --git a/cockroachdb/changelog.d/18207.added b/cockroachdb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/cockroachdb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/cockroachdb/hatch.toml b/cockroachdb/hatch.toml index 62b5bcb8475d2..e7ee92c1ab643 100644 --- a/cockroachdb/hatch.toml +++ b/cockroachdb/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.0", "22.1", "23.2"] [envs.default.overrides] diff --git a/cockroachdb/pyproject.toml b/cockroachdb/pyproject.toml index f13e06a527fa9..9118478397f09 100644 --- a/cockroachdb/pyproject.toml +++ b/cockroachdb/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/confluent_platform/changelog.d/18207.added b/confluent_platform/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/confluent_platform/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/confluent_platform/hatch.toml b/confluent_platform/hatch.toml index ebbb590666b02..9271d848e8336 100644 --- a/confluent_platform/hatch.toml +++ b/confluent_platform/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["5.4", "6.2"] [envs.default.overrides] diff --git a/confluent_platform/pyproject.toml b/confluent_platform/pyproject.toml index b3b5252141166..9f26a28d15eda 100644 --- a/confluent_platform/pyproject.toml +++ b/confluent_platform/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/consul/changelog.d/18207.added b/consul/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/consul/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/consul/hatch.toml b/consul/hatch.toml index 67de49b21a7cc..42b3b74d52300 100644 --- a/consul/hatch.toml +++ b/consul/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.6", "1.9"] [envs.default.overrides] diff --git a/consul/pyproject.toml b/consul/pyproject.toml index 55cf34f8ebb11..0573106958db0 100644 --- a/consul/pyproject.toml +++ b/consul/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/coredns/changelog.d/18207.added b/coredns/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/coredns/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the 
python version from 3.11 to 3.12 \ No newline at end of file diff --git a/coredns/hatch.toml b/coredns/hatch.toml index 8d3bcc0f3d099..62fcdb921d5f0 100644 --- a/coredns/hatch.toml +++ b/coredns/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.2", "1.8"] [envs.default.overrides] diff --git a/coredns/pyproject.toml b/coredns/pyproject.toml index ceec99e2da61b..0a768df59a1b6 100644 --- a/coredns/pyproject.toml +++ b/coredns/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/couch/changelog.d/18207.added b/couch/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/couch/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/couch/hatch.toml b/couch/hatch.toml index ee7cc27754fed..b3f69ac5f66bd 100644 --- a/couch/hatch.toml +++ b/couch/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.6", "2.3", "3.1"] [envs.default.overrides] diff --git a/couch/pyproject.toml b/couch/pyproject.toml index 993a987d05f7e..4b7ad5f700d06 100644 --- a/couch/pyproject.toml +++ b/couch/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/couchbase/changelog.d/18207.added b/couchbase/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/couchbase/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/couchbase/hatch.toml b/couchbase/hatch.toml index 32cc203185db6..21a594adf0315 100644 --- a/couchbase/hatch.toml +++ b/couchbase/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] # https://www.couchbase.com/support-policy/enterprise-software version = ["6.6", "7.0", "7.1"] diff --git a/couchbase/pyproject.toml b/couchbase/pyproject.toml index 9b3d7c5557ff7..f1538ebe2d970 100644 --- a/couchbase/pyproject.toml +++ b/couchbase/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/crio/changelog.d/18207.added b/crio/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/crio/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/crio/hatch.toml b/crio/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/crio/hatch.toml +++ b/crio/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/crio/pyproject.toml b/crio/pyproject.toml index 
6a1fc4966154a..f5a1bf83a5c00 100644 --- a/crio/pyproject.toml +++ b/crio/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/datadog_checks_base/changelog.d/18207.added b/datadog_checks_base/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/datadog_checks_base/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/datadog_checks_base/hatch.toml b/datadog_checks_base/hatch.toml index f3d334a271abe..f3c5084b6a5d3 100644 --- a/datadog_checks_base/hatch.toml +++ b/datadog_checks_base/hatch.toml @@ -3,7 +3,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] features = ["db", "deps", "http", "json", "kube"] diff --git a/datadog_checks_base/pyproject.toml b/datadog_checks_base/pyproject.toml index 6134ffd9228ef..1abe90220249a 100644 --- a/datadog_checks_base/pyproject.toml +++ b/datadog_checks_base/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dynamic = [ diff --git a/datadog_checks_dependency_provider/changelog.d/18207.added b/datadog_checks_dependency_provider/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/datadog_checks_dependency_provider/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/datadog_checks_dependency_provider/pyproject.toml b/datadog_checks_dependency_provider/pyproject.toml index e2c1264517a6a..0463186ed5754 100644 --- a/datadog_checks_dependency_provider/pyproject.toml +++ b/datadog_checks_dependency_provider/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "Topic :: System :: Monitoring", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "datadog-checks-base>=11.2.0", diff --git a/datadog_checks_dev/changelog.d/18207.added b/datadog_checks_dev/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/datadog_checks_dev/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/hatch.toml b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/hatch.toml +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/pyproject.toml 
b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/pyproject.toml index 1a1a0d5b43ba5..f64b51df1418b 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/pyproject.toml +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/pyproject.toml @@ -9,7 +9,7 @@ name = "datadog-{project_name}" description = "The {integration_name} check" readme = "README.md" license = "BSD-3-Clause" -requires-python = ">=3.11" +requires-python = ">=3.12" keywords = [ "datadog", "datadog agent", @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/hatch.toml b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/hatch.toml +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/pyproject.toml b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/pyproject.toml index 1a1a0d5b43ba5..f64b51df1418b 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/pyproject.toml +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/pyproject.toml @@ -9,7 +9,7 @@ name = "datadog-{project_name}" description = "The {integration_name} check" readme = "README.md" license = "BSD-3-Clause" -requires-python = ">=3.11" +requires-python = ">=3.12" keywords = [ "datadog", "datadog agent", @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/logs/{check_name}/pyproject.toml b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/logs/{check_name}/pyproject.toml index 75ccd780b7e7a..bda6b5be9e8e6 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/logs/{check_name}/pyproject.toml +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/logs/{check_name}/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/datadog_checks_dev/hatch.toml b/datadog_checks_dev/hatch.toml index ff0f5e90597e4..7bb3c9e11baae 100644 --- a/datadog_checks_dev/hatch.toml +++ b/datadog_checks_dev/hatch.toml @@ -7,13 +7,13 @@ e2e-env = false DDEV_TESTING_PLUGIN = "true" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.overrides] 
matrix.python.features = [ - { value = "cli", if = ["3.11"] }, + { value = "cli", if = ["3.12"] }, ] # TODO: remove this when the old CLI is gone matrix.python.pre-install-commands = [ - { value = "python -m pip install --no-deps --disable-pip-version-check {verbosity:flag:-1} -e ../ddev", if = ["3.11"] }, + { value = "python -m pip install --no-deps --disable-pip-version-check {verbosity:flag:-1} -e ../ddev", if = ["3.12"] }, ] diff --git a/datadog_checks_dev/pyproject.toml b/datadog_checks_dev/pyproject.toml index 9c4b6796cb238..e9e8f703c9281 100644 --- a/datadog_checks_dev/pyproject.toml +++ b/datadog_checks_dev/pyproject.toml @@ -28,7 +28,7 @@ classifiers = [ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "coverage>=5.0.3", diff --git a/datadog_checks_downloader/changelog.d/18207.added b/datadog_checks_downloader/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/datadog_checks_downloader/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/datadog_checks_downloader/hatch.toml b/datadog_checks_downloader/hatch.toml index ef1e2233f7c8a..185e0e2516cb4 100644 --- a/datadog_checks_downloader/hatch.toml +++ b/datadog_checks_downloader/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false diff --git a/datadog_checks_downloader/pyproject.toml b/datadog_checks_downloader/pyproject.toml index 89ad5c54ec84d..2974e65abcc60 100644 --- a/datadog_checks_downloader/pyproject.toml +++ b/datadog_checks_downloader/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dynamic = [ diff --git a/datadog_cluster_agent/changelog.d/18207.added b/datadog_cluster_agent/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/datadog_cluster_agent/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/datadog_cluster_agent/hatch.toml b/datadog_cluster_agent/hatch.toml index ce1f02498d72b..8eb7559f584f9 100644 --- a/datadog_cluster_agent/hatch.toml +++ b/datadog_cluster_agent/hatch.toml @@ -1,5 +1,5 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/datadog_cluster_agent/pyproject.toml b/datadog_cluster_agent/pyproject.toml index a26faebb5773b..4592b159c015d 100644 --- a/datadog_cluster_agent/pyproject.toml +++ b/datadog_cluster_agent/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/dcgm/changelog.d/18207.added b/dcgm/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/dcgm/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git 
a/dcgm/hatch.toml b/dcgm/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/dcgm/hatch.toml +++ b/dcgm/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/dcgm/pyproject.toml b/dcgm/pyproject.toml index 0f6c724933974..e17cc60b915a6 100644 --- a/dcgm/pyproject.toml +++ b/dcgm/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/ddev/changelog.d/18207.added b/ddev/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ddev/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ddev/hatch.toml b/ddev/hatch.toml index 2f299a9ceb09c..398997183a7e4 100644 --- a/ddev/hatch.toml +++ b/ddev/hatch.toml @@ -5,7 +5,7 @@ mypy-args = [ ] [envs.default] -python = "3.11" +python = "3.12" e2e-env = false dependencies = [ "pyyaml", diff --git a/ddev/pyproject.toml b/ddev/pyproject.toml index 49f5f471453ec..45765f43639e5 100644 --- a/ddev/pyproject.toml +++ b/ddev/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "click~=8.1.6", @@ -76,12 +76,12 @@ scripts = ["ddev"] include = '\.pyi?$' line-length = 120 skip-string-normalization = true -target-version = ["py311"] +target-version = ["py312"] extend-exclude = "src/ddev/_version.py" [tool.ruff] exclude = [] -target-version = "py311" +target-version = "py312" line-length = 120 [tool.ruff.lint] diff --git a/ddev/src/ddev/repo/constants.py b/ddev/src/ddev/repo/constants.py index 446b36b2a7a60..d2a40cb3334b5 100644 --- a/ddev/src/ddev/repo/constants.py +++ b/ddev/src/ddev/repo/constants.py @@ -11,4 +11,4 @@ } # This is automatically maintained -PYTHON_VERSION = '3.11' +PYTHON_VERSION = '3.12' diff --git a/directory/changelog.d/18207.added b/directory/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/directory/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/directory/hatch.toml b/directory/hatch.toml index 801832c211847..57ca29df505a9 100644 --- a/directory/hatch.toml +++ b/directory/hatch.toml @@ -1,6 +1,6 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.bench] diff --git a/directory/pyproject.toml b/directory/pyproject.toml index d86492021fe18..77e70ff27ced5 100644 --- a/directory/pyproject.toml +++ b/directory/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/disk/changelog.d/18207.added b/disk/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/disk/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/disk/hatch.toml 
b/disk/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/disk/hatch.toml +++ b/disk/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/disk/pyproject.toml b/disk/pyproject.toml index 638bae2a48b34..7e5ed5c3f73b6 100644 --- a/disk/pyproject.toml +++ b/disk/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/dns_check/changelog.d/18207.added b/dns_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/dns_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/dns_check/hatch.toml b/dns_check/hatch.toml index 61068f0d3abf9..70bd33fcab628 100644 --- a/dns_check/hatch.toml +++ b/dns_check/hatch.toml @@ -3,4 +3,4 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/dns_check/pyproject.toml b/dns_check/pyproject.toml index f9c7f80ae6ca4..afa020b28b601 100644 --- a/dns_check/pyproject.toml +++ b/dns_check/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/docs/developer/setup.md b/docs/developer/setup.md index 14c89b383d8ac..3915fa131a7bd 100644 --- a/docs/developer/setup.md +++ b/docs/developer/setup.md @@ -11,7 +11,7 @@ you intend to work on. ## Python -To work on any integration you must install Python 3.11. +To work on any integration you must install Python 3.12. After installation, restart your terminal and ensure that your newly installed Python comes first in your `PATH`. @@ -25,7 +25,7 @@ After installation, restart your terminal and ensure that your newly installed P then install Python: ``` - brew install python@3.11 + brew install python@3.12 ``` After it completes, check the output to see if it asked you to run any extra commands and if so, execute them. @@ -39,7 +39,7 @@ After installation, restart your terminal and ensure that your newly installed P === "Windows" Windows users have it the easiest. - Download the [Python 3.11 64-bit executable installer](https://www.python.org/downloads/release/python-3115/) and run it. + Download the [Python 3.12 64-bit executable installer](https://www.python.org/downloads/release/python-3124/) and run it. When prompted, be sure to select the option to add to your `PATH`. Also, it is recommended that you choose the per-user installation method. Verify successful `PATH` modification: @@ -51,7 +51,7 @@ After installation, restart your terminal and ensure that your newly installed P === "Linux" Ah, you enjoy difficult things. Are you using Gentoo? - We recommend using either [Miniconda][miniconda-docs] or [pyenv][pyenv-github] to install Python 3.11. Whatever you do, never modify the system Python. + We recommend using either [Miniconda][miniconda-docs] or [pyenv][pyenv-github] to install Python 3.12. Whatever you do, never modify the system Python. 
Verify successful `PATH` modification: @@ -242,11 +242,11 @@ This is if you cloned [integrations-core][] and want to always use the version b === "ARM" ``` - pipx install -e /path/to/integrations-core/ddev --python /opt/homebrew/opt/python@3.11/bin/python3.11 + pipx install -e /path/to/integrations-core/ddev --python /opt/homebrew/opt/python@3.12/bin/python3.12 ``` === "Intel" ``` - pipx install -e /path/to/integrations-core/ddev --python /usr/local/opt/python@3.11/bin/python3.11 + pipx install -e /path/to/integrations-core/ddev --python /usr/local/opt/python@3.12/bin/python3.12 ``` !!! warning diff --git a/dotnetclr/changelog.d/18207.added b/dotnetclr/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/dotnetclr/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/dotnetclr/hatch.toml b/dotnetclr/hatch.toml index 6f2e122f880de..4f891ddac37fd 100644 --- a/dotnetclr/hatch.toml +++ b/dotnetclr/hatch.toml @@ -9,4 +9,4 @@ platforms = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/dotnetclr/pyproject.toml b/dotnetclr/pyproject.toml index 72a9a1f23eeb6..f46f5887ec543 100644 --- a/dotnetclr/pyproject.toml +++ b/dotnetclr/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/druid/changelog.d/18207.added b/druid/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/druid/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/druid/hatch.toml b/druid/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/druid/hatch.toml +++ b/druid/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/druid/pyproject.toml b/druid/pyproject.toml index 5748ccdea4708..ac0fce91714fa 100644 --- a/druid/pyproject.toml +++ b/druid/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ecs_fargate/changelog.d/18207.added b/ecs_fargate/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ecs_fargate/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ecs_fargate/hatch.toml b/ecs_fargate/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/ecs_fargate/hatch.toml +++ b/ecs_fargate/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/ecs_fargate/pyproject.toml b/ecs_fargate/pyproject.toml index ee4dcf77128ce..602ff576e1419 100644 --- a/ecs_fargate/pyproject.toml +++ b/ecs_fargate/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language 
:: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/eks_fargate/changelog.d/18207.added b/eks_fargate/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/eks_fargate/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/eks_fargate/hatch.toml b/eks_fargate/hatch.toml index 169926426dae2..87b66d031883f 100644 --- a/eks_fargate/hatch.toml +++ b/eks_fargate/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false diff --git a/eks_fargate/pyproject.toml b/eks_fargate/pyproject.toml index c0d369b029518..03620d946d5fa 100644 --- a/eks_fargate/pyproject.toml +++ b/eks_fargate/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/elastic/changelog.d/18207.added b/elastic/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/elastic/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/elastic/hatch.toml b/elastic/hatch.toml index cdae4fd101625..8ab01c4ecbb58 100644 --- a/elastic/hatch.toml +++ b/elastic/hatch.toml @@ -2,12 +2,12 @@ base-package-features = ["deps", "http"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] flavor = ["elasticsearch"] version = ["7.2", "7.7", "7.9", "7.10", "8.8"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] # Opensearch, compatible with elasticsearch flavor = ["opensearch"] version = ["1.1"] diff --git a/elastic/pyproject.toml b/elastic/pyproject.toml index b81be0f851e5a..4280b54f62c18 100644 --- a/elastic/pyproject.toml +++ b/elastic/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/envoy/changelog.d/18207.added b/envoy/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/envoy/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/envoy/hatch.toml b/envoy/hatch.toml index e2359d03581ea..6e6ec397259d1 100644 --- a/envoy/hatch.toml +++ b/envoy/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] api-version = ["2", "3"] [envs.default.overrides] diff --git a/envoy/pyproject.toml b/envoy/pyproject.toml index cab901eee95b1..1d549fcae090d 100644 --- a/envoy/pyproject.toml +++ b/envoy/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/esxi/changelog.d/18207.added b/esxi/changelog.d/18207.added new file mode 100644 index 
0000000000000..624cd9836c9fe --- /dev/null +++ b/esxi/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/esxi/hatch.toml b/esxi/hatch.toml index 9fad694f9b43b..fc3f5d074ed9a 100644 --- a/esxi/hatch.toml +++ b/esxi/hatch.toml @@ -1,11 +1,11 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["lab"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["vcsim"] version = ["6.5", "7.0"] diff --git a/esxi/pyproject.toml b/esxi/pyproject.toml index ce9b56b83225d..cbb6d6aea813e 100644 --- a/esxi/pyproject.toml +++ b/esxi/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/etcd/changelog.d/18207.added b/etcd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/etcd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/etcd/hatch.toml b/etcd/hatch.toml index b4788aebb9a34..f3d30b89ee778 100644 --- a/etcd/hatch.toml +++ b/etcd/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["v3.4.26" , "v3.5.9"] [envs.default.overrides] diff --git a/etcd/pyproject.toml b/etcd/pyproject.toml index 09024a3f45830..1bfbef8e3ae09 100644 --- a/etcd/pyproject.toml +++ b/etcd/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/exchange_server/changelog.d/18207.added b/exchange_server/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/exchange_server/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/exchange_server/hatch.toml b/exchange_server/hatch.toml index 413e6c51fe94c..0f9f06cf58486 100644 --- a/exchange_server/hatch.toml +++ b/exchange_server/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/exchange_server/pyproject.toml b/exchange_server/pyproject.toml index d48128619c891..88b90d1f9c0f5 100644 --- a/exchange_server/pyproject.toml +++ b/exchange_server/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/external_dns/changelog.d/18207.added b/external_dns/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/external_dns/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/external_dns/hatch.toml b/external_dns/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/external_dns/hatch.toml +++ 
b/external_dns/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/external_dns/pyproject.toml b/external_dns/pyproject.toml index 6e3e4e253cae1..f6ad12bc045ba 100644 --- a/external_dns/pyproject.toml +++ b/external_dns/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/flink/changelog.d/18207.added b/flink/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/flink/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/flink/pyproject.toml b/flink/pyproject.toml index 2130b12282146..da7755c0c7ac1 100644 --- a/flink/pyproject.toml +++ b/flink/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/fluentd/changelog.d/18207.added b/fluentd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/fluentd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/fluentd/hatch.toml b/fluentd/hatch.toml index 7890ff4df450a..351f3b2acfba8 100644 --- a/fluentd/hatch.toml +++ b/fluentd/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.17"] [envs.default.overrides] diff --git a/fluentd/pyproject.toml b/fluentd/pyproject.toml index 4a41be9d1c2f7..cf89e04ec7ed9 100644 --- a/fluentd/pyproject.toml +++ b/fluentd/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/fluxcd/changelog.d/18207.added b/fluxcd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/fluxcd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/fluxcd/hatch.toml b/fluxcd/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/fluxcd/hatch.toml +++ b/fluxcd/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/fluxcd/pyproject.toml b/fluxcd/pyproject.toml index 138a357f0ebd1..d3a5dc43fad68 100644 --- a/fluxcd/pyproject.toml +++ b/fluxcd/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/fly_io/hatch.toml b/fly_io/hatch.toml index fb8856ae1da1d..9cde6362848ab 100644 --- a/fly_io/hatch.toml +++ b/fly_io/hatch.toml @@ -1,11 +1,11 @@ 
[env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["caddy"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["lab"] [envs.default.overrides] diff --git a/fly_io/pyproject.toml b/fly_io/pyproject.toml index b65bd46e1ff08..ae7927991c535 100644 --- a/fly_io/pyproject.toml +++ b/fly_io/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/foundationdb/changelog.d/18207.added b/foundationdb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/foundationdb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/foundationdb/hatch.toml b/foundationdb/hatch.toml index 97a77e47030a1..2dfbc037ca63c 100644 --- a/foundationdb/hatch.toml +++ b/foundationdb/hatch.toml @@ -7,10 +7,10 @@ dependencies = [ e2e-env = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] protocol = ["tls"] [envs.default.overrides] diff --git a/foundationdb/pyproject.toml b/foundationdb/pyproject.toml index 470049575ebe9..852c3de459e5a 100644 --- a/foundationdb/pyproject.toml +++ b/foundationdb/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/gearmand/changelog.d/18207.added b/gearmand/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/gearmand/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/gearmand/hatch.toml b/gearmand/hatch.toml index f9ae641fd89e2..faa624cac3d33 100644 --- a/gearmand/hatch.toml +++ b/gearmand/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.0", "1.1"] [envs.default.overrides] diff --git a/gearmand/pyproject.toml b/gearmand/pyproject.toml index 6cbb39f22e913..182308e461433 100644 --- a/gearmand/pyproject.toml +++ b/gearmand/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/gitlab/changelog.d/18207.added b/gitlab/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/gitlab/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/gitlab/hatch.toml b/gitlab/hatch.toml index 106c9a86d6018..b7ded317ffcd8 100644 --- a/gitlab/hatch.toml +++ b/gitlab/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] # https://about.gitlab.com/support/statement-of-support/#version-support version = ["13.12", "14.10", "15.10"] diff --git a/gitlab/pyproject.toml 
b/gitlab/pyproject.toml index 2b18e80e4f688..7d201fcd21a28 100644 --- a/gitlab/pyproject.toml +++ b/gitlab/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/gitlab_runner/changelog.d/18207.added b/gitlab_runner/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/gitlab_runner/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/gitlab_runner/hatch.toml b/gitlab_runner/hatch.toml index f723a4530d931..e10701f1d8874 100644 --- a/gitlab_runner/hatch.toml +++ b/gitlab_runner/hatch.toml @@ -5,7 +5,7 @@ DDEV_SKIP_GENERIC_TAGS_CHECK = "true" GITLAB_IMAGE = "gitlab/gitlab-ce" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["10.8.0"] [envs.default.overrides] diff --git a/gitlab_runner/pyproject.toml b/gitlab_runner/pyproject.toml index 6a98e5af356a6..ffd85f719b3bf 100644 --- a/gitlab_runner/pyproject.toml +++ b/gitlab_runner/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/glusterfs/changelog.d/18207.added b/glusterfs/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/glusterfs/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/glusterfs/hatch.toml b/glusterfs/hatch.toml index 6e69a5a11402a..e9d5dffb64861 100644 --- a/glusterfs/hatch.toml +++ b/glusterfs/hatch.toml @@ -6,7 +6,7 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["7.1"] [envs.default.env-vars] diff --git a/glusterfs/pyproject.toml b/glusterfs/pyproject.toml index b6dc2025ac9e5..e70370c454c8e 100644 --- a/glusterfs/pyproject.toml +++ b/glusterfs/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/go_expvar/changelog.d/18207.added b/go_expvar/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/go_expvar/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/go_expvar/hatch.toml b/go_expvar/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/go_expvar/hatch.toml +++ b/go_expvar/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/go_expvar/pyproject.toml b/go_expvar/pyproject.toml index 1ab17cbdbb1cc..0bdea0592f989 100644 --- a/go_expvar/pyproject.toml +++ b/go_expvar/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming 
Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/gunicorn/changelog.d/18207.added b/gunicorn/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/gunicorn/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/gunicorn/hatch.toml b/gunicorn/hatch.toml index d2fe13c38ff4b..5a9c5432fd90e 100644 --- a/gunicorn/hatch.toml +++ b/gunicorn/hatch.toml @@ -1,13 +1,14 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] -version = ["19.9", "20.1"] +python = ["3.12"] +# v22 of gunicorn is the first that supports python 3.12 +version = ["22", "23"] [envs.default.overrides] matrix.version.env-vars = [ - { key = "GUNICORN_VERSION", value = "19.9.0", if = ["19.9"] }, - { key = "GUNICORN_VERSION", value = "20.1.0", if = ["20.1"] }, + { key = "GUNICORN_VERSION", value = "22.0.0", if = ["22"] }, + { key = "GUNICORN_VERSION", value = "23.0.0", if = ["23"] }, ] [envs.default] diff --git a/gunicorn/pyproject.toml b/gunicorn/pyproject.toml index 36eecd88d930e..3b01aaeedcb86 100644 --- a/gunicorn/pyproject.toml +++ b/gunicorn/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/haproxy/changelog.d/18207.added b/haproxy/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/haproxy/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/haproxy/hatch.toml b/haproxy/hatch.toml index 1ed6a7c98c1b6..4e4fff488c80d 100644 --- a/haproxy/hatch.toml +++ b/haproxy/hatch.toml @@ -1,11 +1,11 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.0", "2.2", "2.4", "2.5", "2.6", "2.7"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.0"] impl = ["legacy"] diff --git a/haproxy/pyproject.toml b/haproxy/pyproject.toml index 331fd6d769f85..8b78030486b59 100644 --- a/haproxy/pyproject.toml +++ b/haproxy/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/harbor/changelog.d/18207.added b/harbor/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/harbor/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/harbor/hatch.toml b/harbor/hatch.toml index 155d1da3f4ca9..fee63406f4cb5 100644 --- a/harbor/hatch.toml +++ b/harbor/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.10", "2.0", "2.3"] [envs.default.overrides] diff --git a/harbor/pyproject.toml b/harbor/pyproject.toml index cfa46f3e817ba..c5220e2e61b2c 100644 --- a/harbor/pyproject.toml +++ b/harbor/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI 
Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hazelcast/changelog.d/18207.added b/hazelcast/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hazelcast/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hazelcast/hatch.toml b/hazelcast/hatch.toml index af5357b0b4b14..32f8bc109ed3c 100644 --- a/hazelcast/hatch.toml +++ b/hazelcast/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.0", "5.0"] [envs.default.overrides] diff --git a/hdfs_datanode/changelog.d/18207.added b/hdfs_datanode/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hdfs_datanode/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hdfs_datanode/hatch.toml b/hdfs_datanode/hatch.toml index e4c471521e26d..44e0c07abc17b 100644 --- a/hdfs_datanode/hatch.toml +++ b/hdfs_datanode/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] HDFS_RAW_VERSION = "3.1.3" diff --git a/hdfs_datanode/pyproject.toml b/hdfs_datanode/pyproject.toml index 3052f61d40d3d..e22554bcdc479 100644 --- a/hdfs_datanode/pyproject.toml +++ b/hdfs_datanode/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hdfs_namenode/changelog.d/18207.added b/hdfs_namenode/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hdfs_namenode/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hdfs_namenode/hatch.toml b/hdfs_namenode/hatch.toml index e4c471521e26d..44e0c07abc17b 100644 --- a/hdfs_namenode/hatch.toml +++ b/hdfs_namenode/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] HDFS_RAW_VERSION = "3.1.3" diff --git a/hdfs_namenode/pyproject.toml b/hdfs_namenode/pyproject.toml index 7cff415a294fe..9b56ef48480d6 100644 --- a/hdfs_namenode/pyproject.toml +++ b/hdfs_namenode/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hive/changelog.d/18207.added b/hive/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hive/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hive/hatch.toml b/hive/hatch.toml index ce1f02498d72b..8eb7559f584f9 100644 --- a/hive/hatch.toml +++ b/hive/hatch.toml @@ -1,5 +1,5 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/hive/pyproject.toml b/hive/pyproject.toml 
index 8fda69785c74c..4b29a04186362 100644 --- a/hive/pyproject.toml +++ b/hive/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hivemq/changelog.d/18207.added b/hivemq/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hivemq/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hivemq/hatch.toml b/hivemq/hatch.toml index 29c96ca15410c..c89132b5629e0 100644 --- a/hivemq/hatch.toml +++ b/hivemq/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.3"] [envs.default.overrides] diff --git a/hivemq/pyproject.toml b/hivemq/pyproject.toml index 36cbeaf9e5072..a55b56328dd44 100644 --- a/hivemq/pyproject.toml +++ b/hivemq/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/http_check/changelog.d/18207.added b/http_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/http_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/http_check/hatch.toml b/http_check/hatch.toml index 91dfdd58ecd27..1bd2b7ae9753c 100644 --- a/http_check/hatch.toml +++ b/http_check/hatch.toml @@ -6,4 +6,4 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/http_check/pyproject.toml b/http_check/pyproject.toml index 414969c8f7962..078d51899ecaf 100644 --- a/http_check/pyproject.toml +++ b/http_check/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hudi/changelog.d/18207.added b/hudi/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hudi/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hudi/hatch.toml b/hudi/hatch.toml index 74960053bd5e8..ff5633dcc7f65 100644 --- a/hudi/hatch.toml +++ b/hudi/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] SPARK_VERSION = "3.2.0" diff --git a/hudi/pyproject.toml b/hudi/pyproject.toml index 24f95771d0f47..83e80c2170139 100644 --- a/hudi/pyproject.toml +++ b/hudi/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/hyperv/changelog.d/18207.added b/hyperv/changelog.d/18207.added new file mode 100644 
index 0000000000000..624cd9836c9fe --- /dev/null +++ b/hyperv/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/hyperv/hatch.toml b/hyperv/hatch.toml index dab04656200a9..0a3f9b470993b 100644 --- a/hyperv/hatch.toml +++ b/hyperv/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] platforms = [ diff --git a/hyperv/pyproject.toml b/hyperv/pyproject.toml index 572102aee2449..82f3a96acb8ac 100644 --- a/hyperv/pyproject.toml +++ b/hyperv/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ibm_ace/changelog.d/18207.added b/ibm_ace/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ibm_ace/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ibm_ace/hatch.toml b/ibm_ace/hatch.toml index 9cd5c8e7d957b..6ad867c8725d4 100644 --- a/ibm_ace/hatch.toml +++ b/ibm_ace/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["12"] [envs.default.overrides] diff --git a/ibm_ace/pyproject.toml b/ibm_ace/pyproject.toml index 1b27a99a0bdde..b601bac23083d 100644 --- a/ibm_ace/pyproject.toml +++ b/ibm_ace/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ibm_db2/changelog.d/18207.added b/ibm_db2/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ibm_db2/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ibm_db2/hatch.toml b/ibm_db2/hatch.toml index d7f555ee23243..e57d89a6eff93 100644 --- a/ibm_db2/hatch.toml +++ b/ibm_db2/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["11.1"] [envs.default.env-vars] diff --git a/ibm_db2/pyproject.toml b/ibm_db2/pyproject.toml index ed4b3a696092b..2078c82f10c23 100644 --- a/ibm_db2/pyproject.toml +++ b/ibm_db2/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ibm_i/changelog.d/18207.added b/ibm_i/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ibm_i/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ibm_i/hatch.toml b/ibm_i/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/ibm_i/hatch.toml +++ b/ibm_i/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/ibm_i/pyproject.toml 
b/ibm_i/pyproject.toml index fe8b3fe6d7229..104f0f2b53af7 100644 --- a/ibm_i/pyproject.toml +++ b/ibm_i/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ibm_mq/changelog.d/18207.added b/ibm_mq/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ibm_mq/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ibm_mq/hatch.toml b/ibm_mq/hatch.toml index 4c4f57357d588..84e7759604957 100644 --- a/ibm_mq/hatch.toml +++ b/ibm_mq/hatch.toml @@ -6,11 +6,11 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["9"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["9"] setup = ["cluster"] diff --git a/ibm_mq/pyproject.toml b/ibm_mq/pyproject.toml index c5d42f6be30c3..3cd5ce9f36be8 100644 --- a/ibm_mq/pyproject.toml +++ b/ibm_mq/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ibm_was/changelog.d/18207.added b/ibm_was/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ibm_was/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ibm_was/hatch.toml b/ibm_was/hatch.toml index 845a7f93657c4..87d4bcf1402c2 100644 --- a/ibm_was/hatch.toml +++ b/ibm_was/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] # We do not test IBM WAS v8. IBM themselves no longer build a docker image for this # version and we don't see the need to build and host an image ourselves. 
For reference: # https://github.com/WASdev/ci.docker.websphere-traditional/issues/198 diff --git a/ibm_was/pyproject.toml b/ibm_was/pyproject.toml index d56eac6de342a..59dfa8bd0dfd6 100644 --- a/ibm_was/pyproject.toml +++ b/ibm_was/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ignite/changelog.d/18207.added b/ignite/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ignite/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ignite/hatch.toml b/ignite/hatch.toml index 759749b520cbd..29430b5f3ae3a 100644 --- a/ignite/hatch.toml +++ b/ignite/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.8", "2.14"] [envs.default.overrides] diff --git a/ignite/pyproject.toml b/ignite/pyproject.toml index e42014a2a7e6e..c60bab739962f 100644 --- a/ignite/pyproject.toml +++ b/ignite/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/iis/changelog.d/18207.added b/iis/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/iis/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/iis/hatch.toml b/iis/hatch.toml index 7e3cb4fcef5db..53691c07eaa4c 100644 --- a/iis/hatch.toml +++ b/iis/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/iis/pyproject.toml b/iis/pyproject.toml index b367c7ed75362..2606590ffc384 100644 --- a/iis/pyproject.toml +++ b/iis/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/impala/changelog.d/18207.added b/impala/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/impala/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/impala/hatch.toml b/impala/hatch.toml index 866c962647def..a402ec87b3ca2 100644 --- a/impala/hatch.toml +++ b/impala/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.0.0"] [envs.default.overrides] diff --git a/impala/pyproject.toml b/impala/pyproject.toml index 1676f6ad0f0f4..cfbea39777d50 100644 --- a/impala/pyproject.toml +++ b/impala/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: 
System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/istio/changelog.d/18207.added b/istio/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/istio/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/istio/hatch.toml b/istio/hatch.toml index 3d89bb385df58..5305eac462c46 100644 --- a/istio/hatch.toml +++ b/istio/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.13"] [envs.default.overrides] diff --git a/istio/pyproject.toml b/istio/pyproject.toml index 7ca39cfb40e54..7bbf30c2ef909 100644 --- a/istio/pyproject.toml +++ b/istio/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/jboss_wildfly/changelog.d/18207.added b/jboss_wildfly/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/jboss_wildfly/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/jboss_wildfly/hatch.toml b/jboss_wildfly/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/jboss_wildfly/hatch.toml +++ b/jboss_wildfly/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/jboss_wildfly/pyproject.toml b/jboss_wildfly/pyproject.toml index fba55b673c0b7..52c633037058f 100644 --- a/jboss_wildfly/pyproject.toml +++ b/jboss_wildfly/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/journald/changelog.d/18207.added b/journald/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/journald/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/journald/pyproject.toml b/journald/pyproject.toml index 010340badfe1b..968b34e9a8aff 100644 --- a/journald/pyproject.toml +++ b/journald/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kafka/changelog.d/18207.added b/kafka/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kafka/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kafka/hatch.toml b/kafka/hatch.toml index c368c6753b5a1..2029b40f6bec6 100644 --- a/kafka/hatch.toml +++ b/kafka/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.8", "3.3"] [envs.default.overrides] diff --git a/kafka/pyproject.toml b/kafka/pyproject.toml index 19a35c76e8c35..e3551f1dbcdcd 100644 --- 
a/kafka/pyproject.toml +++ b/kafka/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kafka_consumer/changelog.d/18207.added b/kafka_consumer/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kafka_consumer/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kafka_consumer/hatch.toml b/kafka_consumer/hatch.toml index c2ae46bf79694..2c54fc0f8f402 100644 --- a/kafka_consumer/hatch.toml +++ b/kafka_consumer/hatch.toml @@ -13,11 +13,11 @@ ZK_VERSION = "3.6.4" AUTHENTICATION = "noauth" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.6", "3.3"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.3"] auth = ["ssl", "kerberos"] diff --git a/kafka_consumer/pyproject.toml b/kafka_consumer/pyproject.toml index 3e3eb29708129..49cf77e4a3aea 100644 --- a/kafka_consumer/pyproject.toml +++ b/kafka_consumer/pyproject.toml @@ -22,7 +22,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/karpenter/changelog.d/18207.added b/karpenter/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/karpenter/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/karpenter/hatch.toml b/karpenter/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/karpenter/hatch.toml +++ b/karpenter/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/karpenter/pyproject.toml b/karpenter/pyproject.toml index c2c65ee312910..3b9ed9d4acad5 100644 --- a/karpenter/pyproject.toml +++ b/karpenter/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/kong/changelog.d/18207.added b/kong/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kong/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kong/hatch.toml b/kong/hatch.toml index 8f714cf1fe6de..12f000163835e 100644 --- a/kong/hatch.toml +++ b/kong/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.5.0", "3.0.0"] [envs.default.overrides] diff --git a/kong/pyproject.toml b/kong/pyproject.toml index 5e637f640fd1c..a3e1d8f6a6b8c 100644 --- a/kong/pyproject.toml +++ b/kong/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System 
:: Monitoring", "Private :: Do Not Upload", ] diff --git a/kube_apiserver_metrics/changelog.d/18207.added b/kube_apiserver_metrics/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_apiserver_metrics/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_apiserver_metrics/hatch.toml b/kube_apiserver_metrics/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/kube_apiserver_metrics/hatch.toml +++ b/kube_apiserver_metrics/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/kube_apiserver_metrics/pyproject.toml b/kube_apiserver_metrics/pyproject.toml index b13c476bffdb4..42aa982f44cdb 100644 --- a/kube_apiserver_metrics/pyproject.toml +++ b/kube_apiserver_metrics/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kube_controller_manager/changelog.d/18207.added b/kube_controller_manager/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_controller_manager/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_controller_manager/hatch.toml b/kube_controller_manager/hatch.toml index e3a9dc3faa4d7..9184e295226c9 100644 --- a/kube_controller_manager/hatch.toml +++ b/kube_controller_manager/hatch.toml @@ -11,7 +11,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/kube_controller_manager/pyproject.toml b/kube_controller_manager/pyproject.toml index 17c7abbf86546..571bae1836952 100644 --- a/kube_controller_manager/pyproject.toml +++ b/kube_controller_manager/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kube_dns/changelog.d/18207.added b/kube_dns/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_dns/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_dns/hatch.toml b/kube_dns/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kube_dns/hatch.toml +++ b/kube_dns/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kube_dns/pyproject.toml b/kube_dns/pyproject.toml index 67ea433f081d0..6c8c2b793f826 100644 --- a/kube_dns/pyproject.toml +++ b/kube_dns/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git 
a/kube_metrics_server/changelog.d/18207.added b/kube_metrics_server/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_metrics_server/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_metrics_server/hatch.toml b/kube_metrics_server/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kube_metrics_server/hatch.toml +++ b/kube_metrics_server/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kube_metrics_server/pyproject.toml b/kube_metrics_server/pyproject.toml index ed80bd29a6270..e169008a48904 100644 --- a/kube_metrics_server/pyproject.toml +++ b/kube_metrics_server/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kube_proxy/changelog.d/18207.added b/kube_proxy/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_proxy/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_proxy/hatch.toml b/kube_proxy/hatch.toml index 7514917802f38..b9882b7af0527 100644 --- a/kube_proxy/hatch.toml +++ b/kube_proxy/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] platforms = [ diff --git a/kube_proxy/pyproject.toml b/kube_proxy/pyproject.toml index 6a4c1a1cdafc7..59b2b46a4ba28 100644 --- a/kube_proxy/pyproject.toml +++ b/kube_proxy/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kube_scheduler/changelog.d/18207.added b/kube_scheduler/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kube_scheduler/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kube_scheduler/hatch.toml b/kube_scheduler/hatch.toml index cc70f94c080ac..1e5a9a43783c7 100644 --- a/kube_scheduler/hatch.toml +++ b/kube_scheduler/hatch.toml @@ -5,7 +5,7 @@ base-package-features = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/kube_scheduler/pyproject.toml b/kube_scheduler/pyproject.toml index 620f541b83b70..871ea3e106fcc 100644 --- a/kube_scheduler/pyproject.toml +++ b/kube_scheduler/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kubeflow/pyproject.toml b/kubeflow/pyproject.toml index a7a3f0c00afda..507e230c0b3f3 100644 --- a/kubeflow/pyproject.toml +++ b/kubeflow/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD 
License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/kubelet/changelog.d/18207.added b/kubelet/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kubelet/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kubelet/hatch.toml b/kubelet/hatch.toml index 7797d96f22bc1..2c4aa4d9760ec 100644 --- a/kubelet/hatch.toml +++ b/kubelet/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/kubelet/pyproject.toml b/kubelet/pyproject.toml index 7fd6a3cc53cc4..075bd5b2e3e23 100644 --- a/kubelet/pyproject.toml +++ b/kubelet/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kubernetes_cluster_autoscaler/changelog.d/18207.added b/kubernetes_cluster_autoscaler/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kubernetes_cluster_autoscaler/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kubernetes_cluster_autoscaler/hatch.toml b/kubernetes_cluster_autoscaler/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kubernetes_cluster_autoscaler/hatch.toml +++ b/kubernetes_cluster_autoscaler/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kubernetes_cluster_autoscaler/pyproject.toml b/kubernetes_cluster_autoscaler/pyproject.toml index 0b704a8614b3c..b3d94f3a08466 100644 --- a/kubernetes_cluster_autoscaler/pyproject.toml +++ b/kubernetes_cluster_autoscaler/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/kubernetes_state/changelog.d/18207.added b/kubernetes_state/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kubernetes_state/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kubernetes_state/hatch.toml b/kubernetes_state/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/kubernetes_state/hatch.toml +++ b/kubernetes_state/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/kubernetes_state/pyproject.toml b/kubernetes_state/pyproject.toml index 1d5704d3b20c1..ece60796d07fb 100644 --- a/kubernetes_state/pyproject.toml +++ b/kubernetes_state/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", 
"Private :: Do Not Upload", ] diff --git a/kubevirt_api/hatch.toml b/kubevirt_api/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kubevirt_api/hatch.toml +++ b/kubevirt_api/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kubevirt_api/pyproject.toml b/kubevirt_api/pyproject.toml index 0b70603c9b141..5b09028db4589 100644 --- a/kubevirt_api/pyproject.toml +++ b/kubevirt_api/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/kubevirt_controller/hatch.toml b/kubevirt_controller/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kubevirt_controller/hatch.toml +++ b/kubevirt_controller/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kubevirt_controller/pyproject.toml b/kubevirt_controller/pyproject.toml index 7ef8a76365552..dfd3797339a24 100644 --- a/kubevirt_controller/pyproject.toml +++ b/kubevirt_controller/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/kyototycoon/changelog.d/18207.added b/kyototycoon/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kyototycoon/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kyototycoon/hatch.toml b/kyototycoon/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kyototycoon/hatch.toml +++ b/kyototycoon/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kyototycoon/pyproject.toml b/kyototycoon/pyproject.toml index aac5a3f24775a..7fa95a604f50a 100644 --- a/kyototycoon/pyproject.toml +++ b/kyototycoon/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/kyverno/changelog.d/18207.added b/kyverno/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/kyverno/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/kyverno/hatch.toml b/kyverno/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kyverno/hatch.toml +++ b/kyverno/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/kyverno/pyproject.toml b/kyverno/pyproject.toml index 8769aace3c165..51079fea365e8 100644 --- a/kyverno/pyproject.toml +++ b/kyverno/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: 
System :: Monitoring", ] dependencies = [ diff --git a/lighttpd/changelog.d/18207.added b/lighttpd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/lighttpd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/lighttpd/hatch.toml b/lighttpd/hatch.toml index cafdb3788cad1..261c4802bdf29 100644 --- a/lighttpd/hatch.toml +++ b/lighttpd/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] flavor = ["auth", "noauth"] [envs.default.overrides] diff --git a/lighttpd/pyproject.toml b/lighttpd/pyproject.toml index 28eb6b254122e..ac8ff3cac5eb6 100644 --- a/lighttpd/pyproject.toml +++ b/lighttpd/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/linkerd/changelog.d/18207.added b/linkerd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/linkerd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/linkerd/hatch.toml b/linkerd/hatch.toml index c4f5c489e98a5..46952cc77c421 100644 --- a/linkerd/hatch.toml +++ b/linkerd/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/linkerd/pyproject.toml b/linkerd/pyproject.toml index 45f685a892bf4..458a3e1fb10ac 100644 --- a/linkerd/pyproject.toml +++ b/linkerd/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/linux_proc_extras/changelog.d/18207.added b/linux_proc_extras/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/linux_proc_extras/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/linux_proc_extras/hatch.toml b/linux_proc_extras/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/linux_proc_extras/hatch.toml +++ b/linux_proc_extras/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/linux_proc_extras/pyproject.toml b/linux_proc_extras/pyproject.toml index b92af0be3195b..9d77452d0b67f 100644 --- a/linux_proc_extras/pyproject.toml +++ b/linux_proc_extras/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mapr/changelog.d/18207.added b/mapr/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mapr/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mapr/hatch.toml 
b/mapr/hatch.toml index 0089c2a95f7eb..efa3452fe613c 100644 --- a/mapr/hatch.toml +++ b/mapr/hatch.toml @@ -7,4 +7,4 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/mapr/pyproject.toml b/mapr/pyproject.toml index ad39020b8805b..dd3c0e309f8f1 100644 --- a/mapr/pyproject.toml +++ b/mapr/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mapreduce/changelog.d/18207.added b/mapreduce/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mapreduce/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mapreduce/hatch.toml b/mapreduce/hatch.toml index 85c9830af9b9e..11adac3e3342c 100644 --- a/mapreduce/hatch.toml +++ b/mapreduce/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/mapreduce/pyproject.toml b/mapreduce/pyproject.toml index 1f77e1e9e45e8..cf57dab669bf7 100644 --- a/mapreduce/pyproject.toml +++ b/mapreduce/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/marathon/changelog.d/18207.added b/marathon/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/marathon/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/marathon/hatch.toml b/marathon/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/marathon/hatch.toml +++ b/marathon/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/marathon/pyproject.toml b/marathon/pyproject.toml index be2ec12a08957..0417f459c148c 100644 --- a/marathon/pyproject.toml +++ b/marathon/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/marklogic/changelog.d/18207.added b/marklogic/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/marklogic/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/marklogic/hatch.toml b/marklogic/hatch.toml index 4883100aa7231..b7093e50a81fc 100644 --- a/marklogic/hatch.toml +++ b/marklogic/hatch.toml @@ -6,7 +6,7 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["9.0", "10.0", "11.0"] [envs.default.overrides] diff --git a/marklogic/pyproject.toml b/marklogic/pyproject.toml index c36da31616366..32a75ba8be794 100644 --- a/marklogic/pyproject.toml +++ b/marklogic/pyproject.toml @@ -25,7 +25,7 @@ 
classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mcache/changelog.d/18207.added b/mcache/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mcache/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mcache/hatch.toml b/mcache/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/mcache/hatch.toml +++ b/mcache/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/mcache/pyproject.toml b/mcache/pyproject.toml index 535b4bb5a02ab..3416ceb26b7ab 100644 --- a/mcache/pyproject.toml +++ b/mcache/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mesos_master/changelog.d/18207.added b/mesos_master/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mesos_master/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mesos_master/hatch.toml b/mesos_master/hatch.toml index b34d77b810e1f..9fd016769c5bf 100644 --- a/mesos_master/hatch.toml +++ b/mesos_master/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.7"] [envs.default.overrides] diff --git a/mesos_master/pyproject.toml b/mesos_master/pyproject.toml index e08054638645c..ceed88f8f8ba7 100644 --- a/mesos_master/pyproject.toml +++ b/mesos_master/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mesos_slave/changelog.d/18207.added b/mesos_slave/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mesos_slave/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mesos_slave/hatch.toml b/mesos_slave/hatch.toml index 6c838971ec02e..6b47bf4bf3e3e 100644 --- a/mesos_slave/hatch.toml +++ b/mesos_slave/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.7"] [envs.default.overrides] diff --git a/mesos_slave/pyproject.toml b/mesos_slave/pyproject.toml index ef842e6069708..caa42505979eb 100644 --- a/mesos_slave/pyproject.toml +++ b/mesos_slave/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git 
a/mongo/changelog.d/18207.added b/mongo/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mongo/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mongo/hatch.toml b/mongo/hatch.toml index 57204ea122ef2..30c0e594ed026 100644 --- a/mongo/hatch.toml +++ b/mongo/hatch.toml @@ -7,7 +7,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.4", "5.0", "6.0", "7.0"] flavor = ["standalone", "shard", "auth", "tls"] diff --git a/mongo/pyproject.toml b/mongo/pyproject.toml index ae31a970f390d..de71c7d49ce1a 100644 --- a/mongo/pyproject.toml +++ b/mongo/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/mysql/changelog.d/18207.added b/mysql/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/mysql/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/mysql/hatch.toml b/mysql/hatch.toml index 103bee10d87a2..2f6784e9ce02d 100644 --- a/mysql/hatch.toml +++ b/mysql/hatch.toml @@ -14,7 +14,7 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = [ "5.7", # EOL October 21, 2023 "8.0.36", # EOL April, 2026 @@ -22,12 +22,12 @@ version = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["8.0"] replication = ["group"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] flavor = ["mariadb"] version = [ "10.2", # EOL 23 May 2022 (ended) diff --git a/mysql/pyproject.toml b/mysql/pyproject.toml index 01d6c9a97c5ba..097ffe0d70c8c 100644 --- a/mysql/pyproject.toml +++ b/mysql/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/nagios/changelog.d/18207.added b/nagios/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/nagios/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/nagios/hatch.toml b/nagios/hatch.toml index 8ca57231866cf..1002bb8b79cb3 100644 --- a/nagios/hatch.toml +++ b/nagios/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.4"] [envs.default.overrides] diff --git a/nagios/pyproject.toml b/nagios/pyproject.toml index 67de6ad22912d..559e5828704ce 100644 --- a/nagios/pyproject.toml +++ b/nagios/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/network/changelog.d/18207.added b/network/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/network/changelog.d/18207.added @@ 
-0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/network/hatch.toml b/network/hatch.toml index 265647e85c0b8..89a24834a388b 100644 --- a/network/hatch.toml +++ b/network/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.overrides] platform.windows.e2e-env = { value = false } diff --git a/network/pyproject.toml b/network/pyproject.toml index f038e2d5eeb73..dc9cc589fcbac 100644 --- a/network/pyproject.toml +++ b/network/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/nfsstat/changelog.d/18207.added b/nfsstat/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/nfsstat/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/nfsstat/hatch.toml b/nfsstat/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/nfsstat/hatch.toml +++ b/nfsstat/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/nfsstat/pyproject.toml b/nfsstat/pyproject.toml index 78f478a8bf029..63ba1349c8801 100644 --- a/nfsstat/pyproject.toml +++ b/nfsstat/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/nginx/changelog.d/18207.added b/nginx/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/nginx/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/nginx/hatch.toml b/nginx/hatch.toml index 63446a3942773..0a0a7e7475720 100644 --- a/nginx/hatch.toml +++ b/nginx/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.12", "1.13", "vts"] [envs.default.env-vars] diff --git a/nginx/pyproject.toml b/nginx/pyproject.toml index 0c6c5a37a3f5a..739baaff89c9c 100644 --- a/nginx/pyproject.toml +++ b/nginx/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/nginx_ingress_controller/changelog.d/18207.added b/nginx_ingress_controller/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/nginx_ingress_controller/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/nginx_ingress_controller/hatch.toml b/nginx_ingress_controller/hatch.toml index 821e6ed8e37ca..f62423b83c762 100644 --- a/nginx_ingress_controller/hatch.toml +++ b/nginx_ingress_controller/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] 
[envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/nginx_ingress_controller/pyproject.toml b/nginx_ingress_controller/pyproject.toml index c6ade0257b65b..8fe0a7fdaea79 100644 --- a/nginx_ingress_controller/pyproject.toml +++ b/nginx_ingress_controller/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/nvidia_triton/changelog.d/18207.added b/nvidia_triton/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/nvidia_triton/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/nvidia_triton/hatch.toml b/nvidia_triton/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/nvidia_triton/hatch.toml +++ b/nvidia_triton/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/nvidia_triton/pyproject.toml b/nvidia_triton/pyproject.toml index 17e9c35e0f92a..2803d702469d5 100644 --- a/nvidia_triton/pyproject.toml +++ b/nvidia_triton/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/openldap/changelog.d/18207.added b/openldap/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/openldap/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/openldap/hatch.toml b/openldap/hatch.toml index 912bbc34f2b46..3ffcb288c9702 100644 --- a/openldap/hatch.toml +++ b/openldap/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.4", "2.6"] [envs.default.overrides] diff --git a/openldap/pyproject.toml b/openldap/pyproject.toml index cdbbc1723d277..49683d15481fb 100644 --- a/openldap/pyproject.toml +++ b/openldap/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/openmetrics/changelog.d/18207.added b/openmetrics/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/openmetrics/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/openmetrics/hatch.toml b/openmetrics/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/openmetrics/hatch.toml +++ b/openmetrics/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/openmetrics/pyproject.toml b/openmetrics/pyproject.toml index 999a17e36146f..3175dc1edc5a3 100644 --- a/openmetrics/pyproject.toml +++ b/openmetrics/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", 
"License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/openstack/changelog.d/18207.added b/openstack/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/openstack/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/openstack/hatch.toml b/openstack/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/openstack/hatch.toml +++ b/openstack/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/openstack/pyproject.toml b/openstack/pyproject.toml index 096272e19f7de..7a8e5c4b24995 100644 --- a/openstack/pyproject.toml +++ b/openstack/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/openstack_controller/changelog.d/18207.added b/openstack_controller/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/openstack_controller/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/openstack_controller/hatch.toml b/openstack_controller/hatch.toml index 8d6ec546035fd..a7511eb2248ce 100644 --- a/openstack_controller/hatch.toml +++ b/openstack_controller/hatch.toml @@ -1,19 +1,19 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["legacy"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["gcp"] [envs.default.overrides] -name."^py3.11$".e2e-env = { value = true } -name."^py3.11-legacy$".e2e-env = { value = true } +name."^py3.12$".e2e-env = { value = true } +name."^py3.12-legacy$".e2e-env = { value = true } matrix.setup.e2e-env = { value = true, if = ["gcp"], env = ["TF_VAR_credentials_file", "TF_VAR_instance_name", "TF_VAR_desired_status", "TF_VAR_nat_ip", "TF_VAR_network_ip", "TF_VAR_user"] } matrix.setup.env-vars = [ { key = "USE_OPENSTACK_GCP", value = "true", if = ["gcp"] }, diff --git a/openstack_controller/pyproject.toml b/openstack_controller/pyproject.toml index 92241eb9e8f30..44382e19555e2 100644 --- a/openstack_controller/pyproject.toml +++ b/openstack_controller/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/oracle/changelog.d/18207.added b/oracle/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/oracle/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/oracle/pyproject.toml b/oracle/pyproject.toml index ffdc20014a96f..13d54f400810e 100644 --- a/oracle/pyproject.toml +++ b/oracle/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License 
:: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ossec_security/pyproject.toml b/ossec_security/pyproject.toml index 29062b7091fdf..13e4a768ca451 100644 --- a/ossec_security/pyproject.toml +++ b/ossec_security/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/pan_firewall/changelog.d/18207.added b/pan_firewall/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/pan_firewall/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/pan_firewall/pyproject.toml b/pan_firewall/pyproject.toml index 4fba326e78205..1a856ed8e9e84 100644 --- a/pan_firewall/pyproject.toml +++ b/pan_firewall/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/pdh_check/changelog.d/18207.added b/pdh_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/pdh_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/pdh_check/hatch.toml b/pdh_check/hatch.toml index 413e6c51fe94c..0f9f06cf58486 100644 --- a/pdh_check/hatch.toml +++ b/pdh_check/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/pdh_check/pyproject.toml b/pdh_check/pyproject.toml index 92fb3eebe16b4..998815602818c 100644 --- a/pdh_check/pyproject.toml +++ b/pdh_check/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/pgbouncer/changelog.d/18207.added b/pgbouncer/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/pgbouncer/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/pgbouncer/hatch.toml b/pgbouncer/hatch.toml index 9e4ba1af299e8..b951fdc61f075 100644 --- a/pgbouncer/hatch.toml +++ b/pgbouncer/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.7", "1.8", "1.12"] [envs.default.env-vars] diff --git a/pgbouncer/pyproject.toml b/pgbouncer/pyproject.toml index 884c207d131e9..a79a5e653d120 100644 --- a/pgbouncer/pyproject.toml +++ b/pgbouncer/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not 
Upload", ] diff --git a/php_fpm/changelog.d/18207.added b/php_fpm/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/php_fpm/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/php_fpm/hatch.toml b/php_fpm/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/php_fpm/hatch.toml +++ b/php_fpm/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/php_fpm/pyproject.toml b/php_fpm/pyproject.toml index dee5c967d0b09..959b853eab6bb 100644 --- a/php_fpm/pyproject.toml +++ b/php_fpm/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ping_federate/pyproject.toml b/ping_federate/pyproject.toml index 8f38a7050384b..2940f7c233d93 100644 --- a/ping_federate/pyproject.toml +++ b/ping_federate/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/postfix/changelog.d/18207.added b/postfix/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/postfix/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/postfix/hatch.toml b/postfix/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/postfix/hatch.toml +++ b/postfix/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/postfix/pyproject.toml b/postfix/pyproject.toml index 004b7abc40e11..c9ff6a26814ab 100644 --- a/postfix/pyproject.toml +++ b/postfix/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/postgres/changelog.d/18207.added b/postgres/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/postgres/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/postgres/hatch.toml b/postgres/hatch.toml index 97823d4e37957..1f920fae0f74b 100644 --- a/postgres/hatch.toml +++ b/postgres/hatch.toml @@ -11,7 +11,7 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["9.6", "10.0", "11.0", "12.17", "13.0", "14.0", "15.0", "16.0"] [envs.default.overrides] diff --git a/postgres/pyproject.toml b/postgres/pyproject.toml index 8a9846261bd37..deed82366ec5d 100644 --- a/postgres/pyproject.toml +++ b/postgres/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: 
Do Not Upload", ] diff --git a/powerdns_recursor/changelog.d/18207.added b/powerdns_recursor/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/powerdns_recursor/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/powerdns_recursor/hatch.toml b/powerdns_recursor/hatch.toml index 2aa20377ea5df..457d6dc0e2ba0 100644 --- a/powerdns_recursor/hatch.toml +++ b/powerdns_recursor/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.7", "4.0"] [envs.default.overrides] diff --git a/powerdns_recursor/pyproject.toml b/powerdns_recursor/pyproject.toml index b1a671cb02c8c..86d4a8e7397ca 100644 --- a/powerdns_recursor/pyproject.toml +++ b/powerdns_recursor/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/presto/changelog.d/18207.added b/presto/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/presto/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/presto/hatch.toml b/presto/hatch.toml index d434b8b3fe36b..a57d0922250db 100644 --- a/presto/hatch.toml +++ b/presto/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["346"] [envs.default.overrides] diff --git a/presto/pyproject.toml b/presto/pyproject.toml index d26d8a9ddb241..6e385d08d26e4 100644 --- a/presto/pyproject.toml +++ b/presto/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/process/changelog.d/18207.added b/process/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/process/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/process/hatch.toml b/process/hatch.toml index 3d61446b5d5b6..48af19a1f76fa 100644 --- a/process/hatch.toml +++ b/process/hatch.toml @@ -1,6 +1,6 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.bench] \ No newline at end of file diff --git a/process/pyproject.toml b/process/pyproject.toml index 8978b66f0fb6b..c55c26760190c 100644 --- a/process/pyproject.toml +++ b/process/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/prometheus/changelog.d/18207.added b/prometheus/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/prometheus/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/prometheus/hatch.toml 
b/prometheus/hatch.toml index 2e077452e744b..ec24537869bac 100644 --- a/prometheus/hatch.toml +++ b/prometheus/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] platforms = [ diff --git a/prometheus/pyproject.toml b/prometheus/pyproject.toml index 51a4fe0408275..410862fb39e9c 100644 --- a/prometheus/pyproject.toml +++ b/prometheus/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/proxysql/changelog.d/18207.added b/proxysql/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/proxysql/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/proxysql/hatch.toml b/proxysql/hatch.toml index b5cd0a46d8dbd..1640d02d24d12 100644 --- a/proxysql/hatch.toml +++ b/proxysql/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.4", "2.0"] [envs.default.overrides] diff --git a/proxysql/pyproject.toml b/proxysql/pyproject.toml index 09647491222ab..b01ff51923b82 100644 --- a/proxysql/pyproject.toml +++ b/proxysql/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/pulsar/changelog.d/18207.added b/pulsar/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/pulsar/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/pulsar/hatch.toml b/pulsar/hatch.toml index 3de6c23c1a3b5..0355df95f829c 100644 --- a/pulsar/hatch.toml +++ b/pulsar/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.9"] [envs.default.overrides] diff --git a/pulsar/pyproject.toml b/pulsar/pyproject.toml index e6b54194e4b4b..51820fbd38004 100644 --- a/pulsar/pyproject.toml +++ b/pulsar/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/rabbitmq/changelog.d/18207.added b/rabbitmq/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/rabbitmq/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/rabbitmq/hatch.toml b/rabbitmq/hatch.toml index 72ec427026312..1e6d2c4ba85a6 100644 --- a/rabbitmq/hatch.toml +++ b/rabbitmq/hatch.toml @@ -9,7 +9,7 @@ dependencies = [ # Rabbitmq versions 3.8+ introduce the Prometheus plugin. This is the preferred way to collect metrics. # We still support metrics from management plugin as a legacy option. 
[[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.7", "3.11"] flavor = ["mgmt", "openmetrics"] diff --git a/rabbitmq/pyproject.toml b/rabbitmq/pyproject.toml index fd4bd248d71ee..a88015878d478 100644 --- a/rabbitmq/pyproject.toml +++ b/rabbitmq/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ray/changelog.d/18207.added b/ray/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ray/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ray/hatch.toml b/ray/hatch.toml index 4f348c29ba42d..ce67c426c90cd 100644 --- a/ray/hatch.toml +++ b/ray/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.8"] [envs.default.overrides] diff --git a/ray/pyproject.toml b/ray/pyproject.toml index d523a530380f0..2fb0787081869 100644 --- a/ray/pyproject.toml +++ b/ray/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/redisdb/changelog.d/18207.added b/redisdb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/redisdb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/redisdb/hatch.toml b/redisdb/hatch.toml index ecf7d726206f8..b4f02e3d707e8 100644 --- a/redisdb/hatch.toml +++ b/redisdb/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["5.0", "6.0", "7.0", "cloud"] [envs.default.overrides] diff --git a/redisdb/pyproject.toml b/redisdb/pyproject.toml index 8bf1515cfd702..357cff34f951f 100644 --- a/redisdb/pyproject.toml +++ b/redisdb/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/rethinkdb/changelog.d/18207.added b/rethinkdb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/rethinkdb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/rethinkdb/hatch.toml b/rethinkdb/hatch.toml index 53e68f253fad0..f379e04150daf 100644 --- a/rethinkdb/hatch.toml +++ b/rethinkdb/hatch.toml @@ -6,7 +6,7 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] # Can't support lower 2.3 patch versions due to: https://github.com/rethinkdb/rethinkdb/issues/6108 version = ["2.3", "2.4"] diff --git a/rethinkdb/pyproject.toml b/rethinkdb/pyproject.toml index 88caf7d283c61..722a36b217430 100644 --- a/rethinkdb/pyproject.toml +++ b/rethinkdb/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System 
Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/riak/changelog.d/18207.added b/riak/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/riak/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/riak/hatch.toml b/riak/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/riak/hatch.toml +++ b/riak/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/riak/pyproject.toml b/riak/pyproject.toml index f956b2e9eda5b..5aea83a4b044c 100644 --- a/riak/pyproject.toml +++ b/riak/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/riakcs/changelog.d/18207.added b/riakcs/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/riakcs/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/riakcs/hatch.toml b/riakcs/hatch.toml index bd72781786c80..769d79d915cc1 100644 --- a/riakcs/hatch.toml +++ b/riakcs/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false \ No newline at end of file diff --git a/riakcs/pyproject.toml b/riakcs/pyproject.toml index 3f8df8b6a4c56..2f90c1aa18457 100644 --- a/riakcs/pyproject.toml +++ b/riakcs/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/sap_hana/changelog.d/18207.added b/sap_hana/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/sap_hana/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/sap_hana/hatch.toml b/sap_hana/hatch.toml index 7dcab11f35177..ada46df010993 100644 --- a/sap_hana/hatch.toml +++ b/sap_hana/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.0"] [envs.default.overrides] diff --git a/sap_hana/pyproject.toml b/sap_hana/pyproject.toml index 826b4166ae66e..04d415ff21bb9 100644 --- a/sap_hana/pyproject.toml +++ b/sap_hana/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/scylla/changelog.d/18207.added b/scylla/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/scylla/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/scylla/hatch.toml 
b/scylla/hatch.toml index 1b37eeb75e5cd..7f0416adaee51 100644 --- a/scylla/hatch.toml +++ b/scylla/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.1", "3.3", "5.2"] [envs.default.overrides] diff --git a/scylla/pyproject.toml b/scylla/pyproject.toml index 5e4355b0f5383..18be1cee6fab3 100644 --- a/scylla/pyproject.toml +++ b/scylla/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/sidekiq/changelog.d/18207.added b/sidekiq/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/sidekiq/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/sidekiq/pyproject.toml b/sidekiq/pyproject.toml index 9cc7236bfd39c..c2ca2e73f32c9 100644 --- a/sidekiq/pyproject.toml +++ b/sidekiq/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/silk/changelog.d/18207.added b/silk/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/silk/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/silk/hatch.toml b/silk/hatch.toml index b271cca6d3e45..0169207d8cffb 100644 --- a/silk/hatch.toml +++ b/silk/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] dependencies = [ diff --git a/silk/pyproject.toml b/silk/pyproject.toml index c3fc8baac47f1..02fbc039bf98c 100644 --- a/silk/pyproject.toml +++ b/silk/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/singlestore/changelog.d/18207.added b/singlestore/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/singlestore/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/singlestore/hatch.toml b/singlestore/hatch.toml index c671e67928494..142ef97f7acb9 100644 --- a/singlestore/hatch.toml +++ b/singlestore/hatch.toml @@ -6,4 +6,4 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/singlestore/pyproject.toml b/singlestore/pyproject.toml index 86e5e9297b603..1016e3715cf42 100644 --- a/singlestore/pyproject.toml +++ b/singlestore/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/snmp/changelog.d/18207.added 
b/snmp/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/snmp/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/snmp/hatch.toml b/snmp/hatch.toml index dddf61c8ad498..10e870f34fb6d 100644 --- a/snmp/hatch.toml +++ b/snmp/hatch.toml @@ -2,7 +2,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] snmplistener = ["false", "true"] [envs.default.overrides] diff --git a/snmp/pyproject.toml b/snmp/pyproject.toml index 9e4b5c3e439ae..6ca2c91126604 100644 --- a/snmp/pyproject.toml +++ b/snmp/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/snowflake/changelog.d/18207.added b/snowflake/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/snowflake/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/snowflake/hatch.toml b/snowflake/hatch.toml index 8dd3b0acc729e..0165ed0f9b0d0 100644 --- a/snowflake/hatch.toml +++ b/snowflake/hatch.toml @@ -6,4 +6,4 @@ mypy-deps = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/snowflake/pyproject.toml b/snowflake/pyproject.toml index ee5113b4c0b6f..735f4c2543306 100644 --- a/snowflake/pyproject.toml +++ b/snowflake/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/solr/changelog.d/18207.added b/solr/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/solr/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/solr/hatch.toml b/solr/hatch.toml index ac11d1443411d..d71e1005cecdf 100644 --- a/solr/hatch.toml +++ b/solr/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["8.11", "9.3"] [envs.default.overrides] diff --git a/solr/pyproject.toml b/solr/pyproject.toml index 849370d716b6d..7eb592a6226e8 100644 --- a/solr/pyproject.toml +++ b/solr/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/sonarqube/changelog.d/18207.added b/sonarqube/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/sonarqube/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/sonarqube/hatch.toml b/sonarqube/hatch.toml index 543b1094534fa..7f7f353c14ddb 100644 --- a/sonarqube/hatch.toml +++ b/sonarqube/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["7.9", "8.5", 
"9.3"] # You can use M1 env if you want to run on M1 processor machine diff --git a/sonarqube/pyproject.toml b/sonarqube/pyproject.toml index b67b915b76d58..c1c3183c7a03b 100644 --- a/sonarqube/pyproject.toml +++ b/sonarqube/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/spark/changelog.d/18207.added b/spark/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/spark/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/spark/hatch.toml b/spark/hatch.toml index ef871733dabda..137a0e3269190 100644 --- a/spark/hatch.toml +++ b/spark/hatch.toml @@ -6,7 +6,7 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["2.4", "3.0"] [envs.default.overrides] diff --git a/spark/pyproject.toml b/spark/pyproject.toml index e81d5ee0df3d3..cf6722c7ceb09 100644 --- a/spark/pyproject.toml +++ b/spark/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/sqlserver/changelog.d/18207.added b/sqlserver/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/sqlserver/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/sqlserver/hatch.toml b/sqlserver/hatch.toml index a305f161e8fcf..843ea38127536 100644 --- a/sqlserver/hatch.toml +++ b/sqlserver/hatch.toml @@ -2,7 +2,7 @@ base-package-features = ["deps", "db", "json"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] os = ["linux"] version = ["2017", "2019", "2022"] setup = ["single", "ha"] @@ -12,7 +12,7 @@ setup = ["single", "ha"] # time out. 
until we're able to modify and parallelize the work we'll limit the per-driver tests to only a single # sqlserver version [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] os = ["windows"] driver = ["SQLOLEDB", "SQLNCLI11", "MSOLEDBSQL", "odbc"] version = ["2019", "2022"] diff --git a/sqlserver/pyproject.toml b/sqlserver/pyproject.toml index 14015e55328f5..18c28b1749927 100644 --- a/sqlserver/pyproject.toml +++ b/sqlserver/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/squid/changelog.d/18207.added b/squid/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/squid/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/squid/hatch.toml b/squid/hatch.toml index d423b3a2de5a0..d5db0f0834835 100644 --- a/squid/hatch.toml +++ b/squid/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["4.13", "5.6"] [envs.default.overrides] diff --git a/squid/pyproject.toml b/squid/pyproject.toml index 4184551c8ec92..8f144cfa18e63 100644 --- a/squid/pyproject.toml +++ b/squid/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/ssh_check/changelog.d/18207.added b/ssh_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/ssh_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/ssh_check/hatch.toml b/ssh_check/hatch.toml index ac510b7a85bc6..4913f773c9f75 100644 --- a/ssh_check/hatch.toml +++ b/ssh_check/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["8.1", "9.1"] [envs.default.overrides] diff --git a/ssh_check/pyproject.toml b/ssh_check/pyproject.toml index d3d8d32bf2bb5..c0131118f6a21 100644 --- a/ssh_check/pyproject.toml +++ b/ssh_check/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/statsd/changelog.d/18207.added b/statsd/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/statsd/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/statsd/hatch.toml b/statsd/hatch.toml index e7b33c9f27801..ea46b02834dad 100644 --- a/statsd/hatch.toml +++ b/statsd/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["0.9"] [envs.default.overrides] diff --git a/statsd/pyproject.toml b/statsd/pyproject.toml index f5e3fd2f1033d..c2639c6b9b4ef 100644 --- a/statsd/pyproject.toml +++ 
b/statsd/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/strimzi/changelog.d/18207.added b/strimzi/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/strimzi/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/strimzi/hatch.toml b/strimzi/hatch.toml index ac8e44b6c7f05..bce6b232560ab 100644 --- a/strimzi/hatch.toml +++ b/strimzi/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["0.34"] [envs.default.overrides] diff --git a/strimzi/pyproject.toml b/strimzi/pyproject.toml index a9f27b18da786..c9a760e36dfa2 100644 --- a/strimzi/pyproject.toml +++ b/strimzi/pyproject.toml @@ -27,7 +27,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/supervisord/changelog.d/18207.added b/supervisord/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/supervisord/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/supervisord/hatch.toml b/supervisord/hatch.toml index ace1fdff1a0c7..17fa18cb5164e 100644 --- a/supervisord/hatch.toml +++ b/supervisord/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.3"] [envs.default.overrides] diff --git a/supervisord/pyproject.toml b/supervisord/pyproject.toml index d2c516ce1668a..e7eab7fbad10a 100644 --- a/supervisord/pyproject.toml +++ b/supervisord/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/suricata/pyproject.toml b/suricata/pyproject.toml index e32ad6774087e..efc4f57207b0f 100644 --- a/suricata/pyproject.toml +++ b/suricata/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/system_core/changelog.d/18207.added b/system_core/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/system_core/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/system_core/hatch.toml b/system_core/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/system_core/hatch.toml +++ b/system_core/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/system_core/pyproject.toml b/system_core/pyproject.toml index 6c1b9d79c7b6f..0892c8bb8766c 
100644 --- a/system_core/pyproject.toml +++ b/system_core/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/system_swap/changelog.d/18207.added b/system_swap/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/system_swap/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/system_swap/hatch.toml b/system_swap/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/system_swap/hatch.toml +++ b/system_swap/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/system_swap/pyproject.toml b/system_swap/pyproject.toml index 338cde59201cc..6d867738a2022 100644 --- a/system_swap/pyproject.toml +++ b/system_swap/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/tcp_check/changelog.d/18207.added b/tcp_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/tcp_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/tcp_check/hatch.toml b/tcp_check/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/tcp_check/hatch.toml +++ b/tcp_check/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/tcp_check/pyproject.toml b/tcp_check/pyproject.toml index b19ac5ae591c2..abbf435a04d73 100644 --- a/tcp_check/pyproject.toml +++ b/tcp_check/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/teamcity/changelog.d/18207.added b/teamcity/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/teamcity/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/teamcity/hatch.toml b/teamcity/hatch.toml index 747bf45e6b75a..aba75ffb7e27b 100644 --- a/teamcity/hatch.toml +++ b/teamcity/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] impl = ["legacy", "openmetrics"] [envs.default.overrides] diff --git a/teamcity/pyproject.toml b/teamcity/pyproject.toml index 6c2a2dd305174..e235daa243e39 100644 --- a/teamcity/pyproject.toml +++ b/teamcity/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/tekton/changelog.d/18207.added 
b/tekton/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/tekton/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/tekton/hatch.toml b/tekton/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/tekton/hatch.toml +++ b/tekton/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/tekton/pyproject.toml b/tekton/pyproject.toml index cd0031341db8e..cb1bf1423018b 100644 --- a/tekton/pyproject.toml +++ b/tekton/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/teleport/changelog.d/18207.added b/teleport/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/teleport/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/teleport/hatch.toml b/teleport/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/teleport/hatch.toml +++ b/teleport/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/teleport/pyproject.toml b/teleport/pyproject.toml index 38b162f52c627..6572facaf8a00 100644 --- a/teleport/pyproject.toml +++ b/teleport/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/temporal/changelog.d/18207.added b/temporal/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/temporal/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/temporal/hatch.toml b/temporal/hatch.toml index ef9dd47791214..29f12d02b8667 100644 --- a/temporal/hatch.toml +++ b/temporal/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.19"] [envs.default.overrides] diff --git a/temporal/pyproject.toml b/temporal/pyproject.toml index 56607a9dbe9f9..b9f0768aa6151 100644 --- a/temporal/pyproject.toml +++ b/temporal/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/tenable/changelog.d/18207.added b/tenable/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/tenable/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/tenable/pyproject.toml b/tenable/pyproject.toml index 8ab5356276b6d..fe274a81abac0 100644 --- a/tenable/pyproject.toml +++ b/tenable/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming 
Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/teradata/changelog.d/18207.added b/teradata/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/teradata/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/teradata/hatch.toml b/teradata/hatch.toml index 847ebdf063c08..3c87a67bab2a4 100644 --- a/teradata/hatch.toml +++ b/teradata/hatch.toml @@ -1,14 +1,14 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] setup = ["sandbox"] [envs.default.overrides] -name."^py3.11$".e2e-env = { value = true } +name."^py3.12$".e2e-env = { value = true } matrix.setup.e2e-env = { value = true, if = ["sandbox"], env = ["TERADATA_SERVER"] } matrix.setup.env-vars = [ { key = "USE_TD_SANDBOX", value = "true", if = ["sandbox"] }, diff --git a/teradata/pyproject.toml b/teradata/pyproject.toml index d1199e10cce27..e102a5fd8a90b 100644 --- a/teradata/pyproject.toml +++ b/teradata/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/tibco_ems/hatch.toml b/tibco_ems/hatch.toml index bd72781786c80..769d79d915cc1 100644 --- a/tibco_ems/hatch.toml +++ b/tibco_ems/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false \ No newline at end of file diff --git a/tibco_ems/pyproject.toml b/tibco_ems/pyproject.toml index c903069b76e4c..282743eef2730 100644 --- a/tibco_ems/pyproject.toml +++ b/tibco_ems/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/tls/changelog.d/18207.added b/tls/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/tls/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/tls/hatch.toml b/tls/hatch.toml index 91dfdd58ecd27..1bd2b7ae9753c 100644 --- a/tls/hatch.toml +++ b/tls/hatch.toml @@ -6,4 +6,4 @@ dependencies = [ ] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/tls/pyproject.toml b/tls/pyproject.toml index 4ae35db079046..42a568fe730bd 100644 --- a/tls/pyproject.toml +++ b/tls/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/tomcat/changelog.d/18207.added b/tomcat/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/tomcat/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/tomcat/hatch.toml b/tomcat/hatch.toml 
index 4b877dba0897c..94161a21b76ed 100644 --- a/tomcat/hatch.toml +++ b/tomcat/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["9.0", "10.0", "10.1"] flavor = ["standalone", "embedded"] diff --git a/tomcat/pyproject.toml b/tomcat/pyproject.toml index 0066e14daf1b0..9f0c160029382 100644 --- a/tomcat/pyproject.toml +++ b/tomcat/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/torchserve/changelog.d/18207.added b/torchserve/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/torchserve/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/torchserve/hatch.toml b/torchserve/hatch.toml index 3a72e9f93a15a..eff98ccee69bc 100644 --- a/torchserve/hatch.toml +++ b/torchserve/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["0.8"] [envs.default.overrides] diff --git a/torchserve/pyproject.toml b/torchserve/pyproject.toml index c759b902488f2..66bd379468471 100644 --- a/torchserve/pyproject.toml +++ b/torchserve/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/traefik_mesh/changelog.d/18207.added b/traefik_mesh/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/traefik_mesh/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/traefik_mesh/hatch.toml b/traefik_mesh/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/traefik_mesh/hatch.toml +++ b/traefik_mesh/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/traefik_mesh/pyproject.toml b/traefik_mesh/pyproject.toml index 2e6f16a26f5c8..69f2dfd774b3b 100644 --- a/traefik_mesh/pyproject.toml +++ b/traefik_mesh/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/traffic_server/changelog.d/18207.added b/traffic_server/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/traffic_server/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/traffic_server/hatch.toml b/traffic_server/hatch.toml index 6366d9ca2ae04..399e1b63b28b4 100644 --- a/traffic_server/hatch.toml +++ b/traffic_server/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] TRAFFIC_SERVER_VERSION = "9.1.1" diff --git a/traffic_server/pyproject.toml b/traffic_server/pyproject.toml index 
237a7c2169cd9..67931b928ec4f 100644 --- a/traffic_server/pyproject.toml +++ b/traffic_server/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/twemproxy/changelog.d/18207.added b/twemproxy/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/twemproxy/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/twemproxy/hatch.toml b/twemproxy/hatch.toml index ec2b228de127e..d81e8c9cf96b0 100644 --- a/twemproxy/hatch.toml +++ b/twemproxy/hatch.toml @@ -4,7 +4,7 @@ platforms = ["linux", "macos"] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["0.5.0"] [envs.default.env-vars] diff --git a/twemproxy/pyproject.toml b/twemproxy/pyproject.toml index 5dd08ee893c7d..08cbf57491673 100644 --- a/twemproxy/pyproject.toml +++ b/twemproxy/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/twistlock/changelog.d/18207.added b/twistlock/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/twistlock/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/twistlock/hatch.toml b/twistlock/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/twistlock/hatch.toml +++ b/twistlock/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/twistlock/pyproject.toml b/twistlock/pyproject.toml index fd3d22e710ac1..0329130a09502 100644 --- a/twistlock/pyproject.toml +++ b/twistlock/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/varnish/changelog.d/18207.added b/varnish/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/varnish/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/varnish/hatch.toml b/varnish/hatch.toml index 315e875283099..e4feae8302fbe 100644 --- a/varnish/hatch.toml +++ b/varnish/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["5.2", "6.5"] [envs.default.overrides] diff --git a/varnish/pyproject.toml b/varnish/pyproject.toml index cecf1e2183a9f..4411d3db54f02 100644 --- a/varnish/pyproject.toml +++ b/varnish/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git 
a/vault/changelog.d/18207.added b/vault/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/vault/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/vault/hatch.toml b/vault/hatch.toml index 36bb2bc062149..13be1af587ad7 100644 --- a/vault/hatch.toml +++ b/vault/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.9.0"] auth = ["token-auth", "noauth"] diff --git a/vault/pyproject.toml b/vault/pyproject.toml index 4decac34151a4..730bb5f9aca6c 100644 --- a/vault/pyproject.toml +++ b/vault/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/vertica/changelog.d/18207.added b/vertica/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/vertica/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/vertica/hatch.toml b/vertica/hatch.toml index 129c75f0003c9..bdb9496b55c49 100644 --- a/vertica/hatch.toml +++ b/vertica/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["10.1", "11.1", "12.0", "23.3"] [envs.default.overrides] diff --git a/vertica/pyproject.toml b/vertica/pyproject.toml index c54b67d12edc5..bd239d161cf12 100644 --- a/vertica/pyproject.toml +++ b/vertica/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/vllm/changelog.d/18207.added b/vllm/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/vllm/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/vllm/hatch.toml b/vllm/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/vllm/hatch.toml +++ b/vllm/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/vllm/pyproject.toml b/vllm/pyproject.toml index ada53013ef377..5df58ba7e3c8e 100644 --- a/vllm/pyproject.toml +++ b/vllm/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/voltdb/changelog.d/18207.added b/voltdb/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/voltdb/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/voltdb/hatch.toml b/voltdb/hatch.toml index 2353ada783248..fde300a90930a 100644 --- a/voltdb/hatch.toml +++ b/voltdb/hatch.toml @@ -9,7 +9,7 @@ mypy-deps = [ DDEV_SKIP_GENERIC_TAGS_CHECK = "true" [[envs.default.matrix]] -python = ["3.11"] +python = 
["3.12"] version = ["8.4", "10.0"] tls = ["false", "true"] diff --git a/voltdb/pyproject.toml b/voltdb/pyproject.toml index e14e6759a5607..1e81ac3c6f957 100644 --- a/voltdb/pyproject.toml +++ b/voltdb/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/vsphere/changelog.d/18207.added b/vsphere/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/vsphere/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/vsphere/hatch.toml b/vsphere/hatch.toml index a0f6624b570b8..4686ee4444838 100644 --- a/vsphere/hatch.toml +++ b/vsphere/hatch.toml @@ -3,7 +3,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["6.7", "7.0"] [envs.default] diff --git a/vsphere/pyproject.toml b/vsphere/pyproject.toml index d60958d76573a..399ca557e460b 100644 --- a/vsphere/pyproject.toml +++ b/vsphere/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/weaviate/changelog.d/18207.added b/weaviate/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/weaviate/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/weaviate/hatch.toml b/weaviate/hatch.toml index 332a4ab893ca6..c27560d0695d7 100644 --- a/weaviate/hatch.toml +++ b/weaviate/hatch.toml @@ -4,7 +4,7 @@ DDEV_SKIP_GENERIC_TAGS_CHECK = "true" [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["1.20"] auth = ["no-auth", "auth"] diff --git a/weaviate/pyproject.toml b/weaviate/pyproject.toml index abcac32f8d2d9..3e5cf3924bb78 100644 --- a/weaviate/pyproject.toml +++ b/weaviate/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ diff --git a/weblogic/changelog.d/18207.added b/weblogic/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/weblogic/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/weblogic/hatch.toml b/weblogic/hatch.toml index 9bfdb9f9df723..a117109c05929 100644 --- a/weblogic/hatch.toml +++ b/weblogic/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["12", "14"] [envs.default.overrides] diff --git a/weblogic/pyproject.toml b/weblogic/pyproject.toml index 0ecbb9d3d9d0e..d59c3481ab998 100644 --- a/weblogic/pyproject.toml +++ b/weblogic/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming 
Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/win32_event_log/changelog.d/18207.added b/win32_event_log/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/win32_event_log/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/win32_event_log/hatch.toml b/win32_event_log/hatch.toml index 443ff4847ff5b..3400c2946f954 100644 --- a/win32_event_log/hatch.toml +++ b/win32_event_log/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false diff --git a/win32_event_log/pyproject.toml b/win32_event_log/pyproject.toml index 6ddc69694ebfc..5405823ebfb06 100644 --- a/win32_event_log/pyproject.toml +++ b/win32_event_log/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/windows_performance_counters/changelog.d/18207.added b/windows_performance_counters/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/windows_performance_counters/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/windows_performance_counters/hatch.toml b/windows_performance_counters/hatch.toml index 6026000e5fb50..ec12a94baac91 100644 --- a/windows_performance_counters/hatch.toml +++ b/windows_performance_counters/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] platforms = [ diff --git a/windows_performance_counters/pyproject.toml b/windows_performance_counters/pyproject.toml index 98b5e81533591..851a85f31d463 100644 --- a/windows_performance_counters/pyproject.toml +++ b/windows_performance_counters/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/windows_service/changelog.d/18207.added b/windows_service/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/windows_service/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/windows_service/hatch.toml b/windows_service/hatch.toml index 71ba922f308fd..15860b143c506 100644 --- a/windows_service/hatch.toml +++ b/windows_service/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] platforms = [ diff --git a/windows_service/pyproject.toml b/windows_service/pyproject.toml index c7bcbe2d7e58e..b3d5f1be9560c 100644 --- a/windows_service/pyproject.toml +++ b/windows_service/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private 
:: Do Not Upload", ] diff --git a/wmi_check/changelog.d/18207.added b/wmi_check/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/wmi_check/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/wmi_check/hatch.toml b/wmi_check/hatch.toml index 44f3195b61fa0..353f4419cb7cf 100644 --- a/wmi_check/hatch.toml +++ b/wmi_check/hatch.toml @@ -3,7 +3,7 @@ check-types = false [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default] e2e-env = false diff --git a/wmi_check/pyproject.toml b/wmi_check/pyproject.toml index b53c3a3091a41..9637eeb6797dd 100644 --- a/wmi_check/pyproject.toml +++ b/wmi_check/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/yarn/changelog.d/18207.added b/yarn/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/yarn/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/yarn/hatch.toml b/yarn/hatch.toml index b55a0ed85515f..2a7ba6534e9df 100644 --- a/yarn/hatch.toml +++ b/yarn/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] [envs.default.env-vars] DDEV_SKIP_GENERIC_TAGS_CHECK = "true" diff --git a/yarn/pyproject.toml b/yarn/pyproject.toml index e4a4d85c2365c..964786604a91e 100644 --- a/yarn/pyproject.toml +++ b/yarn/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] diff --git a/zk/changelog.d/18207.added b/zk/changelog.d/18207.added new file mode 100644 index 0000000000000..624cd9836c9fe --- /dev/null +++ b/zk/changelog.d/18207.added @@ -0,0 +1 @@ +Bump the python version from 3.11 to 3.12 \ No newline at end of file diff --git a/zk/hatch.toml b/zk/hatch.toml index bf778cb3b4d12..1f5458b389203 100644 --- a/zk/hatch.toml +++ b/zk/hatch.toml @@ -1,7 +1,7 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] version = ["3.5.8", "3.6.2"] ssl = ["true", "false"] diff --git a/zk/pyproject.toml b/zk/pyproject.toml index 1474dab090bf5..8cc1312dac750 100644 --- a/zk/pyproject.toml +++ b/zk/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", "Private :: Do Not Upload", ] From 968a0fed63de32332a927ba63fc3b49157c4c041 Mon Sep 17 00:00:00 2001 From: savandalasaniya-crest Date: Tue, 17 Sep 2024 22:52:27 +0530 Subject: [PATCH 13/23] Add Freshservice integration with no assets (#18563) Co-authored-by: ravindrasojitra-crest --- .github/CODEOWNERS | 5 +++ .github/workflows/config/labeler.yml | 2 ++ freshservice/CHANGELOG.md | 7 ++++ freshservice/README.md | 43 +++++++++++++++++++++++++ freshservice/assets/service_checks.json | 1 + 
freshservice/manifest.json | 41 +++++++++++++++++++++++ 6 files changed, 99 insertions(+) create mode 100644 freshservice/CHANGELOG.md create mode 100644 freshservice/README.md create mode 100644 freshservice/assets/service_checks.json create mode 100644 freshservice/manifest.json diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index b4914e8ab5194..aa04297fa2021 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -260,6 +260,11 @@ datadog_checks_base/datadog_checks/base/checks/windows/ @DataDog/wi /contentful/manifest.json @DataDog/saas-integrations @DataDog/documentation /contentful/assets/logs/ @DataDog/saas-integrations @DataDog/documentation @DataDog/logs-backend +/freshservice/ @DataDog/saas-integrations +/freshservice/*.md @DataDog/saas-integrations @DataDog/documentation +/freshservice/manifest.json @DataDog/saas-integrations @DataDog/documentation +/freshservice/assets/logs/ @DataDog/saas-integrations @DataDog/documentation @DataDog/logs-backend + /godaddy/ @DataDog/saas-integrations /godaddy/*.md @DataDog/saas-integrations @DataDog/documentation /godaddy/manifest.json @DataDog/saas-integrations @DataDog/documentation diff --git a/.github/workflows/config/labeler.yml b/.github/workflows/config/labeler.yml index c9a6794590730..bebee0a7ae6f8 100644 --- a/.github/workflows/config/labeler.yml +++ b/.github/workflows/config/labeler.yml @@ -193,6 +193,8 @@ integration/fly_io: - fly_io/**/* integration/foundationdb: - foundationdb/**/* +integration/freshservice: +- freshservice/**/* integration/gearmand: - gearmand/**/* integration/gitlab: diff --git a/freshservice/CHANGELOG.md b/freshservice/CHANGELOG.md new file mode 100644 index 0000000000000..7a82ad4c0ef67 --- /dev/null +++ b/freshservice/CHANGELOG.md @@ -0,0 +1,7 @@ +# CHANGELOG - freshservice + +## 1.0.0 / 2024-09-11 + +***Added***: + +* Initial Release diff --git a/freshservice/README.md b/freshservice/README.md new file mode 100644 index 0000000000000..f01d2756c87a3 --- /dev/null +++ b/freshservice/README.md @@ -0,0 +1,43 @@ +# Agent Check: freshservice + +## Overview + +This check monitors [freshservice][1]. + +## Setup + +### Installation + +The Freshservice check is included in the [Datadog Agent][2] package. +No additional installation is needed on your server. + +### Configuration + +!!! Add list of steps to set up this integration !!! + +### Validation + +!!! Add steps to validate integration is functioning as expected !!! + +## Data Collected + +### Metrics + +Freshservice does not include any metrics. + +### Service Checks + +Freshservice does not include any service checks. + +### Events + +Freshservice does not include any events. + +## Troubleshooting + +Need help? Contact [Datadog support][3]. 
+ +[1]: **LINK_TO_INTEGRATION_SITE** +[2]: https://app.datadoghq.com/account/settings/agent/latest +[3]: https://docs.datadoghq.com/help/ + diff --git a/freshservice/assets/service_checks.json b/freshservice/assets/service_checks.json new file mode 100644 index 0000000000000..fe51488c7066f --- /dev/null +++ b/freshservice/assets/service_checks.json @@ -0,0 +1 @@ +[] diff --git a/freshservice/manifest.json b/freshservice/manifest.json new file mode 100644 index 0000000000000..3682b14a2e436 --- /dev/null +++ b/freshservice/manifest.json @@ -0,0 +1,41 @@ +{ + "manifest_version": "2.0.0", + "app_uuid": "44b2812e-fea7-4d6a-b0c1-b5e836627090", + "app_id": "freshservice", + "display_on_public_website": false, + "tile": { + "overview": "README.md#Overview", + "configuration": "README.md#Setup", + "support": "README.md#Support", + "changelog": "CHANGELOG.md", + "description": "Gain insights into Freshservice logs", + "title": "Freshservice", + "media": [], + "classifier_tags": [ + "Category::Log Collection", + "Category::Security", + "Offering::Integration", + "Submitted Data Type::Logs" + ] + }, + "assets": { + "integration": { + "auto_install": false, + "source_type_id": 25506291, + "source_type_name": "Freshservice", + "events": { + "creates_events": false + }, + "service_checks": { + "metadata_path": "assets/service_checks.json" + } + } + }, + "author": { + "support_email": "help@datadoghq.com", + "name": "Datadog", + "homepage": "https://www.datadoghq.com", + "sales_email": "info@datadoghq.com" + }, + "oauth": {} +} \ No newline at end of file From 59b93a8558b45e701dccd237c4328cf98b98c577 Mon Sep 17 00:00:00 2001 From: Steven Yuen Date: Tue, 17 Sep 2024 15:42:53 -0400 Subject: [PATCH 14/23] [Release] Bumped sqlserver version to 17.5.3 (#18609) * [Release] Bumped sqlserver version to 17.5.3 * delete changelog file --- requirements-agent-release.txt | 2 +- sqlserver/CHANGELOG.md | 6 ++++++ sqlserver/changelog.d/18586.fixed | 1 - sqlserver/datadog_checks/sqlserver/__about__.py | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) delete mode 100644 sqlserver/changelog.d/18586.fixed diff --git a/requirements-agent-release.txt b/requirements-agent-release.txt index b102e0633871b..ca87469758a64 100644 --- a/requirements-agent-release.txt +++ b/requirements-agent-release.txt @@ -159,7 +159,7 @@ datadog-snowflake==5.9.0 datadog-solr==1.13.0 datadog-sonarqube==3.2.2 datadog-spark==4.3.1 -datadog-sqlserver==17.5.2 +datadog-sqlserver==17.5.3 datadog-squid==2.5.1 datadog-ssh-check==2.10.0 datadog-statsd==1.12.0 diff --git a/sqlserver/CHANGELOG.md b/sqlserver/CHANGELOG.md index e8696079bb49f..f47c2f59107a9 100644 --- a/sqlserver/CHANGELOG.md +++ b/sqlserver/CHANGELOG.md @@ -2,6 +2,12 @@ +## 17.5.3 / 2024-09-17 + +***Fixed***: + +* Fix ODBC config handling for Linux ([#18586](https://github.com/DataDog/integrations-core/pull/18586)) + ## 17.5.2 / 2024-08-28 / Agent 7.57.0 ***Fixed***: diff --git a/sqlserver/changelog.d/18586.fixed b/sqlserver/changelog.d/18586.fixed deleted file mode 100644 index e6177056b7c48..0000000000000 --- a/sqlserver/changelog.d/18586.fixed +++ /dev/null @@ -1 +0,0 @@ -[sqlserver] Fix ODBC config handling for Linux diff --git a/sqlserver/datadog_checks/sqlserver/__about__.py b/sqlserver/datadog_checks/sqlserver/__about__.py index 6a7af39f3ed9a..76ae2e29005a6 100644 --- a/sqlserver/datadog_checks/sqlserver/__about__.py +++ b/sqlserver/datadog_checks/sqlserver/__about__.py @@ -2,4 +2,4 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -__version__ = 
'17.5.2' +__version__ = '17.5.3' From 2a419f75fa05ababbe2a9a2fa29472f7321d9f90 Mon Sep 17 00:00:00 2001 From: "agent-platform-auto-pr[bot]" <153269286+agent-platform-auto-pr[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 15:57:24 -0400 Subject: [PATCH 15/23] Update dependency resolution (#18605) Co-authored-by: Kyle-Neale --- .deps/image_digests.json | 6 +- .deps/resolved/linux-aarch64_3.12.txt | 128 ++++++++++++++++++++++++ .deps/resolved/linux-x86_64_3.12.txt | 129 +++++++++++++++++++++++++ .deps/resolved/macos-x86_64_3.12.txt | 128 ++++++++++++++++++++++++ .deps/resolved/windows-x86_64_3.12.txt | 126 ++++++++++++++++++++++++ 5 files changed, 514 insertions(+), 3 deletions(-) create mode 100644 .deps/resolved/linux-aarch64_3.12.txt create mode 100644 .deps/resolved/linux-x86_64_3.12.txt create mode 100644 .deps/resolved/macos-x86_64_3.12.txt create mode 100644 .deps/resolved/windows-x86_64_3.12.txt diff --git a/.deps/image_digests.json b/.deps/image_digests.json index 18afec2ebf7af..ad73cfcda588d 100644 --- a/.deps/image_digests.json +++ b/.deps/image_digests.json @@ -1,5 +1,5 @@ { - "linux-aarch64": "sha256:63f46c0aaad6c81830b99754b158625e6ff5dbf50cd74e62c098fa46ab6808ec", - "linux-x86_64": "sha256:e854e6d6f8258c2bb80c5f525a9c9f545ce95bd555583e9ae04bbeede9d5666a", - "windows-x86_64": "sha256:ce5ac8bd45f7816a8ea3f55f54d70c8856667ad2cf8d638b4c62728368a65652" + "linux-aarch64": "sha256:f3834f630ad669a0876dafb95a9ffd5b485171536cffbc1170ffbd3cbfab9c1d", + "linux-x86_64": "sha256:952783e733c9c0d08568394a0e925d1f8428d8f04507a2b10bcfd2a1486da2db", + "windows-x86_64": "sha256:2fd9816b42a300080a0bcc57753e577f0140bb8b0d06e14c54fc8f634d93a2db" } diff --git a/.deps/resolved/linux-aarch64_3.12.txt b/.deps/resolved/linux-aarch64_3.12.txt new file mode 100644 index 0000000000000..6503dd7bf1656 --- /dev/null +++ b/.deps/resolved/linux-aarch64_3.12.txt @@ -0,0 +1,128 @@ +aerospike @ https://agent-int-packages.datadoghq.com/built/aerospike/aerospike-7.1.1-20240917155304-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=8573244c130f49445cf9af1c1423f13e0c2a2aef7395045e21a0e3cf760c2213 +annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-types/annotated_types-0.7.0-py3-none-any.whl#sha256=1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 +asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 +attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd +azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 +bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68 +beautifulsoup4 @ 
https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed +binary @ https://agent-int-packages.datadoghq.com/external/binary/binary-1.0.0-py2.py3-none-any.whl#sha256=e1b61f3a5c002717d1a28e4d9d2dc8acbc9d6b12baf7b1e4ab25d743da97e323 +boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py3-none-any.whl#sha256=add26dd58e076dfd387013da4704716d5cff215cf14f6d4347c4b9b7fc1f0b8e +botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 +bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d +cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 +certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 +cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 +charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a +clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=bbfd83713e5a7a700c4a8200e921bc580fd7cba5f3b9d732172a5d82b12b3e20 +clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342 +cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402155018-py3-none-manylinux2014_aarch64.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 +confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240917155305-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=6dd31639ba4c15fcb59f65842e446ecda997e8c826dec9e9b1bec01748b5066d +cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5 +ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e +ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=081bb12a54c46c9804e0645320d827deaff626b9035ba13ac97567149e07cdb5 +decorator @ 
https://agent-int-packages.datadoghq.com/external/decorator/decorator-5.1.1-py3-none-any.whl#sha256=b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 +deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprecated-1.2.14-py2.py3-none-any.whl#sha256=6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c +dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 +dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca +envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 +foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402155019-py3-none-manylinux2014_aarch64.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd +google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 +gssapi @ https://agent-int-packages.datadoghq.com/built/gssapi/gssapi-1.8.3-20240917155305-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=4043e268278e0e4430d92cfb51db3103bd238b5c658197b7a9c2acc35c789d67 +hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f +idna @ https://agent-int-packages.datadoghq.com/external/idna/idna-3.10-py3-none-any.whl#sha256=946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +importlib-metadata @ https://agent-int-packages.datadoghq.com/external/importlib-metadata/importlib_metadata-8.4.0-py3-none-any.whl#sha256=66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 +in-toto @ https://agent-int-packages.datadoghq.com/external/in-toto/in_toto-2.0.0-py3-none-any.whl#sha256=eaf71733cdd5b2309b58b24169c13c24701da72baa43b7432d1dffc308e7bf32 +iso8601 @ https://agent-int-packages.datadoghq.com/external/iso8601/iso8601-2.1.0-py3-none-any.whl#sha256=aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242 +jellyfish @ https://agent-int-packages.datadoghq.com/external/jellyfish/jellyfish-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=87dc2a82c45b773a579fb695a5956a54106c1187f27c9ccee8508726d2e59cfc +jmespath @ https://agent-int-packages.datadoghq.com/external/jmespath/jmespath-1.0.1-py3-none-any.whl#sha256=02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 +jsonpatch @ https://agent-int-packages.datadoghq.com/external/jsonpatch/jsonpatch-1.33-py2.py3-none-any.whl#sha256=0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade +jsonpointer @ https://agent-int-packages.datadoghq.com/external/jsonpointer/jsonpointer-3.0.0-py2.py3-none-any.whl#sha256=13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 +keystoneauth1 @ 
https://agent-int-packages.datadoghq.com/external/keystoneauth1/keystoneauth1-5.8.0-py3-none-any.whl#sha256=e69dff80c509ab64d4de4494658d914e81f26af720828dc584ceee74ecd666d9 +krb5 @ https://agent-int-packages.datadoghq.com/built/krb5/krb5-0.6.0-20240917155306-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=47e12d5a37cd54c721688b99389cf6bdd555597ad08ab6787273ddc63a10a1e7 +kubernetes @ https://agent-int-packages.datadoghq.com/external/kubernetes/kubernetes-30.1.0-py2.py3-none-any.whl#sha256=e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d +ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2.py3-none-any.whl#sha256=5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70 +lxml @ https://agent-int-packages.datadoghq.com/built/lxml/lxml-4.9.4-20240917155306-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=06e8662fc23fce90e509842155152aacaca06be101edc42de77614f2e2c726db +lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=33c9a6fd20767ccaf70649982f8f3eeb0884035c150c0b818ea660152cf3c809 +mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 +msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240917155307-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=b18f919b60520879078083d19caaf9bb25c52381d9622ceae708245cf9826b25 +oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca +openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 +opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 +orjson @ https://agent-int-packages.datadoghq.com/external/orjson/orjson-3.10.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=76ac14cd57df0572453543f8f2575e2d01ae9e790c21f57627803f5e79b0d3c3 +os-service-types @ https://agent-int-packages.datadoghq.com/external/os-service-types/os_service_types-1.7.0-py2.py3-none-any.whl#sha256=0505c72205690910077fb72b88f2a1f07533c8d39f2fe75b29583481764965d6 +packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packaging-24.1-py3-none-any.whl#sha256=5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 +pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 +pbr @ 
https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.3-py3-none-any.whl#sha256=50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 +ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce +portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf +prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 +protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl#sha256=b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce +psutil @ https://agent-int-packages.datadoghq.com/built/psutil/psutil-5.9.6-20240917155307-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=2ba6dce6cf517094b99ea70c0e82d657bdf4156a3eb5d775cf67bc7f44e02005 +psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212 +pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd +pyasyncore @ https://agent-int-packages.datadoghq.com/external/pyasyncore/pyasyncore-1.0.4-py3-none-any.whl#sha256=9e5f6dc9dc057c56370b7a5cdb4c4670fd4b0556de2913ed1f428cd6a5366895 +pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623 +pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 +pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f +pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 +pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=c6b804bb4f2d9dc389cc9e827d579fa327272cdb0629a99bfe5b83cb3e269ebf +pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c +pynacl @ 
https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 +pyodbc @ https://agent-int-packages.datadoghq.com/built/pyodbc/pyodbc-5.1.0-20240917155308-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=677380a4d52d3c02d426d11a77fcf205f19926da759f070eeed4007e546f6b4a +pyopenssl @ https://agent-int-packages.datadoghq.com/external/pyopenssl/pyOpenSSL-24.2.1-py3-none-any.whl#sha256=967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d +pysmi @ https://agent-int-packages.datadoghq.com/external/pysmi/pysmi-1.2.1-py3-none-any.whl#sha256=d97c60de9f81d33ab2899124d95a94fa7fefacc86ab6e00cbfec543a073e6d33 +pysnmp @ https://agent-int-packages.datadoghq.com/external/pysnmp/pysnmp-5.1.0-py3-none-any.whl#sha256=375a8adfc6820faf24ace6761a6d20544e60580d714ff7266df272850c39b439 +pysnmp-mibs @ https://agent-int-packages.datadoghq.com/external/pysnmp-mibs/pysnmp_mibs-0.1.6-py2.py3-none-any.whl#sha256=5e153ebe8e767c07940cea435f866c623ff6b2376155c7da75085b08d3774d48 +pysnmpcrypto @ https://agent-int-packages.datadoghq.com/external/pysnmpcrypto/pysnmpcrypto-0.0.4-py2.py3-none-any.whl#sha256=5889733caa030f45d9e03ea9d6370fb06426a8cb7f839aabbcdde33c6f634679 +pysocks @ https://agent-int-packages.datadoghq.com/external/pysocks/PySocks-1.7.1-py3-none-any.whl#sha256=2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 +pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0.11.1-py3-none-any.whl#sha256=129a4294f2c4d681d5875240ef87accc6f1d921e8983737fb0b59642b397951e +python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 +python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 +python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 +pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172038-py2.py3-none-manylinux2014_aarch64.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 +pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 +redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 +requests @ https://agent-int-packages.datadoghq.com/external/requests/requests-2.32.3-py3-none-any.whl#sha256=70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests-kerberos @ https://agent-int-packages.datadoghq.com/external/requests-kerberos/requests_kerberos-0.15.0-py2.py3-none-any.whl#sha256=ba9b0980b8489c93bfb13854fd118834e576d6700bfea3745cb2e62278cd16a6 +requests-ntlm @ 
https://agent-int-packages.datadoghq.com/external/requests-ntlm/requests_ntlm-1.3.0-py3-none-any.whl#sha256=4c7534a7d0e482bb0928531d621be4b2c74ace437e88c5a357ceb7452d25a510 +requests-oauthlib @ https://agent-int-packages.datadoghq.com/external/requests-oauthlib/requests_oauthlib-2.0.0-py2.py3-none-any.whl#sha256=7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 +requests-toolbelt @ https://agent-int-packages.datadoghq.com/external/requests-toolbelt/requests_toolbelt-1.0.0-py2.py3-none-any.whl#sha256=cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 +requests-unixsocket2 @ https://agent-int-packages.datadoghq.com/external/requests-unixsocket2/requests_unixsocket2-0.4.2-py3-none-any.whl#sha256=701fcd49d74bc0f759bbe45c4dfda0045fd89652948c2b473b1a312214c3770b +requestsexceptions @ https://agent-int-packages.datadoghq.com/external/requestsexceptions/requestsexceptions-1.4.0-py2.py3-none-any.whl#sha256=3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3 +rethinkdb @ https://agent-int-packages.datadoghq.com/external/rethinkdb/rethinkdb-2.4.9-py2.py3-none-any.whl#sha256=dd2455b3eab5266d8006eeefeb5eb970a3697dfe6aa18e1ab6e1d99233badfcb +rsa @ https://agent-int-packages.datadoghq.com/external/rsa/rsa-4.9-py3-none-any.whl#sha256=90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 +s3transfer @ https://agent-int-packages.datadoghq.com/external/s3transfer/s3transfer-0.10.2-py3-none-any.whl#sha256=eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69 +securesystemslib @ https://agent-int-packages.datadoghq.com/external/securesystemslib/securesystemslib-0.28.0-py3-none-any.whl#sha256=9e6b9abe36a511d4f52c759069db8f6f650362ba82d6efc7bc7466a458b3f499 +semver @ https://agent-int-packages.datadoghq.com/external/semver/semver-3.0.2-py3-none-any.whl#sha256=b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4 +service-identity @ https://agent-int-packages.datadoghq.com/external/service-identity/service_identity-24.1.0-py3-none-any.whl#sha256=a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a +setuptools @ https://agent-int-packages.datadoghq.com/external/setuptools/setuptools-75.1.0-py3-none-any.whl#sha256=35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 +simplejson @ https://agent-int-packages.datadoghq.com/external/simplejson/simplejson-3.19.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=d0b0efc7279d768db7c74d3d07f0b5c81280d16ae3fb14e9081dc903e8360771 +six @ https://agent-int-packages.datadoghq.com/external/six/six-1.16.0-py2.py3-none-any.whl#sha256=8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +snowflake-connector-python @ https://agent-int-packages.datadoghq.com/external/snowflake-connector-python/snowflake_connector_python-3.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=473642c0e628b8b9f264cbf31c7f4de44974373db43052b6542a66e751159caf +sortedcontainers @ https://agent-int-packages.datadoghq.com/external/sortedcontainers/sortedcontainers-2.4.0-py2.py3-none-any.whl#sha256=a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +soupsieve @ https://agent-int-packages.datadoghq.com/external/soupsieve/soupsieve-2.6-py3-none-any.whl#sha256=e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 +stevedore @ https://agent-int-packages.datadoghq.com/external/stevedore/stevedore-5.3.0-py3-none-any.whl#sha256=1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78 +supervisor @ 
https://agent-int-packages.datadoghq.com/external/supervisor/supervisor-4.2.5-py2.py3-none-any.whl#sha256=2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08 +tomlkit @ https://agent-int-packages.datadoghq.com/external/tomlkit/tomlkit-0.13.2-py3-none-any.whl#sha256=7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde +tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-any.whl#sha256=a22ab5fa6daf910b3052929fdce42ccad8a300e5e85715daaff9592aed980f7a +typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d +tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 +uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240917155308-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl#sha256=6e4a3bb156fe2f24c0f00f17ee735a6caccb674aa6f307a164248c989baa8914 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac +vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b +websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 +wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl#sha256=94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 +xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/linux-x86_64_3.12.txt b/.deps/resolved/linux-x86_64_3.12.txt new file mode 100644 index 0000000000000..018f55a9b844d --- /dev/null +++ b/.deps/resolved/linux-x86_64_3.12.txt @@ -0,0 +1,129 @@ +aerospike @ https://agent-int-packages.datadoghq.com/built/aerospike/aerospike-7.1.1-20240917155248-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=50a14738d4102572c0dd7ccddcc735074f30902813190369f0765f2f4a087300 +annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-types/annotated_types-0.7.0-py3-none-any.whl#sha256=1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 +asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 +attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +aws-requests-auth @ 
https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd +azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 +bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe +beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed +binary @ https://agent-int-packages.datadoghq.com/external/binary/binary-1.0.0-py2.py3-none-any.whl#sha256=e1b61f3a5c002717d1a28e4d9d2dc8acbc9d6b12baf7b1e4ab25d743da97e323 +boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py3-none-any.whl#sha256=add26dd58e076dfd387013da4704716d5cff215cf14f6d4347c4b9b7fc1f0b8e +botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 +bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d +cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 +certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 +cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 +charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b +clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=f1f8fec4027cd648f72009ef59c9b76c5a27a33ca166b4e79e46542009429813 +clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43 +cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154838-py3-none-manylinux2014_x86_64.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 +confluent-kafka @ 
https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240917155249-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=aea2f7e617f82476c56eed42d0b5cf6b944a2769d93c8bf7baf0adef19d19bf7 +cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e +ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e +ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=5fc70ac472093093d9908965d95d977206372a3ddc8a2562acf9dfd57c6864d8 +decorator @ https://agent-int-packages.datadoghq.com/external/decorator/decorator-5.1.1-py3-none-any.whl#sha256=b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 +deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprecated-1.2.14-py2.py3-none-any.whl#sha256=6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c +dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 +dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca +envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 +foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154840-py3-none-manylinux2014_x86_64.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd +google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 +gssapi @ https://agent-int-packages.datadoghq.com/built/gssapi/gssapi-1.8.3-20240917155249-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=c2bf203c7f213368bc8c2de664495f8e4699235ab2d58251a45cb30acbd92c08 +hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f +idna @ https://agent-int-packages.datadoghq.com/external/idna/idna-3.10-py3-none-any.whl#sha256=946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +importlib-metadata @ https://agent-int-packages.datadoghq.com/external/importlib-metadata/importlib_metadata-8.4.0-py3-none-any.whl#sha256=66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 +in-toto @ https://agent-int-packages.datadoghq.com/external/in-toto/in_toto-2.0.0-py3-none-any.whl#sha256=eaf71733cdd5b2309b58b24169c13c24701da72baa43b7432d1dffc308e7bf32 +iso8601 @ https://agent-int-packages.datadoghq.com/external/iso8601/iso8601-2.1.0-py3-none-any.whl#sha256=aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242 +jellyfish @ 
https://agent-int-packages.datadoghq.com/external/jellyfish/jellyfish-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=e9d4002d01252f18eb26f28b66f6c9ce0696221804d8769553c5912b2f221a18 +jmespath @ https://agent-int-packages.datadoghq.com/external/jmespath/jmespath-1.0.1-py3-none-any.whl#sha256=02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 +jsonpatch @ https://agent-int-packages.datadoghq.com/external/jsonpatch/jsonpatch-1.33-py2.py3-none-any.whl#sha256=0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade +jsonpointer @ https://agent-int-packages.datadoghq.com/external/jsonpointer/jsonpointer-3.0.0-py2.py3-none-any.whl#sha256=13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 +keystoneauth1 @ https://agent-int-packages.datadoghq.com/external/keystoneauth1/keystoneauth1-5.8.0-py3-none-any.whl#sha256=e69dff80c509ab64d4de4494658d914e81f26af720828dc584ceee74ecd666d9 +krb5 @ https://agent-int-packages.datadoghq.com/built/krb5/krb5-0.6.0-20240917155250-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=f90b6b9bf1e964f081525253d1e20ebda0ca15f05df2194a16647d0c89c6293e +kubernetes @ https://agent-int-packages.datadoghq.com/external/kubernetes/kubernetes-30.1.0-py2.py3-none-any.whl#sha256=e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d +ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2.py3-none-any.whl#sha256=5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70 +lxml @ https://agent-int-packages.datadoghq.com/built/lxml/lxml-4.9.4-20240917155250-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=0c1d96942a08b30d1ae8192b59249d91254eccd4198361eabdcd1ef2e76158dc +lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=bca8fccc15e3add173da91be8f34121578dc777711ffd98d399be35487c934bf +mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3 +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 +msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240917155251-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=9f873ab2793981115a8d0fdfba8869ae6b5d312c79045db1867d9294541dcb3d +oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca +openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 +opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 +orjson @ https://agent-int-packages.datadoghq.com/external/orjson/orjson-3.10.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=a0c6a008e91d10a2564edbb6ee5069a9e66df3fbe11c9a005cb411f441fd2c09 
+os-service-types @ https://agent-int-packages.datadoghq.com/external/os-service-types/os_service_types-1.7.0-py2.py3-none-any.whl#sha256=0505c72205690910077fb72b88f2a1f07533c8d39f2fe75b29583481764965d6 +packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packaging-24.1-py3-none-any.whl#sha256=5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 +pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 +pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.3-py3-none-any.whl#sha256=50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 +ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce +portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf +prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 +protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl#sha256=a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25 +psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4 +psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119 +pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd +pyasyncore @ https://agent-int-packages.datadoghq.com/external/pyasyncore/pyasyncore-1.0.4-py3-none-any.whl#sha256=9e5f6dc9dc057c56370b7a5cdb4c4670fd4b0556de2913ed1f428cd6a5366895 +pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4 +pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 +pydantic-core @ 
https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e +pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 +pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=940d456774b17814bac5ea7fc28188c7a1338d4a233efbb6ba01de957bded2e8 +pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240917155251-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=f62a434af2e4da5b681bea1c2203f0b2d6a3cc4b27ee74b3dfb57bf0a558049c +pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c +pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 +pyodbc @ https://agent-int-packages.datadoghq.com/built/pyodbc/pyodbc-5.1.0-20240917155252-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=967887428c3564ee39edc65074e94d1f0a94fb3d1477cfcff080baa699525754 +pyopenssl @ https://agent-int-packages.datadoghq.com/external/pyopenssl/pyOpenSSL-24.2.1-py3-none-any.whl#sha256=967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d +pysmi @ https://agent-int-packages.datadoghq.com/external/pysmi/pysmi-1.2.1-py3-none-any.whl#sha256=d97c60de9f81d33ab2899124d95a94fa7fefacc86ab6e00cbfec543a073e6d33 +pysnmp @ https://agent-int-packages.datadoghq.com/external/pysnmp/pysnmp-5.1.0-py3-none-any.whl#sha256=375a8adfc6820faf24ace6761a6d20544e60580d714ff7266df272850c39b439 +pysnmp-mibs @ https://agent-int-packages.datadoghq.com/external/pysnmp-mibs/pysnmp_mibs-0.1.6-py2.py3-none-any.whl#sha256=5e153ebe8e767c07940cea435f866c623ff6b2376155c7da75085b08d3774d48 +pysnmpcrypto @ https://agent-int-packages.datadoghq.com/external/pysnmpcrypto/pysnmpcrypto-0.0.4-py2.py3-none-any.whl#sha256=5889733caa030f45d9e03ea9d6370fb06426a8cb7f839aabbcdde33c6f634679 +pysocks @ https://agent-int-packages.datadoghq.com/external/pysocks/PySocks-1.7.1-py3-none-any.whl#sha256=2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 +pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0.11.1-py3-none-any.whl#sha256=129a4294f2c4d681d5875240ef87accc6f1d921e8983737fb0b59642b397951e +python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 +python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 +python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 +pyvmomi @ 
https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172050-py2.py3-none-manylinux2014_x86_64.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 +pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 +redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 +requests @ https://agent-int-packages.datadoghq.com/external/requests/requests-2.32.3-py3-none-any.whl#sha256=70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests-kerberos @ https://agent-int-packages.datadoghq.com/external/requests-kerberos/requests_kerberos-0.15.0-py2.py3-none-any.whl#sha256=ba9b0980b8489c93bfb13854fd118834e576d6700bfea3745cb2e62278cd16a6 +requests-ntlm @ https://agent-int-packages.datadoghq.com/external/requests-ntlm/requests_ntlm-1.3.0-py3-none-any.whl#sha256=4c7534a7d0e482bb0928531d621be4b2c74ace437e88c5a357ceb7452d25a510 +requests-oauthlib @ https://agent-int-packages.datadoghq.com/external/requests-oauthlib/requests_oauthlib-2.0.0-py2.py3-none-any.whl#sha256=7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 +requests-toolbelt @ https://agent-int-packages.datadoghq.com/external/requests-toolbelt/requests_toolbelt-1.0.0-py2.py3-none-any.whl#sha256=cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 +requests-unixsocket2 @ https://agent-int-packages.datadoghq.com/external/requests-unixsocket2/requests_unixsocket2-0.4.2-py3-none-any.whl#sha256=701fcd49d74bc0f759bbe45c4dfda0045fd89652948c2b473b1a312214c3770b +requestsexceptions @ https://agent-int-packages.datadoghq.com/external/requestsexceptions/requestsexceptions-1.4.0-py2.py3-none-any.whl#sha256=3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3 +rethinkdb @ https://agent-int-packages.datadoghq.com/external/rethinkdb/rethinkdb-2.4.9-py2.py3-none-any.whl#sha256=dd2455b3eab5266d8006eeefeb5eb970a3697dfe6aa18e1ab6e1d99233badfcb +rsa @ https://agent-int-packages.datadoghq.com/external/rsa/rsa-4.9-py3-none-any.whl#sha256=90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 +s3transfer @ https://agent-int-packages.datadoghq.com/external/s3transfer/s3transfer-0.10.2-py3-none-any.whl#sha256=eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69 +securesystemslib @ https://agent-int-packages.datadoghq.com/external/securesystemslib/securesystemslib-0.28.0-py3-none-any.whl#sha256=9e6b9abe36a511d4f52c759069db8f6f650362ba82d6efc7bc7466a458b3f499 +semver @ https://agent-int-packages.datadoghq.com/external/semver/semver-3.0.2-py3-none-any.whl#sha256=b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4 +service-identity @ https://agent-int-packages.datadoghq.com/external/service-identity/service_identity-24.1.0-py3-none-any.whl#sha256=a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a +setuptools @ https://agent-int-packages.datadoghq.com/external/setuptools/setuptools-75.1.0-py3-none-any.whl#sha256=35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 +simplejson @ https://agent-int-packages.datadoghq.com/external/simplejson/simplejson-3.19.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=7017329ca8d4dca94ad5e59f496e5fc77630aecfc39df381ffc1d37fb6b25832 +six @ 
https://agent-int-packages.datadoghq.com/external/six/six-1.16.0-py2.py3-none-any.whl#sha256=8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +snowflake-connector-python @ https://agent-int-packages.datadoghq.com/external/snowflake-connector-python/snowflake_connector_python-3.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=bddc4cdcd991f9538726a7c293d2637bb5aed43db68246e06c92c49a6df2b692 +sortedcontainers @ https://agent-int-packages.datadoghq.com/external/sortedcontainers/sortedcontainers-2.4.0-py2.py3-none-any.whl#sha256=a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +soupsieve @ https://agent-int-packages.datadoghq.com/external/soupsieve/soupsieve-2.6-py3-none-any.whl#sha256=e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 +stevedore @ https://agent-int-packages.datadoghq.com/external/stevedore/stevedore-5.3.0-py3-none-any.whl#sha256=1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78 +supervisor @ https://agent-int-packages.datadoghq.com/external/supervisor/supervisor-4.2.5-py2.py3-none-any.whl#sha256=2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08 +tomlkit @ https://agent-int-packages.datadoghq.com/external/tomlkit/tomlkit-0.13.2-py3-none-any.whl#sha256=7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde +tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-any.whl#sha256=a22ab5fa6daf910b3052929fdce42ccad8a300e5e85715daaff9592aed980f7a +typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d +tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 +uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240917155252-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl#sha256=d3ea8a4efb33d233338f185e0cc1fd46b04fdc0d11b69d0764a4f2260430a984 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac +vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b +websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 +wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b +xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/macos-x86_64_3.12.txt b/.deps/resolved/macos-x86_64_3.12.txt new file mode 100644 index 
0000000000000..7e02cd7a4bece --- /dev/null +++ b/.deps/resolved/macos-x86_64_3.12.txt @@ -0,0 +1,128 @@ +annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-types/annotated_types-0.7.0-py3-none-any.whl#sha256=1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 +asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 +attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd +azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 +bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl#sha256=c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841 +beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed +binary @ https://agent-int-packages.datadoghq.com/external/binary/binary-1.0.0-py2.py3-none-any.whl#sha256=e1b61f3a5c002717d1a28e4d9d2dc8acbc9d6b12baf7b1e4ab25d743da97e323 +boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py3-none-any.whl#sha256=add26dd58e076dfd387013da4704716d5cff215cf14f6d4347c4b9b7fc1f0b8e +botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 +bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d +cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 +certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 +cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl#sha256=805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 +charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl#sha256=ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b +clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-macosx_10_9_x86_64.whl#sha256=261fc1b0bf349de66b2d9e3d367879a561b516ca8e54e85e0c27b7c1a4f639b4 +clickhouse-driver @ 
https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl#sha256=fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0 +cm-client @ https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154932-py3-none-macosx_10_12_universal2.whl#sha256=aba3c1683ef1b2099933e030464d29b3ad1c206784ebd15d8a7147ecd6ba24e1 +confluent-kafka @ https://agent-int-packages.datadoghq.com/built/confluent-kafka/confluent_kafka-2.5.0-20240917155258-cp312-cp312-macosx_10_13_universal2.whl#sha256=b8d2785849784cb3ac8a33e0067bc0c6e4850d5dc4fe8bdf47973dcea5c94316 +cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl#sha256=7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66 +ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e +ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-macosx_12_0_x86_64.whl#sha256=401f77b0564c3f990b58b9f21055331ca9efcdfa06dfa6ccff13cf21f8329ba5 +decorator @ https://agent-int-packages.datadoghq.com/external/decorator/decorator-5.1.1-py3-none-any.whl#sha256=b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 +deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprecated-1.2.14-py2.py3-none-any.whl#sha256=6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c +dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 +dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca +envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 +foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154934-py3-none-macosx_10_12_universal2.whl#sha256=14259f824080062cc890965747597ff00a9d6c76a1eb926673fed68a45860ccd +google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 +gssapi @ https://agent-int-packages.datadoghq.com/external/gssapi/gssapi-1.8.3-cp312-cp312-macosx_10_9_x86_64.whl#sha256=19c373b3ba63ce19cd3163aa1495635e3d01b0de6cc4ff1126095eded1df6e01 +hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f +idna @ https://agent-int-packages.datadoghq.com/external/idna/idna-3.10-py3-none-any.whl#sha256=946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +importlib-metadata @ https://agent-int-packages.datadoghq.com/external/importlib-metadata/importlib_metadata-8.4.0-py3-none-any.whl#sha256=66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 +in-toto @ 
https://agent-int-packages.datadoghq.com/external/in-toto/in_toto-2.0.0-py3-none-any.whl#sha256=eaf71733cdd5b2309b58b24169c13c24701da72baa43b7432d1dffc308e7bf32 +iso8601 @ https://agent-int-packages.datadoghq.com/external/iso8601/iso8601-2.1.0-py3-none-any.whl#sha256=aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242 +jellyfish @ https://agent-int-packages.datadoghq.com/external/jellyfish/jellyfish-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl#sha256=828a7000d369cbd4d812b88510c01fdab20b73dc54c63cdbe03bdff67ab362d0 +jmespath @ https://agent-int-packages.datadoghq.com/external/jmespath/jmespath-1.0.1-py3-none-any.whl#sha256=02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 +jsonpatch @ https://agent-int-packages.datadoghq.com/external/jsonpatch/jsonpatch-1.33-py2.py3-none-any.whl#sha256=0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade +jsonpointer @ https://agent-int-packages.datadoghq.com/external/jsonpointer/jsonpointer-3.0.0-py2.py3-none-any.whl#sha256=13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 +keystoneauth1 @ https://agent-int-packages.datadoghq.com/external/keystoneauth1/keystoneauth1-5.8.0-py3-none-any.whl#sha256=e69dff80c509ab64d4de4494658d914e81f26af720828dc584ceee74ecd666d9 +krb5 @ https://agent-int-packages.datadoghq.com/external/krb5/krb5-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=8f1419ad4c4052f687aafd8ed0497de3e10905d7c4bfd0645d02fd63e24ab4b8 +kubernetes @ https://agent-int-packages.datadoghq.com/external/kubernetes/kubernetes-30.1.0-py2.py3-none-any.whl#sha256=e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d +ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2.py3-none-any.whl#sha256=5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70 +lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl#sha256=dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8 +lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp312-cp312-macosx_10_9_x86_64.whl#sha256=e36cd7b9d4d920d3bfc2369840da506fa68258f7bb176b8743189793c055e43d +mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5 +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 +msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240917155259-cp312-cp312-macosx_10_13_universal2.whl#sha256=e36f29cbdbd18847c6ee2d512ef10114d086f32e56e1654fd0230552dde5c24e +oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca +openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 +opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 +orjson @ 
https://agent-int-packages.datadoghq.com/external/orjson/orjson-3.10.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl#sha256=44a96f2d4c3af51bfac6bc4ef7b182aa33f2f054fd7f34cc0ee9a320d051d41f +os-service-types @ https://agent-int-packages.datadoghq.com/external/os-service-types/os_service_types-1.7.0-py2.py3-none-any.whl#sha256=0505c72205690910077fb72b88f2a1f07533c8d39f2fe75b29583481764965d6 +packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packaging-24.1-py3-none-any.whl#sha256=5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 +pathspec @ https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 +pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.3-py3-none-any.whl#sha256=50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 +ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce +portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf +prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 +protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl#sha256=68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f +psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl#sha256=c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a +psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl#sha256=8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf +pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd +pyasyncore @ https://agent-int-packages.datadoghq.com/external/pyasyncore/pyasyncore-1.0.4-py3-none-any.whl#sha256=9e5f6dc9dc057c56370b7a5cdb4c4670fd4b0556de2913ed1f428cd6a5366895 +pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl#sha256=82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3 +pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 
+pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl#sha256=595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231 +pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 +pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=e6a720a3d22b54183352dc65f08cd1547204d263e0651b213a0a2e577e838526 +pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240917155259-cp312-cp312-macosx_10_13_universal2.whl#sha256=63625dcc0c82f16ad6f8b5a2b60e4c5a3b51caa541a0a4e9805ef7bafbce5d95 +pymysql @ https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c +pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl#sha256=401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 +pyodbc @ https://agent-int-packages.datadoghq.com/external/pyodbc/pyodbc-5.1.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=d3d9cc4af703c4817b6e604315910b0cf5dcb68056d52b25ca072dd59c52dcbc +pyopenssl @ https://agent-int-packages.datadoghq.com/external/pyopenssl/pyOpenSSL-24.2.1-py3-none-any.whl#sha256=967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d +pysmi @ https://agent-int-packages.datadoghq.com/external/pysmi/pysmi-1.2.1-py3-none-any.whl#sha256=d97c60de9f81d33ab2899124d95a94fa7fefacc86ab6e00cbfec543a073e6d33 +pysnmp @ https://agent-int-packages.datadoghq.com/external/pysnmp/pysnmp-5.1.0-py3-none-any.whl#sha256=375a8adfc6820faf24ace6761a6d20544e60580d714ff7266df272850c39b439 +pysnmp-mibs @ https://agent-int-packages.datadoghq.com/external/pysnmp-mibs/pysnmp_mibs-0.1.6-py2.py3-none-any.whl#sha256=5e153ebe8e767c07940cea435f866c623ff6b2376155c7da75085b08d3774d48 +pysnmpcrypto @ https://agent-int-packages.datadoghq.com/external/pysnmpcrypto/pysnmpcrypto-0.0.4-py2.py3-none-any.whl#sha256=5889733caa030f45d9e03ea9d6370fb06426a8cb7f839aabbcdde33c6f634679 +pysocks @ https://agent-int-packages.datadoghq.com/external/pysocks/PySocks-1.7.1-py3-none-any.whl#sha256=2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 +pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0.11.1-py3-none-any.whl#sha256=129a4294f2c4d681d5875240ef87accc6f1d921e8983737fb0b59642b397951e +python-binary-memcached @ https://agent-int-packages.datadoghq.com/external/python-binary-memcached/python_binary_memcached-0.31.2-py3-none-any.whl#sha256=e5b93d54429e835cab7d5b33988649f9748344aa49adaed8eed94b37e714d562 +python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 +python3-gearman @ https://agent-int-packages.datadoghq.com/external/python3-gearman/python3_gearman-0.1.0-py3-none-any.whl#sha256=4a5808d3a0bfc6c243548ad57e7aab4bee62c9cba2b1c3a860fdd292d46a112d +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 +pyvmomi @ 
https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172113-py2.py3-none-macosx_10_12_universal2.whl#sha256=e173daf28895975b57850fef301837f24fba59dd8ff1d931795732e9be281d57 +pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl#sha256=c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab +redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 +requests @ https://agent-int-packages.datadoghq.com/external/requests/requests-2.32.3-py3-none-any.whl#sha256=70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests-kerberos @ https://agent-int-packages.datadoghq.com/external/requests-kerberos/requests_kerberos-0.15.0-py2.py3-none-any.whl#sha256=ba9b0980b8489c93bfb13854fd118834e576d6700bfea3745cb2e62278cd16a6 +requests-ntlm @ https://agent-int-packages.datadoghq.com/external/requests-ntlm/requests_ntlm-1.3.0-py3-none-any.whl#sha256=4c7534a7d0e482bb0928531d621be4b2c74ace437e88c5a357ceb7452d25a510 +requests-oauthlib @ https://agent-int-packages.datadoghq.com/external/requests-oauthlib/requests_oauthlib-2.0.0-py2.py3-none-any.whl#sha256=7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 +requests-toolbelt @ https://agent-int-packages.datadoghq.com/external/requests-toolbelt/requests_toolbelt-1.0.0-py2.py3-none-any.whl#sha256=cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 +requests-unixsocket2 @ https://agent-int-packages.datadoghq.com/external/requests-unixsocket2/requests_unixsocket2-0.4.2-py3-none-any.whl#sha256=701fcd49d74bc0f759bbe45c4dfda0045fd89652948c2b473b1a312214c3770b +requestsexceptions @ https://agent-int-packages.datadoghq.com/external/requestsexceptions/requestsexceptions-1.4.0-py2.py3-none-any.whl#sha256=3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3 +rethinkdb @ https://agent-int-packages.datadoghq.com/external/rethinkdb/rethinkdb-2.4.9-py2.py3-none-any.whl#sha256=dd2455b3eab5266d8006eeefeb5eb970a3697dfe6aa18e1ab6e1d99233badfcb +rsa @ https://agent-int-packages.datadoghq.com/external/rsa/rsa-4.9-py3-none-any.whl#sha256=90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 +s3transfer @ https://agent-int-packages.datadoghq.com/external/s3transfer/s3transfer-0.10.2-py3-none-any.whl#sha256=eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69 +securesystemslib @ https://agent-int-packages.datadoghq.com/external/securesystemslib/securesystemslib-0.28.0-py3-none-any.whl#sha256=9e6b9abe36a511d4f52c759069db8f6f650362ba82d6efc7bc7466a458b3f499 +semver @ https://agent-int-packages.datadoghq.com/external/semver/semver-3.0.2-py3-none-any.whl#sha256=b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4 +service-identity @ https://agent-int-packages.datadoghq.com/external/service-identity/service_identity-24.1.0-py3-none-any.whl#sha256=a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a +setuptools @ https://agent-int-packages.datadoghq.com/external/setuptools/setuptools-75.1.0-py3-none-any.whl#sha256=35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 +simplejson @ https://agent-int-packages.datadoghq.com/external/simplejson/simplejson-3.19.3-cp312-cp312-macosx_10_9_x86_64.whl#sha256=6ef9383c5e05f445be60f1735c1816163c874c0b1ede8bb4390aff2ced34f333 +six @ 
https://agent-int-packages.datadoghq.com/external/six/six-1.16.0-py2.py3-none-any.whl#sha256=8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +snowflake-connector-python @ https://agent-int-packages.datadoghq.com/external/snowflake-connector-python/snowflake_connector_python-3.12.1-cp312-cp312-macosx_11_0_x86_64.whl#sha256=dadd262196cce0132ca7e766f055e00c00497a88fdf83fd48143eb4a469a4527 +sortedcontainers @ https://agent-int-packages.datadoghq.com/external/sortedcontainers/sortedcontainers-2.4.0-py2.py3-none-any.whl#sha256=a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +soupsieve @ https://agent-int-packages.datadoghq.com/external/soupsieve/soupsieve-2.6-py3-none-any.whl#sha256=e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 +stevedore @ https://agent-int-packages.datadoghq.com/external/stevedore/stevedore-5.3.0-py3-none-any.whl#sha256=1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78 +supervisor @ https://agent-int-packages.datadoghq.com/external/supervisor/supervisor-4.2.5-py2.py3-none-any.whl#sha256=2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08 +tomlkit @ https://agent-int-packages.datadoghq.com/external/tomlkit/tomlkit-0.13.2-py3-none-any.whl#sha256=7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde +tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-any.whl#sha256=a22ab5fa6daf910b3052929fdce42ccad8a300e5e85715daaff9592aed980f7a +typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d +tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 +uhashring @ https://agent-int-packages.datadoghq.com/external/uhashring/uhashring-2.3-py3-none-any.whl#sha256=7ee8a25ca495a97effad10bd563c83b4054a6d7606d9530757049a04edab9297 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240917155300-cp312-cp312-macosx_10_13_universal2.whl#sha256=40c99fc8803f14b345a3bbfa38fe3ffd81cf4204491b6c31171b73cfec1431e9 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac +vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b +websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 +wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl#sha256=5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b +xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 diff --git a/.deps/resolved/windows-x86_64_3.12.txt b/.deps/resolved/windows-x86_64_3.12.txt new file mode 100644 index 0000000000000..134d120f0b18f --- /dev/null +++ b/.deps/resolved/windows-x86_64_3.12.txt @@ -0,0 +1,126 
@@ +annotated-types @ https://agent-int-packages.datadoghq.com/external/annotated-types/annotated_types-0.7.0-py3-none-any.whl#sha256=1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 +asn1crypto @ https://agent-int-packages.datadoghq.com/external/asn1crypto/asn1crypto-1.5.1-py2.py3-none-any.whl#sha256=db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67 +attrs @ https://agent-int-packages.datadoghq.com/external/attrs/attrs-24.2.0-py3-none-any.whl#sha256=81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +aws-requests-auth @ https://agent-int-packages.datadoghq.com/external/aws-requests-auth/aws_requests_auth-0.4.3-py2.py3-none-any.whl#sha256=646bc37d62140ea1c709d20148f5d43197e6bd2d63909eb36fa4bb2345759977 +azure-core @ https://agent-int-packages.datadoghq.com/external/azure-core/azure_core-1.31.0-py3-none-any.whl#sha256=22954de3777e0250029360ef31d80448ef1be13b80a459bff80ba7073379e2cd +azure-identity @ https://agent-int-packages.datadoghq.com/external/azure-identity/azure_identity-1.17.1-py3-none-any.whl#sha256=db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382 +bcrypt @ https://agent-int-packages.datadoghq.com/external/bcrypt/bcrypt-4.2.0-cp39-abi3-win_amd64.whl#sha256=61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9 +beautifulsoup4 @ https://agent-int-packages.datadoghq.com/external/beautifulsoup4/beautifulsoup4-4.12.3-py3-none-any.whl#sha256=b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed +binary @ https://agent-int-packages.datadoghq.com/external/binary/binary-1.0.0-py2.py3-none-any.whl#sha256=e1b61f3a5c002717d1a28e4d9d2dc8acbc9d6b12baf7b1e4ab25d743da97e323 +boto3 @ https://agent-int-packages.datadoghq.com/external/boto3/boto3-1.35.10-py3-none-any.whl#sha256=add26dd58e076dfd387013da4704716d5cff215cf14f6d4347c4b9b7fc1f0b8e +botocore @ https://agent-int-packages.datadoghq.com/external/botocore/botocore-1.35.10-py3-none-any.whl#sha256=0d96d023b9b0cea99a0a428a431d011329d3a958730aee6ed6a6fec5d9bfbc03 +bytecode @ https://agent-int-packages.datadoghq.com/external/bytecode/bytecode-0.15.1-py3-none-any.whl#sha256=0a1dc340cac823cff605609b8b214f7f9bf80418c6b9e0fc8c6db1793c27137d +cachetools @ https://agent-int-packages.datadoghq.com/external/cachetools/cachetools-5.5.0-py3-none-any.whl#sha256=02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 +cattrs @ https://agent-int-packages.datadoghq.com/external/cattrs/cattrs-24.1.1-py3-none-any.whl#sha256=ec8ce8fdc725de9d07547cd616f968670687c6fa7a2e263b088370c46d834d97 +certifi @ https://agent-int-packages.datadoghq.com/external/certifi/certifi-2024.8.30-py3-none-any.whl#sha256=922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 +cffi @ https://agent-int-packages.datadoghq.com/external/cffi/cffi-1.17.1-cp312-cp312-win_amd64.whl#sha256=51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 +charset-normalizer @ https://agent-int-packages.datadoghq.com/external/charset-normalizer/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl#sha256=96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 +clickhouse-cityhash @ https://agent-int-packages.datadoghq.com/external/clickhouse-cityhash/clickhouse_cityhash-1.0.2.4-cp312-cp312-win_amd64.whl#sha256=0409917be29f5ad80a6772712fce954b5e81450555636e8523290ee9740a2dbb +clickhouse-driver @ https://agent-int-packages.datadoghq.com/external/clickhouse-driver/clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl#sha256=de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18 +cm-client @ 
https://agent-int-packages.datadoghq.com/built/cm-client/cm_client-45.0.4-20240402154627-py3-none-win_amd64.whl#sha256=1743b32a221d2a0804b4e425ffd53468e8f1754da217fe1e7bd9ff7800fd90f8 +confluent-kafka @ https://agent-int-packages.datadoghq.com/external/confluent-kafka/confluent_kafka-2.5.0-cp312-cp312-win_amd64.whl#sha256=d668b5c426af595271bf6fce2917a6c3a15453656077a59db85f440958b5ccc2 +cryptography @ https://agent-int-packages.datadoghq.com/external/cryptography/cryptography-43.0.0-cp39-abi3-win_amd64.whl#sha256=0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709 +ddsketch @ https://agent-int-packages.datadoghq.com/external/ddsketch/ddsketch-3.0.1-py3-none-any.whl#sha256=6d047b455fe2837c43d366ff1ae6ba0c3166e15499de8688437a75cea914224e +ddtrace @ https://agent-int-packages.datadoghq.com/external/ddtrace/ddtrace-2.10.6-cp312-cp312-win_amd64.whl#sha256=bb183a535e5b24828a45901babd9fd15a1350c9d5096de5ba463287d0c8c64d1 +decorator @ https://agent-int-packages.datadoghq.com/external/decorator/decorator-5.1.1-py3-none-any.whl#sha256=b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 +deprecated @ https://agent-int-packages.datadoghq.com/external/deprecated/Deprecated-1.2.14-py2.py3-none-any.whl#sha256=6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c +dnspython @ https://agent-int-packages.datadoghq.com/external/dnspython/dnspython-2.6.1-py3-none-any.whl#sha256=5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 +dogpile-cache @ https://agent-int-packages.datadoghq.com/external/dogpile-cache/dogpile.cache-1.3.3-py3-none-any.whl#sha256=5e211c4902ebdf88c678d268e22454b41e68071632daa9402d8ee24e825ed8ca +envier @ https://agent-int-packages.datadoghq.com/external/envier/envier-0.5.2-py3-none-any.whl#sha256=65099cf3aa9b3b3b4b92db2f7d29e2910672e085b76f7e587d2167561a834add +filelock @ https://agent-int-packages.datadoghq.com/external/filelock/filelock-3.16.0-py3-none-any.whl#sha256=f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609 +foundationdb @ https://agent-int-packages.datadoghq.com/built/foundationdb/foundationdb-6.3.24-20240402154628-py3-none-win_amd64.whl#sha256=07e8e97e51dc9248d58d60d33076b82380135c31ab3727a33b885cea17e34bc7 +google-auth @ https://agent-int-packages.datadoghq.com/external/google-auth/google_auth-2.34.0-py2.py3-none-any.whl#sha256=72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 +hazelcast-python-client @ https://agent-int-packages.datadoghq.com/external/hazelcast-python-client/hazelcast_python_client-5.4.0-py3-none-any.whl#sha256=16195cd58feb2dd3be1594d08d42527ae00797548a6a9d6a601aae2e8514ff5f +idna @ https://agent-int-packages.datadoghq.com/external/idna/idna-3.10-py3-none-any.whl#sha256=946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +importlib-metadata @ https://agent-int-packages.datadoghq.com/external/importlib-metadata/importlib_metadata-8.4.0-py3-none-any.whl#sha256=66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1 +in-toto @ https://agent-int-packages.datadoghq.com/external/in-toto/in_toto-2.0.0-py3-none-any.whl#sha256=eaf71733cdd5b2309b58b24169c13c24701da72baa43b7432d1dffc308e7bf32 +iso8601 @ https://agent-int-packages.datadoghq.com/external/iso8601/iso8601-2.1.0-py3-none-any.whl#sha256=aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242 +jellyfish @ https://agent-int-packages.datadoghq.com/external/jellyfish/jellyfish-1.1.0-cp312-none-win_amd64.whl#sha256=2b928bad2887c662783a4d9b5828ed1fa0e943f680589f7fc002c456fc02e184 +jmespath @ 
https://agent-int-packages.datadoghq.com/external/jmespath/jmespath-1.0.1-py3-none-any.whl#sha256=02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 +jsonpatch @ https://agent-int-packages.datadoghq.com/external/jsonpatch/jsonpatch-1.33-py2.py3-none-any.whl#sha256=0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade +jsonpointer @ https://agent-int-packages.datadoghq.com/external/jsonpointer/jsonpointer-3.0.0-py2.py3-none-any.whl#sha256=13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942 +keystoneauth1 @ https://agent-int-packages.datadoghq.com/external/keystoneauth1/keystoneauth1-5.8.0-py3-none-any.whl#sha256=e69dff80c509ab64d4de4494658d914e81f26af720828dc584ceee74ecd666d9 +kubernetes @ https://agent-int-packages.datadoghq.com/external/kubernetes/kubernetes-30.1.0-py2.py3-none-any.whl#sha256=e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d +ldap3 @ https://agent-int-packages.datadoghq.com/external/ldap3/ldap3-2.9.1-py2.py3-none-any.whl#sha256=5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70 +lxml @ https://agent-int-packages.datadoghq.com/external/lxml/lxml-4.9.4-cp312-cp312-win_amd64.whl#sha256=f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56 +lz4 @ https://agent-int-packages.datadoghq.com/external/lz4/lz4-4.3.3-cp312-cp312-win_amd64.whl#sha256=5d35533bf2cee56f38ced91f766cd0038b6abf46f438a80d50c52750088be93f +mmh3 @ https://agent-int-packages.datadoghq.com/external/mmh3/mmh3-4.1.0-cp312-cp312-win_amd64.whl#sha256=bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9 +msal @ https://agent-int-packages.datadoghq.com/external/msal/msal-1.31.0-py3-none-any.whl#sha256=96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7 +msal-extensions @ https://agent-int-packages.datadoghq.com/external/msal-extensions/msal_extensions-1.2.0-py3-none-any.whl#sha256=cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d +netifaces @ https://agent-int-packages.datadoghq.com/built/netifaces/netifaces-0.11.0-20240830145553-cp312-cp312-win_amd64.whl#sha256=a1ba522e63fb6b220e7fe668767f334662afa9c56eca18b361bd1f88863ab59a +oauthlib @ https://agent-int-packages.datadoghq.com/external/oauthlib/oauthlib-3.2.2-py3-none-any.whl#sha256=8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca +openstacksdk @ https://agent-int-packages.datadoghq.com/external/openstacksdk/openstacksdk-3.3.0-py3-none-any.whl#sha256=e6d4121b87354984caf0e3c032e2ebf4d4440374f86c81c27ec52ca5df359157 +opentelemetry-api @ https://agent-int-packages.datadoghq.com/external/opentelemetry-api/opentelemetry_api-1.27.0-py3-none-any.whl#sha256=953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7 +orjson @ https://agent-int-packages.datadoghq.com/external/orjson/orjson-3.10.7-cp312-none-win_amd64.whl#sha256=1d9c0e733e02ada3ed6098a10a8ee0052dd55774de3d9110d29868d24b17faa1 +os-service-types @ https://agent-int-packages.datadoghq.com/external/os-service-types/os_service_types-1.7.0-py2.py3-none-any.whl#sha256=0505c72205690910077fb72b88f2a1f07533c8d39f2fe75b29583481764965d6 +packaging @ https://agent-int-packages.datadoghq.com/external/packaging/packaging-24.1-py3-none-any.whl#sha256=5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +paramiko @ https://agent-int-packages.datadoghq.com/external/paramiko/paramiko-3.4.1-py3-none-any.whl#sha256=8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 +pathspec @ 
https://agent-int-packages.datadoghq.com/external/pathspec/pathspec-0.12.1-py3-none-any.whl#sha256=a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 +pbr @ https://agent-int-packages.datadoghq.com/external/pbr/pbr-6.1.0-py2.py3-none-any.whl#sha256=a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a +platformdirs @ https://agent-int-packages.datadoghq.com/external/platformdirs/platformdirs-4.3.3-py3-none-any.whl#sha256=50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5 +ply @ https://agent-int-packages.datadoghq.com/external/ply/ply-3.11-py2.py3-none-any.whl#sha256=096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce +portalocker @ https://agent-int-packages.datadoghq.com/external/portalocker/portalocker-2.10.1-py3-none-any.whl#sha256=53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf +prometheus-client @ https://agent-int-packages.datadoghq.com/external/prometheus-client/prometheus_client-0.20.0-py3-none-any.whl#sha256=cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7 +protobuf @ https://agent-int-packages.datadoghq.com/external/protobuf/protobuf-5.27.3-cp310-abi3-win_amd64.whl#sha256=16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7 +psutil @ https://agent-int-packages.datadoghq.com/external/psutil/psutil-5.9.6-cp37-abi3-win_amd64.whl#sha256=6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a +psycopg2-binary @ https://agent-int-packages.datadoghq.com/external/psycopg2-binary/psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl#sha256=81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab +pyasn1 @ https://agent-int-packages.datadoghq.com/external/pyasn1/pyasn1-0.4.8-py2.py3-none-any.whl#sha256=39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d +pyasn1-modules @ https://agent-int-packages.datadoghq.com/external/pyasn1-modules/pyasn1_modules-0.4.1-py3-none-any.whl#sha256=49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd +pyasyncore @ https://agent-int-packages.datadoghq.com/external/pyasyncore/pyasyncore-1.0.4-py3-none-any.whl#sha256=9e5f6dc9dc057c56370b7a5cdb4c4670fd4b0556de2913ed1f428cd6a5366895 +pycparser @ https://agent-int-packages.datadoghq.com/external/pycparser/pycparser-2.22-py3-none-any.whl#sha256=c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycryptodomex @ https://agent-int-packages.datadoghq.com/external/pycryptodomex/pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl#sha256=2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc +pydantic @ https://agent-int-packages.datadoghq.com/external/pydantic/pydantic-2.8.2-py3-none-any.whl#sha256=73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8 +pydantic-core @ https://agent-int-packages.datadoghq.com/external/pydantic-core/pydantic_core-2.20.1-cp312-none-win_amd64.whl#sha256=035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d +pyjwt @ https://agent-int-packages.datadoghq.com/external/pyjwt/PyJWT-2.9.0-py3-none-any.whl#sha256=3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 +pymongo @ https://agent-int-packages.datadoghq.com/external/pymongo/pymongo-4.8.0-cp312-cp312-win_amd64.whl#sha256=e84bc7707492f06fbc37a9f215374d2977d21b72e10a67f1b31893ec5a140ad8 +pymqi @ https://agent-int-packages.datadoghq.com/built/pymqi/pymqi-1.12.10-20240830145554-cp312-cp312-win_amd64.whl#sha256=d9749dd3ca4d1705f364cab34f679cf889d363f64602ed2f7c106a78876bf4ab +pymysql @ 
https://agent-int-packages.datadoghq.com/external/pymysql/PyMySQL-1.1.1-py3-none-any.whl#sha256=4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c +pynacl @ https://agent-int-packages.datadoghq.com/external/pynacl/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl#sha256=20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 +pyodbc @ https://agent-int-packages.datadoghq.com/external/pyodbc/pyodbc-5.1.0-cp312-cp312-win_amd64.whl#sha256=33f4984af38872e7bdec78007a34e4d43ae72bf9d0bae3344e79d9d0db157c0e +pyopenssl @ https://agent-int-packages.datadoghq.com/external/pyopenssl/pyOpenSSL-24.2.1-py3-none-any.whl#sha256=967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d +pysmi @ https://agent-int-packages.datadoghq.com/external/pysmi/pysmi-1.2.1-py3-none-any.whl#sha256=d97c60de9f81d33ab2899124d95a94fa7fefacc86ab6e00cbfec543a073e6d33 +pysnmp @ https://agent-int-packages.datadoghq.com/external/pysnmp/pysnmp-5.1.0-py3-none-any.whl#sha256=375a8adfc6820faf24ace6761a6d20544e60580d714ff7266df272850c39b439 +pysnmp-mibs @ https://agent-int-packages.datadoghq.com/external/pysnmp-mibs/pysnmp_mibs-0.1.6-py2.py3-none-any.whl#sha256=5e153ebe8e767c07940cea435f866c623ff6b2376155c7da75085b08d3774d48 +pysnmpcrypto @ https://agent-int-packages.datadoghq.com/external/pysnmpcrypto/pysnmpcrypto-0.0.4-py2.py3-none-any.whl#sha256=5889733caa030f45d9e03ea9d6370fb06426a8cb7f839aabbcdde33c6f634679 +pysocks @ https://agent-int-packages.datadoghq.com/external/pysocks/PySocks-1.7.1-py3-none-any.whl#sha256=2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 +pyspnego @ https://agent-int-packages.datadoghq.com/external/pyspnego/pyspnego-0.11.1-py3-none-any.whl#sha256=129a4294f2c4d681d5875240ef87accc6f1d921e8983737fb0b59642b397951e +python-dateutil @ https://agent-int-packages.datadoghq.com/external/python-dateutil/python_dateutil-2.9.0.post0-py2.py3-none-any.whl#sha256=a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 +pytz @ https://agent-int-packages.datadoghq.com/external/pytz/pytz-2024.2-py2.py3-none-any.whl#sha256=31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725 +pyvmomi @ https://agent-int-packages.datadoghq.com/built/pyvmomi/pyvmomi-8.0.3.0.1-20240702172100-py2.py3-none-win_amd64.whl#sha256=19446fe48dbdd8b64097eff5648cc4b5a19165ede40826507f5e1398e1032e12 +pywin32 @ https://agent-int-packages.datadoghq.com/external/pywin32/pywin32-306-cp312-cp312-win_amd64.whl#sha256=37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e +pyyaml @ https://agent-int-packages.datadoghq.com/external/pyyaml/PyYAML-6.0.2-cp312-cp312-win_amd64.whl#sha256=7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 +redis @ https://agent-int-packages.datadoghq.com/external/redis/redis-5.0.8-py3-none-any.whl#sha256=56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4 +requests @ https://agent-int-packages.datadoghq.com/external/requests/requests-2.32.3-py3-none-any.whl#sha256=70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests-kerberos @ https://agent-int-packages.datadoghq.com/external/requests-kerberos/requests_kerberos-0.15.0-py2.py3-none-any.whl#sha256=ba9b0980b8489c93bfb13854fd118834e576d6700bfea3745cb2e62278cd16a6 +requests-ntlm @ https://agent-int-packages.datadoghq.com/external/requests-ntlm/requests_ntlm-1.3.0-py3-none-any.whl#sha256=4c7534a7d0e482bb0928531d621be4b2c74ace437e88c5a357ceb7452d25a510 +requests-oauthlib @ 
https://agent-int-packages.datadoghq.com/external/requests-oauthlib/requests_oauthlib-2.0.0-py2.py3-none-any.whl#sha256=7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 +requests-toolbelt @ https://agent-int-packages.datadoghq.com/external/requests-toolbelt/requests_toolbelt-1.0.0-py2.py3-none-any.whl#sha256=cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 +requests-unixsocket2 @ https://agent-int-packages.datadoghq.com/external/requests-unixsocket2/requests_unixsocket2-0.4.2-py3-none-any.whl#sha256=701fcd49d74bc0f759bbe45c4dfda0045fd89652948c2b473b1a312214c3770b +requestsexceptions @ https://agent-int-packages.datadoghq.com/external/requestsexceptions/requestsexceptions-1.4.0-py2.py3-none-any.whl#sha256=3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3 +rethinkdb @ https://agent-int-packages.datadoghq.com/external/rethinkdb/rethinkdb-2.4.9-py2.py3-none-any.whl#sha256=dd2455b3eab5266d8006eeefeb5eb970a3697dfe6aa18e1ab6e1d99233badfcb +rsa @ https://agent-int-packages.datadoghq.com/external/rsa/rsa-4.9-py3-none-any.whl#sha256=90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 +s3transfer @ https://agent-int-packages.datadoghq.com/external/s3transfer/s3transfer-0.10.2-py3-none-any.whl#sha256=eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69 +securesystemslib @ https://agent-int-packages.datadoghq.com/external/securesystemslib/securesystemslib-0.28.0-py3-none-any.whl#sha256=9e6b9abe36a511d4f52c759069db8f6f650362ba82d6efc7bc7466a458b3f499 +semver @ https://agent-int-packages.datadoghq.com/external/semver/semver-3.0.2-py3-none-any.whl#sha256=b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4 +service-identity @ https://agent-int-packages.datadoghq.com/external/service-identity/service_identity-24.1.0-py3-none-any.whl#sha256=a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a +setuptools @ https://agent-int-packages.datadoghq.com/external/setuptools/setuptools-75.1.0-py3-none-any.whl#sha256=35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 +simplejson @ https://agent-int-packages.datadoghq.com/external/simplejson/simplejson-3.19.3-cp312-cp312-win_amd64.whl#sha256=1e662336db50ad665777e6548b5076329a94a0c3d4a0472971c588b3ef27de3a +six @ https://agent-int-packages.datadoghq.com/external/six/six-1.16.0-py2.py3-none-any.whl#sha256=8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +snowflake-connector-python @ https://agent-int-packages.datadoghq.com/external/snowflake-connector-python/snowflake_connector_python-3.12.1-cp312-cp312-win_amd64.whl#sha256=b06c63ec0381df1f4da6c4326330a1a40c8fc21fd3dcc2f58df4de395d676893 +sortedcontainers @ https://agent-int-packages.datadoghq.com/external/sortedcontainers/sortedcontainers-2.4.0-py2.py3-none-any.whl#sha256=a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0 +soupsieve @ https://agent-int-packages.datadoghq.com/external/soupsieve/soupsieve-2.6-py3-none-any.whl#sha256=e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9 +sspilib @ https://agent-int-packages.datadoghq.com/external/sspilib/sspilib-0.1.0-cp312-cp312-win_amd64.whl#sha256=b83825a2c43ff84ddff72d09b098057efaabf3841d3c42888078e154cf8e9595 +stevedore @ https://agent-int-packages.datadoghq.com/external/stevedore/stevedore-5.3.0-py3-none-any.whl#sha256=1efd34ca08f474dad08d9b19e934a22c68bb6fe416926479ba29e5013bcc8f78 +supervisor @ 
https://agent-int-packages.datadoghq.com/external/supervisor/supervisor-4.2.5-py2.py3-none-any.whl#sha256=2ecaede32fc25af814696374b79e42644ecaba5c09494c51016ffda9602d0f08 +tomlkit @ https://agent-int-packages.datadoghq.com/external/tomlkit/tomlkit-0.13.2-py3-none-any.whl#sha256=7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde +tuf @ https://agent-int-packages.datadoghq.com/external/tuf/tuf-4.0.0-py3-none-any.whl#sha256=a22ab5fa6daf910b3052929fdce42ccad8a300e5e85715daaff9592aed980f7a +typing-extensions @ https://agent-int-packages.datadoghq.com/external/typing-extensions/typing_extensions-4.12.2-py3-none-any.whl#sha256=04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d +tzdata @ https://agent-int-packages.datadoghq.com/external/tzdata/tzdata-2024.1-py2.py3-none-any.whl#sha256=9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252 +tzlocal @ https://agent-int-packages.datadoghq.com/external/tzlocal/tzlocal-5.2-py3-none-any.whl#sha256=49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8 +uptime @ https://agent-int-packages.datadoghq.com/built/uptime/uptime-3.0.1-20240830145554-cp312-cp312-win_amd64.whl#sha256=3428f0d06d749e2b07f4b783429de36dca4bd437fe0b4df558074fa898cae5c0 +urllib3 @ https://agent-int-packages.datadoghq.com/external/urllib3/urllib3-2.2.3-py3-none-any.whl#sha256=ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac +vertica-python @ https://agent-int-packages.datadoghq.com/external/vertica-python/vertica_python-1.4.0-py3-none-any.whl#sha256=50fecd7687f4b0b9f6dee6e2b35c195af2a4f702ece01bd12e080b51756e000b +websocket-client @ https://agent-int-packages.datadoghq.com/external/websocket-client/websocket_client-1.8.0-py3-none-any.whl#sha256=17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526 +wrapt @ https://agent-int-packages.datadoghq.com/external/wrapt/wrapt-1.16.0-cp312-cp312-win_amd64.whl#sha256=dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 +xmltodict @ https://agent-int-packages.datadoghq.com/external/xmltodict/xmltodict-0.13.0-py2.py3-none-any.whl#sha256=aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 +zipp @ https://agent-int-packages.datadoghq.com/external/zipp/zipp-3.20.2-py3-none-any.whl#sha256=a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 From d5250d3b50e30ba7331b32b3db2f02ab1e1e199b Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Tue, 17 Sep 2024 22:36:04 +0200 Subject: [PATCH 16/23] Improve assertions in test for python upgrade script (#18606) * Improve assertions in test for python upgrade script * bump python version in conftest * lint --------- Co-authored-by: Kyle-Neale --- ddev/tests/cli/meta/scripts/conftest.py | 11 +++++------ ddev/tests/cli/meta/scripts/test_upgrade_python.py | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/ddev/tests/cli/meta/scripts/conftest.py b/ddev/tests/cli/meta/scripts/conftest.py index d92dc4481cfcc..3eef49fef96ff 100644 --- a/ddev/tests/cli/meta/scripts/conftest.py +++ b/ddev/tests/cli/meta/scripts/conftest.py @@ -5,8 +5,10 @@ from ddev.repo.core import Repository -OLD_PYTHON_VERSION = "3.11" -NEW_PYTHON_VERSION = "3.12" +# Whenever we bump the Python version, we also need to bump the Python +# version in the conftest.py.
+OLD_PYTHON_VERSION = "3.12" +NEW_PYTHON_VERSION = "3.13" @pytest.fixture @@ -43,7 +45,7 @@ def fake_repo(tmp_path_factory, config_file, ddev): f"""[env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["2.7", "{OLD_PYTHON_VERSION}"] +python = ["{OLD_PYTHON_VERSION}"] """, ) @@ -68,7 +70,6 @@ def fake_repo(tmp_path_factory, config_file, ddev): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: {OLD_PYTHON_VERSION}", ] """, @@ -86,7 +87,6 @@ def fake_repo(tmp_path_factory, config_file, ddev): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: {OLD_PYTHON_VERSION}", ] """, @@ -134,7 +134,6 @@ def fake_repo(tmp_path_factory, config_file, ddev): "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: {OLD_PYTHON_VERSION}", ] """, diff --git a/ddev/tests/cli/meta/scripts/test_upgrade_python.py b/ddev/tests/cli/meta/scripts/test_upgrade_python.py index 8302f47ec89ec..0f72910563c37 100644 --- a/ddev/tests/cli/meta/scripts/test_upgrade_python.py +++ b/ddev/tests/cli/meta/scripts/test_upgrade_python.py @@ -27,8 +27,8 @@ def test_upgrade_python(fake_repo, ddev): hatch_file = fake_repo.path / 'dummy' / 'hatch.toml' contents = hatch_file.read_text() - assert f'python = ["2.7", "{OLD_PYTHON_VERSION}"]' not in contents - assert f'python = ["2.7", "{NEW_PYTHON_VERSION}"]' in contents + assert f'python = ["{OLD_PYTHON_VERSION}"]' not in contents + assert f'python = ["{NEW_PYTHON_VERSION}"]' in contents for integration in ('dummy', 'datadog_checks_dependency_provider', 'logs_only'): pyproject_file = fake_repo.path / integration / 'pyproject.toml' From 9c901c4d8621d912c9e77a7ece95b3bcca28ffa2 Mon Sep 17 00:00:00 2001 From: Kyle Neale Date: Tue, 17 Sep 2024 17:40:01 -0400 Subject: [PATCH 17/23] update python env to 3.12.6 (#18612) --- kubeflow/hatch.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kubeflow/hatch.toml b/kubeflow/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/kubeflow/hatch.toml +++ b/kubeflow/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] From 1ab760be48802333c9a3a5c3ba850667c31d368e Mon Sep 17 00:00:00 2001 From: "datadog-agent-integrations-bot[bot]" <159767151+datadog-agent-integrations-bot[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 09:20:55 -0400 Subject: [PATCH 18/23] Finalize Agent release 7.57.1 (#18615) Co-authored-by: Kyle-Neale --- AGENT_CHANGELOG.md | 4 + AGENT_INTEGRATIONS.md | 201 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 205 insertions(+) diff --git a/AGENT_CHANGELOG.md b/AGENT_CHANGELOG.md index 62986be17e7cc..3fc171d8a39f6 100644 --- a/AGENT_CHANGELOG.md +++ b/AGENT_CHANGELOG.md @@ -1,3 +1,7 @@ +## Datadog Agent version [7.57.1](https://github.com/DataDog/datadog-agent/blob/master/CHANGELOG.rst#7571) + +* There were no integration updates for this version of the Agent. 
+ ## Datadog Agent version [7.57.0](https://github.com/DataDog/datadog-agent/blob/master/CHANGELOG.rst#7570) * Amazon Kafka [4.10.0](https://github.com/DataDog/integrations-core/blob/master/amazon_msk/CHANGELOG.md) diff --git a/AGENT_INTEGRATIONS.md b/AGENT_INTEGRATIONS.md index 5b810e1a04d87..3dfab715b2827 100644 --- a/AGENT_INTEGRATIONS.md +++ b/AGENT_INTEGRATIONS.md @@ -1,3 +1,204 @@ +## Datadog Agent version 7.57.1 + +* datadog-active-directory: 2.1.1 +* datadog-activemq-xml: 3.2.1 +* datadog-activemq: 3.1.0 +* datadog-aerospike: 2.2.2 +* datadog-airflow: 5.0.1 +* datadog-amazon-msk: 4.10.0 +* datadog-ambari: 4.2.1 +* datadog-apache: 4.5.1 +* datadog-arangodb: 2.2.2 +* datadog-argo-rollouts: 1.0.2 +* datadog-argo-workflows: 1.0.2 +* datadog-argocd: 2.4.3 +* datadog-aspdotnet: 2.1.1 +* datadog-avi-vantage: 4.2.2 +* datadog-aws-neuron: 1.0.0 +* datadog-azure-iot-edge: 4.2.1 +* datadog-boundary: 2.2.3 +* datadog-btrfs: 2.3.0 +* datadog-cacti: 2.1.1 +* datadog-calico: 2.2.2 +* datadog-cassandra-nodetool: 1.13.0 +* datadog-cassandra: 1.18.0 +* datadog-ceph: 2.10.0 +* datadog-cert-manager: 4.1.2 +* datadog-checkpoint-quantum-firewall: 1.0.0 +* datadog-checks-base: 36.13.0 +* datadog-checks-dependency-provider: 1.4.0 +* datadog-checks-downloader: 4.7.0 +* datadog-cilium: 3.5.1 +* datadog-cisco-aci: 2.10.2 +* datadog-cisco-secure-firewall: 1.0.0 +* datadog-citrix-hypervisor: 3.2.1 +* datadog-clickhouse: 3.6.0 +* datadog-cloud-foundry-api: 3.3.1 +* datadog-cloudera: 2.2.0 +* datadog-cockroachdb: 3.3.2 +* datadog-confluent-platform: 1.10.2 +* datadog-consul: 2.6.1 +* datadog-coredns: 3.2.3 +* datadog-couch: 6.2.1 +* datadog-couchbase: 3.2.1 +* datadog-crio: 2.6.1 +* datadog-datadog-cluster-agent: 3.2.0 +* datadog-dcgm: 2.3.2 +* datadog-directory: 2.1.1 +* datadog-disk: 5.3.0 +* datadog-dns-check: 3.3.0 +* datadog-dotnetclr: 2.1.1 +* datadog-druid: 2.5.1 +* datadog-ecs-fargate: 4.3.1 +* datadog-eks-fargate: 4.2.1 +* datadog-elastic: 6.3.1 +* datadog-envoy: 3.5.1 +* datadog-esxi: 1.2.0 +* datadog-etcd: 6.2.2 +* datadog-exchange-server: 2.1.1 +* datadog-external-dns: 3.2.1 +* datadog-flink: 1.5.0 +* datadog-fluentd: 3.2.1 +* datadog-fluxcd: 1.2.2 +* datadog-fly-io: 1.0.0 +* datadog-foundationdb: 1.4.0 +* datadog-gearmand: 3.1.0 +* datadog-gitlab-runner: 4.2.1 +* datadog-gitlab: 7.3.2 +* datadog-glusterfs: 1.7.0 +* datadog-go-expvar: 2.5.1 +* datadog-gunicorn: 2.7.1 +* datadog-haproxy: 5.2.2 +* datadog-harbor: 3.2.2 +* datadog-hazelcast: 3.2.1 +* datadog-hdfs-datanode: 4.2.1 +* datadog-hdfs-namenode: 4.2.1 +* datadog-hive: 1.10.0 +* datadog-hivemq: 1.8.0 +* datadog-http-check: 9.8.0 +* datadog-hudi: 2.3.0 +* datadog-hyperv: 1.11.1 +* datadog-ibm-ace: 2.2.2 +* datadog-ibm-db2: 2.2.0 +* datadog-ibm-i: 2.2.0 +* datadog-ibm-mq: 6.3.0 +* datadog-ibm-was: 3.3.2 +* datadog-ignite: 2.4.0 +* datadog-iis: 3.1.1 +* datadog-impala: 2.2.2 +* datadog-istio: 6.1.2 +* datadog-jboss-wildfly: 2.2.0 +* datadog-journald: 1.2.0 +* datadog-kafka-consumer: 4.6.0 +* datadog-kafka: 2.16.0 +* datadog-karpenter: 1.5.0 +* datadog-kong: 3.2.2 +* datadog-kube-apiserver-metrics: 4.3.1 +* datadog-kube-controller-manager: 5.1.1 +* datadog-kube-dns: 4.4.1 +* datadog-kube-metrics-server: 3.3.1 +* datadog-kube-proxy: 6.3.1 +* datadog-kube-scheduler: 4.10.1 +* datadog-kubelet: 7.13.2 +* datadog-kubernetes-cluster-autoscaler: 1.0.1 +* datadog-kubernetes-state: 8.1.1 +* datadog-kyototycoon: 2.5.1 +* datadog-kyverno: 1.0.2 +* datadog-lighttpd: 3.5.1 +* datadog-linkerd: 4.2.2 +* datadog-linux-proc-extras: 2.5.0 +* datadog-mapr: 1.11.0 +* 
datadog-mapreduce: 4.2.1 +* datadog-marathon: 2.3.1 +* datadog-marklogic: 4.2.1 +* datadog-mcache: 4.1.0 +* datadog-mesos-master: 3.3.1 +* datadog-mesos-slave: 3.3.1 +* datadog-mongo: 6.9.0 +* datadog-mysql: 12.7.0 +* datadog-nagios: 1.13.0 +* datadog-network: 3.3.0 +* datadog-nfsstat: 1.13.0 +* datadog-nginx-ingress-controller: 2.6.1 +* datadog-nginx: 6.3.1 +* datadog-nvidia-triton: 1.2.2 +* datadog-openldap: 1.12.0 +* datadog-openmetrics: 4.2.2 +* datadog-openstack-controller: 6.9.0 +* datadog-openstack: 2.0.0 +* datadog-oracle: 5.2.0 +* datadog-ossec-security: 1.0.0 +* datadog-palo-alto-panorama: 1.0.0 +* datadog-pan-firewall: 1.2.0 +* datadog-pdh-check: 2.1.0 +* datadog-pgbouncer: 6.2.0 +* datadog-php-fpm: 3.3.1 +* datadog-ping-federate: 1.0.0 +* datadog-postfix: 1.14.0 +* datadog-postgres: 19.1.0 +* datadog-powerdns-recursor: 2.5.1 +* datadog-presto: 2.8.0 +* datadog-process: 3.4.0 +* datadog-prometheus: 3.6.0 +* datadog-proxysql: 5.1.1 +* datadog-pulsar: 2.2.2 +* datadog-rabbitmq: 5.3.2 +* datadog-ray: 1.2.2 +* datadog-redisdb: 5.7.0 +* datadog-rethinkdb: 3.1.0 +* datadog-riak: 3.5.1 +* datadog-riakcs: 2.11.0 +* datadog-sap-hana: 3.3.0 +* datadog-scylla: 2.7.2 +* datadog-sidekiq: 1.4.0 +* datadog-silk: 2.2.1 +* datadog-singlestore: 2.2.1 +* datadog-snmp: 7.4.0 +* datadog-snowflake: 5.8.0 +* datadog-solr: 1.13.0 +* datadog-sonarqube: 3.2.2 +* datadog-spark: 4.3.1 +* datadog-sqlserver: 17.5.2 +* datadog-squid: 2.5.1 +* datadog-ssh-check: 2.9.0 +* datadog-statsd: 1.12.0 +* datadog-strimzi: 2.2.2 +* datadog-supervisord: 2.6.0 +* datadog-suricata: 1.0.0 +* datadog-system-core: 2.4.0 +* datadog-system-swap: 1.18.0 +* datadog-tcp-check: 4.9.0 +* datadog-teamcity: 4.3.1 +* datadog-tekton: 1.0.2 +* datadog-teleport: 1.0.0 +* datadog-temporal: 2.3.0 +* datadog-tenable: 1.5.0 +* datadog-teradata: 2.2.1 +* datadog-tibco-ems: 1.0.0 +* datadog-tls: 2.19.0 +* datadog-tokumx: 3.2.0 +* datadog-tomcat: 2.0.0 +* datadog-torchserve: 2.2.2 +* datadog-traefik-mesh: 1.0.1 +* datadog-traffic-server: 2.2.1 +* datadog-twemproxy: 1.15.0 +* datadog-twistlock: 3.6.1 +* datadog-varnish: 2.1.0 +* datadog-vault: 4.2.1 +* datadog-vertica: 4.6.0 +* datadog-vllm: 1.0.0 +* datadog-voltdb: 3.2.1 +* datadog-vsphere: 7.6.0 +* datadog-weaviate: 2.3.3 +* datadog-weblogic: 1.3.0 +* datadog-win32-event-log: 3.3.0 +* datadog-windows-performance-counters: 2.1.1 +* datadog-windows-service: 4.9.1 +* datadog-wmi-check: 1.18.0 +* datadog-yarn: 5.3.1 +* datadog-zeek: 1.0.0 +* datadog-zk: 4.5.0 + ## Datadog Agent version 7.57.0 * datadog-active-directory: 2.1.1 From d74438853551ceb4c20bc55517ea3142a152788b Mon Sep 17 00:00:00 2001 From: Zhengda Lu Date: Wed, 18 Sep 2024 10:04:57 -0400 Subject: [PATCH 19/23] add comments to metrics to indicate which collector it comes from (#18611) --- mongo/datadog_checks/mongo/metrics.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/mongo/datadog_checks/mongo/metrics.py b/mongo/datadog_checks/mongo/metrics.py index 146cd07263387..f60a532481141 100644 --- a/mongo/datadog_checks/mongo/metrics.py +++ b/mongo/datadog_checks/mongo/metrics.py @@ -10,6 +10,7 @@ Core metrics collected by default. 
""" BASE_METRICS = { + # server_status collector "asserts.msg": RATE, "asserts.regular": RATE, "asserts.rollovers": RATE, @@ -125,14 +126,17 @@ "opcountersRepl.insert": RATE, "opcountersRepl.query": RATE, "opcountersRepl.update": RATE, + # replication_info collector "oplog.logSizeMB": GAUGE, "oplog.usedSizeMB": GAUGE, "oplog.timeDiff": GAUGE, + # replica collector "replSet.health": GAUGE, "replSet.replicationLag": GAUGE, "replSet.state": GAUGE, "replSet.votes": GAUGE, "replSet.voteFraction": GAUGE, + # db_stats collector "stats.avgObjSize": GAUGE, "stats.collections": GAUGE, "stats.dataSize": GAUGE, @@ -150,8 +154,10 @@ "stats.fsUsedSize": GAUGE, "stats.fsTotalSize": GAUGE, "stats.views": GAUGE, + # session_stats collector "sessions.count": GAUGE, "uptime": GAUGE, + # host_info collector "system.memSizeMB": (GAUGE, "system.mem.total"), # total amount of system memory "system.memLimitMB": (GAUGE, "system.mem.limit"), # memory usage limit "system.numCores": (GAUGE, "system.cpu.cores"), # number of CPU cores @@ -186,7 +192,8 @@ https://docs.mongodb.org/manual/reference/command/serverStatus/#serverStatus.metrics.commands """ COMMANDS_METRICS = { - # Required version > + # Required version > 3.0.0 + # server_status collector "metrics.commands.count.failed": RATE, "metrics.commands.count.total": GAUGE, "metrics.commands.createIndexes.failed": RATE, @@ -210,6 +217,7 @@ https://docs.mongodb.org/manual/reference/command/serverStatus/#server-status-locks """ LOCKS_METRICS = { + # server_status collector "locks.Collection.acquireCount.R": RATE, "locks.Collection.acquireCount.r": RATE, "locks.Collection.acquireCount.W": RATE, @@ -262,6 +270,7 @@ TCMalloc memory allocator report. """ TCMALLOC_METRICS = { + # server_status collector "tcmalloc.generic.current_allocated_bytes": GAUGE, "tcmalloc.generic.heap_size": GAUGE, "tcmalloc.tcmalloc.aggressive_memory_decommit": GAUGE, @@ -279,6 +288,7 @@ WiredTiger storage engine. 
""" WIREDTIGER_METRICS = { + # server_status collector "wiredTiger.cache.bytes currently in the cache": (GAUGE, "wiredTiger.cache.bytes_currently_in_cache"), "wiredTiger.cache.bytes read into cache": GAUGE, "wiredTiger.cache.bytes written from cache": GAUGE, @@ -315,6 +325,7 @@ https://docs.mongodb.org/v3.0/reference/command/top/ """ TOP_METRICS = { + # top collector "commands.count": RATE, "commands.time": GAUGE, "getmore.count": RATE, @@ -337,6 +348,7 @@ COLLECTION_METRICS = { # collection storage stats + # coll_stats collector 'collection.size': GAUGE, 'collection.avgObjSize': GAUGE, 'collection.count': GAUGE, @@ -365,6 +377,7 @@ } SHARDED_DATA_DISTRIBUTION_METRICS = { + # sharded_data_distribution collector 'numOrphanedDocs': (GAUGE, 'sharded_data_distribution.num_orphaned_docs'), 'numOwnedDocuments': (GAUGE, 'sharded_data_distribution.num_owned_documents'), 'ownedSizeBytes': (GAUGE, 'sharded_data_distribution.owned_size_bytes'), @@ -372,6 +385,7 @@ } INDEX_METRICS = { + # index stats collector 'indexes.accesses.ops': RATE, } From 3765e710e0d83088b3a3167037b9b0c9bdb71fc2 Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Wed, 18 Sep 2024 16:56:13 +0200 Subject: [PATCH 20/23] Remove six library from some integrations (#18604) * Remove six library from some integrations * fix mapreduce --- datadog_checks_dev/datadog_checks/dev/_env.py | 8 +- disk/datadog_checks/disk/disk.py | 9 +- .../datadog_checks/ecs_fargate/ecs_fargate.py | 13 +- .../kube_apiserver_metrics.py | 4 +- .../datadog_checks/mapreduce/mapreduce.py | 16 +- mcache/datadog_checks/mcache/mcache.py | 7 +- .../mesos_master/mesos_master.py | 10 +- mongo/datadog_checks/mongo/collectors/base.py | 13 +- .../mongo/collectors/coll_stats.py | 3 +- mongo/datadog_checks/mongo/collectors/top.py | 5 +- .../datadog_checks/mysql/collection_utils.py | 6 +- mysql/datadog_checks/mysql/innodb_metrics.py | 178 +++++++++--------- mysql/datadog_checks/mysql/mysql.py | 27 ++- nginx/datadog_checks/nginx/nginx.py | 20 +- redisdb/datadog_checks/redisdb/redisdb.py | 16 +- riakcs/datadog_checks/riakcs/riakcs.py | 7 +- sap_hana/datadog_checks/sap_hana/sap_hana.py | 4 +- spark/datadog_checks/spark/constants.py | 8 +- squid/datadog_checks/squid/squid.py | 4 +- .../datadog_checks/system_core/system_core.py | 5 +- .../datadog_checks/twemproxy/twemproxy.py | 5 +- .../datadog_checks/twistlock/twistlock.py | 3 +- varnish/datadog_checks/varnish/varnish.py | 22 +-- vsphere/datadog_checks/vsphere/api_rest.py | 3 +- vsphere/datadog_checks/vsphere/config.py | 9 +- .../vsphere/legacy/mor_cache.py | 8 +- vsphere/datadog_checks/vsphere/utils.py | 3 +- vsphere/datadog_checks/vsphere/vsphere.py | 9 +- vsphere/tests/legacy/utils.py | 3 +- yarn/datadog_checks/yarn/yarn.py | 12 +- zk/datadog_checks/zk/zk.py | 21 +-- 31 files changed, 198 insertions(+), 263 deletions(-) diff --git a/datadog_checks_dev/datadog_checks/dev/_env.py b/datadog_checks_dev/datadog_checks/dev/_env.py index 35b761a785d05..84f96bcab491a 100644 --- a/datadog_checks_dev/datadog_checks/dev/_env.py +++ b/datadog_checks_dev/datadog_checks/dev/_env.py @@ -5,8 +5,6 @@ import os from base64 import urlsafe_b64decode, urlsafe_b64encode -from six import iteritems - DDTRACE_OPTIONS_LIST = [ 'DD_TAGS', 'DD_TRACE*', @@ -56,7 +54,7 @@ def e2e_testing(): def set_env_vars(env_vars): - for key, value in iteritems(env_vars): + for key, value in env_vars.items(): key = '{}{}'.format(E2E_ENV_VAR_PREFIX, key) os.environ[key] = value @@ -68,11 +66,11 @@ def remove_env_vars(env_vars): def get_env_vars(raw=False): if raw: - 
return {key: value for key, value in iteritems(os.environ) if key.startswith(E2E_ENV_VAR_PREFIX)} + return {key: value for key, value in os.environ.items() if key.startswith(E2E_ENV_VAR_PREFIX)} else: env_vars = {} - for key, value in iteritems(os.environ): + for key, value in os.environ.items(): _, found, ev = key.partition(E2E_ENV_VAR_PREFIX) if found: # Normalize casing for Windows diff --git a/disk/datadog_checks/disk/disk.py b/disk/datadog_checks/disk/disk.py index 5999e32c9dc33..e4574f89b248e 100644 --- a/disk/datadog_checks/disk/disk.py +++ b/disk/datadog_checks/disk/disk.py @@ -9,7 +9,6 @@ from xml.etree import ElementTree as ET import psutil -from six import iteritems, string_types from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.base.utils.platform import Platform @@ -151,7 +150,7 @@ def check(self, _): self.log.debug('Passed: %s', part.device) tags = self._get_tags(part) - for metric_name, metric_value in iteritems(self._collect_part_metrics(part, disk_usage)): + for metric_name, metric_value in self._collect_part_metrics(part, disk_usage).items(): self.gauge(metric_name, metric_value, tags=tags) # Add in a disk read write or read only check @@ -324,7 +323,7 @@ def _collect_inodes_metrics(self, mountpoint): return metrics def collect_latency_metrics(self): - for disk_name, disk in iteritems(psutil.disk_io_counters(perdisk=True)): + for disk_name, disk in psutil.disk_io_counters(perdisk=True).items(): self.log.debug('IO Counters: %s -> %s', disk_name, disk) try: metric_tags = [] if self._custom_tags is None else self._custom_tags[:] @@ -389,7 +388,7 @@ def _compile_pattern_filters(self, instance): def _compile_valid_patterns(self, patterns, casing=IGNORE_CASE, extra_patterns=None): valid_patterns = [] - if isinstance(patterns, string_types): + if isinstance(patterns, str): patterns = [patterns] else: patterns = list(patterns) @@ -419,7 +418,7 @@ def _compile_tag_re(self): Compile regex strings from device_tag_re option and return list of compiled regex/tag pairs """ device_tag_list = [] - for regex_str, tags in iteritems(self._device_tag_re): + for regex_str, tags in self._device_tag_re.items(): try: device_tag_list.append([re.compile(regex_str, IGNORE_CASE), [t.strip() for t in tags.split(',')]]) except TypeError: diff --git a/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py b/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py index 9d55916a926cc..4ae362cc879b8 100644 --- a/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py +++ b/ecs_fargate/datadog_checks/ecs_fargate/ecs_fargate.py @@ -7,7 +7,6 @@ import requests from dateutil import parser -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.utils.common import round_value @@ -192,7 +191,7 @@ def check(self, _): ## Ephemeral Storage Metrics if 'EphemeralStorageMetrics' in metadata: es_metrics = metadata['EphemeralStorageMetrics'] - for field_name, metric_value in iteritems(es_metrics): + for field_name, metric_value in es_metrics.items(): metric_name = EPHEMERAL_STORAGE_GAUGE_METRICS.get(field_name) self.gauge(metric_name, metric_value, task_tags) @@ -229,7 +228,7 @@ def check(self, _): self.service_check('fargate_check', AgentCheck.WARNING, message=msg, tags=custom_tags) self.log.warning(msg, exc_info=True) - for container_id, container_stats in iteritems(stats): + for container_id, container_stats in stats.items(): if container_id not in exlcuded_cid: self.submit_perf_metrics(container_tags, container_id, container_stats) 
@@ -337,7 +336,7 @@ def submit_perf_metrics(self, container_tags, container_id, container_stats): self.gauge('ecs.fargate.mem.limit', value, tags) # I/O metrics - for blkio_cat, metric_name in iteritems(IO_METRICS): + for blkio_cat, metric_name in IO_METRICS.items(): read_counter = write_counter = 0 blkio_stats = container_stats.get("blkio_stats", {}).get(blkio_cat) @@ -363,13 +362,13 @@ def submit_perf_metrics(self, container_tags, container_id, container_stats): # Network metrics networks = container_stats.get('networks', {}) - for network_interface, network_stats in iteritems(networks): + for network_interface, network_stats in networks.items(): network_tags = tags + ["interface:{}".format(network_interface)] - for field_name, metric_name in iteritems(NETWORK_GAUGE_METRICS): + for field_name, metric_name in NETWORK_GAUGE_METRICS.items(): metric_value = network_stats.get(field_name) if metric_value is not None: self.gauge(metric_name, metric_value, network_tags) - for field_name, metric_name in iteritems(NETWORK_RATE_METRICS): + for field_name, metric_name in NETWORK_RATE_METRICS.items(): metric_value = network_stats.get(field_name) if metric_value is not None: self.rate(metric_name, metric_value, network_tags) diff --git a/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py b/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py index e8287b258a261..d07d8b168d411 100644 --- a/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py +++ b/kube_apiserver_metrics/datadog_checks/kube_apiserver_metrics/kube_apiserver_metrics.py @@ -4,8 +4,6 @@ from copy import deepcopy from re import match, search, sub -from six import iteritems - from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.errors import CheckException @@ -200,7 +198,7 @@ def submit_metric(self, metric_suffix, metric, scraper_config, gauge=True, monot # Explicit shallow copy of the instance tags _tags = list(scraper_config['custom_tags']) - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): _tags.append('{}:{}'.format(label_name, label_value)) if gauge: # submit raw metric diff --git a/mapreduce/datadog_checks/mapreduce/mapreduce.py b/mapreduce/datadog_checks/mapreduce/mapreduce.py index 893a6f3c63d48..31b850abaad47 100644 --- a/mapreduce/datadog_checks/mapreduce/mapreduce.py +++ b/mapreduce/datadog_checks/mapreduce/mapreduce.py @@ -1,12 +1,10 @@ # (C) Datadog, Inc. 
2010-present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) - +from urllib.parse import urljoin, urlsplit, urlunsplit from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from simplejson import JSONDecodeError -from six import iteritems, itervalues -from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.mapreduce.metrics import ( @@ -93,7 +91,7 @@ def check(self, instance): # Report success after gathering all metrics from Application Master if running_jobs: - job_id, metrics = next(iteritems(running_jobs)) + job_id, metrics = next(iter(running_jobs.items())) am_address = self._get_url_base(metrics['tracking_url']) self.service_check( @@ -249,7 +247,7 @@ def _mapreduce_job_metrics(self, running_apps, addl_tags): """ running_jobs = {} - for app_name, tracking_url in itervalues(running_apps): + for app_name, tracking_url in running_apps.values(): metrics_json = self._rest_request_to_json( tracking_url, self.MAPREDUCE_JOBS_PATH, self.MAPREDUCE_SERVICE_CHECK @@ -289,7 +287,7 @@ def _mapreduce_job_counters_metrics(self, running_jobs, addl_tags): """ Get custom metrics specified for each counter """ - for job_metrics in itervalues(running_jobs): + for job_metrics in running_jobs.values(): job_name = job_metrics['job_name'] # Check if the job_name exist in the custom metrics @@ -344,7 +342,7 @@ def _mapreduce_task_metrics(self, running_jobs, addl_tags): Get metrics for each MapReduce task Return a dictionary of {task_id: 'tracking_url'} for each MapReduce task """ - for job_stats in itervalues(running_jobs): + for job_stats in running_jobs.values(): metrics_json = self._rest_request_to_json( job_stats['tracking_url'], 'tasks', self.MAPREDUCE_SERVICE_CHECK, tags=addl_tags @@ -376,7 +374,7 @@ def _set_metrics_from_json(self, metrics_json, metrics, tags): """ Parse the JSON response and set the metrics """ - for status, (metric_name, metric_type) in iteritems(metrics): + for status, (metric_name, metric_type) in metrics.items(): metric_status = metrics_json.get(status) if metric_status is not None: @@ -415,7 +413,7 @@ def _rest_request_to_json(self, address, object_path, service_name=None, tags=No # Add kwargs as arguments if kwargs: - query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(kwargs)]) + query = '&'.join(['{}={}'.format(key, value) for key, value in kwargs.items()]) url = urljoin(url, '?' 
+ query) try: diff --git a/mcache/datadog_checks/mcache/mcache.py b/mcache/datadog_checks/mcache/mcache.py index 92d42af29c083..07e945c7ea161 100644 --- a/mcache/datadog_checks/mcache/mcache.py +++ b/mcache/datadog_checks/mcache/mcache.py @@ -4,7 +4,6 @@ from __future__ import division import bmemcached -from six import iteritems, itervalues from datadog_checks.base import AgentCheck, ConfigurationError @@ -111,7 +110,7 @@ def _process_response(self, response): if len(response) != 1: raise BadResponseError("Malformed response: {}".format(response)) - stats = list(itervalues(response))[0] + stats = list(response.values())[0] if not len(stats): raise BadResponseError("Malformed response for host: {}".format(stats)) @@ -177,7 +176,7 @@ def _get_metrics(self, client, tags, service_check_tags=None): raise def _get_optional_metrics(self, client, tags, options=None): - for arg, metrics_args in iteritems(self.OPTIONAL_STATS): + for arg, metrics_args in self.OPTIONAL_STATS.items(): if not options or options.get(arg, False): try: optional_rates = metrics_args[0] @@ -187,7 +186,7 @@ def _get_optional_metrics(self, client, tags, options=None): stats = self._process_response(client.stats(arg)) prefix = "memcache.{}".format(arg) - for metric, val in iteritems(stats): + for metric, val in stats.items(): # Check if metric is a gauge or rate metric_tags = [] if optional_fn: diff --git a/mesos_master/datadog_checks/mesos_master/mesos_master.py b/mesos_master/datadog_checks/mesos_master/mesos_master.py index e82cc2fb33c95..aaa2adc6fa489 100644 --- a/mesos_master/datadog_checks/mesos_master/mesos_master.py +++ b/mesos_master/datadog_checks/mesos_master/mesos_master.py @@ -6,9 +6,9 @@ Collects metrics from mesos master node, only the leader is sending metrics. """ +from urllib.parse import urlparse + import requests -from six import iteritems -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException @@ -305,7 +305,7 @@ def check(self, instance): framework_tags = ['framework_name:' + framework['name']] + tags self.GAUGE('mesos.framework.total_tasks', len(framework['tasks']), tags=framework_tags) resources = framework['used_resources'] - for key_name, (metric_name, metric_func) in iteritems(self.FRAMEWORK_METRICS): + for key_name, (metric_name, metric_func) in self.FRAMEWORK_METRICS.items(): metric_func(self, metric_name, resources[key_name], tags=framework_tags) role_metrics = self._get_master_roles(url, instance_tags) @@ -314,7 +314,7 @@ def check(self, instance): role_tags = ['mesos_role:' + role['name']] + tags self.GAUGE('mesos.role.frameworks.count', len(role['frameworks']), tags=role_tags) self.GAUGE('mesos.role.weight', role['weight'], tags=role_tags) - for key_name, (metric_name, metric_func) in iteritems(self.ROLE_RESOURCES_METRICS): + for key_name, (metric_name, metric_func) in self.ROLE_RESOURCES_METRICS.items(): try: metric_func(self, metric_name, role['resources'][key_name], tags=role_tags) except KeyError: @@ -335,7 +335,7 @@ def check(self, instance): self.STATS_METRICS, ] for m in metrics: - for key_name, (metric_name, metric_func) in iteritems(m): + for key_name, (metric_name, metric_func) in m.items(): if key_name in stats_metrics: metric_func(self, metric_name, stats_metrics[key_name], tags=tags) diff --git a/mongo/datadog_checks/mongo/collectors/base.py b/mongo/datadog_checks/mongo/collectors/base.py index 68d884ea1a81d..be88dfcf179af 100644 --- a/mongo/datadog_checks/mongo/collectors/base.py +++ 
b/mongo/datadog_checks/mongo/collectors/base.py @@ -4,14 +4,9 @@ import re -from six import PY3, iteritems - from datadog_checks.base import AgentCheck from datadog_checks.mongo.metrics import CASE_SENSITIVE_METRIC_NAME_SUFFIXES -if PY3: - long = int - class MongoCollector(object): """The base collector object, can be considered abstract. @@ -46,7 +41,7 @@ def _normalize(self, metric_name, submit_method, prefix=None): metric_suffix = "ps" if submit_method == AgentCheck.rate else "" # Replace case-sensitive metric name characters - for pattern, repl in iteritems(CASE_SENSITIVE_METRIC_NAME_SUFFIXES): + for pattern, repl in CASE_SENSITIVE_METRIC_NAME_SUFFIXES.items(): metric_name = re.compile(pattern).sub(repl, metric_name) # Normalize, and wrap @@ -93,11 +88,9 @@ def _submit_payload(self, payload, additional_tags=None, metrics_to_collect=None continue # value is now status[x][y][z] - if not isinstance(value, (int, long, float)): + if not isinstance(value, (int, float)): raise TypeError( - u"{0} value is a {1}, it should be an int, a float or a long instead.".format( - metric_name, type(value) - ) + u"{0} value is a {1}, it should be an int, or a float instead.".format(metric_name, type(value)) ) # Submit the metric diff --git a/mongo/datadog_checks/mongo/collectors/coll_stats.py b/mongo/datadog_checks/mongo/collectors/coll_stats.py index 68778a5546e16..02c8809641a24 100644 --- a/mongo/datadog_checks/mongo/collectors/coll_stats.py +++ b/mongo/datadog_checks/mongo/collectors/coll_stats.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from pymongo.errors import OperationFailure -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.mongo.collectors.base import MongoCollector @@ -96,7 +95,7 @@ def collect(self, api): # Submit the indexSizes metrics manually if index_sizes: metric_name_alias = self._normalize("collection.indexSizes", AgentCheck.gauge) - for idx, val in iteritems(index_sizes): + for idx, val in index_sizes.items(): # we tag the index idx_tags = self.base_tags + additional_tags + ["index:%s" % idx] self.gauge(metric_name_alias, val, tags=idx_tags) diff --git a/mongo/datadog_checks/mongo/collectors/top.py b/mongo/datadog_checks/mongo/collectors/top.py index fe51fd92ae8ad..9c7b186661c7d 100644 --- a/mongo/datadog_checks/mongo/collectors/top.py +++ b/mongo/datadog_checks/mongo/collectors/top.py @@ -1,9 +1,6 @@ # (C) Datadog, Inc. 2020-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) - -from six import iteritems - from datadog_checks.mongo.collectors.base import MongoCollector from datadog_checks.mongo.common import MongosDeployment, ReplicaSetDeployment from datadog_checks.mongo.metrics import TOP_METRICS @@ -26,7 +23,7 @@ def compatible_with(self, deployment): def collect(self, api): dbtop = api["admin"].command('top') - for ns, ns_metrics in iteritems(dbtop['totals']): + for ns, ns_metrics in dbtop['totals'].items(): if "." 
not in ns: continue diff --git a/mysql/datadog_checks/mysql/collection_utils.py b/mysql/datadog_checks/mysql/collection_utils.py index f87c952227385..0846401c70038 100644 --- a/mysql/datadog_checks/mysql/collection_utils.py +++ b/mysql/datadog_checks/mysql/collection_utils.py @@ -3,8 +3,6 @@ # Licensed under Simplified BSD License (see LICENSE) import logging -from six import iteritems, text_type - log = logging.getLogger(__name__) @@ -12,7 +10,7 @@ def collect_all_scalars(key, dictionary): if key not in dictionary or dictionary[key] is None: yield None, None elif isinstance(dictionary[key], dict): - for tag, _ in iteritems(dictionary[key]): + for tag, _ in dictionary[key].items(): yield tag, collect_type(tag, dictionary[key], float) else: yield None, collect_type(key, dictionary, float) @@ -23,7 +21,7 @@ def collect_scalar(key, mapping): def collect_string(key, mapping): - return collect_type(key, mapping, text_type) + return collect_type(key, mapping, str) def collect_type(key, mapping, the_type): diff --git a/mysql/datadog_checks/mysql/innodb_metrics.py b/mysql/datadog_checks/mysql/innodb_metrics.py index cd4debe4d8448..b33bcc3868ae1 100644 --- a/mysql/datadog_checks/mysql/innodb_metrics.py +++ b/mysql/datadog_checks/mysql/innodb_metrics.py @@ -6,7 +6,6 @@ from contextlib import closing import pymysql -from six import PY3, iteritems from datadog_checks.base import is_affirmative from datadog_checks.base.log import get_check_logger @@ -15,9 +14,6 @@ from .collection_utils import collect_scalar from .const import OPTIONAL_INNODB_VARS -if PY3: - long = int - def _are_values_numeric(array): return all(v.isdigit() for v in array) @@ -79,38 +75,38 @@ def get_stats_from_innodb_status(self, db): row = [item.strip(']') for item in row] if line.startswith('---BUFFER POOL'): - buffer_id = long(row[2]) + buffer_id = int(row[2]) # SEMAPHORES if line.find('Mutex spin waits') == 0: # Mutex spin waits 79626940, rounds 157459864, OS waits 698719 # Mutex spin waits 0, rounds 247280272495, OS waits 316513438 - results['Innodb_mutex_spin_waits'] = long(row[3]) - results['Innodb_mutex_spin_rounds'] = long(row[5]) - results['Innodb_mutex_os_waits'] = long(row[8]) + results['Innodb_mutex_spin_waits'] = int(row[3]) + results['Innodb_mutex_spin_rounds'] = int(row[5]) + results['Innodb_mutex_os_waits'] = int(row[8]) elif line.find('RW-shared spins') == 0 and line.find(';') > 0: # RW-shared spins 3859028, OS waits 2100750; RW-excl spins # 4641946, OS waits 1530310 - results['Innodb_s_lock_spin_waits'] = long(row[2]) - results['Innodb_x_lock_spin_waits'] = long(row[8]) - results['Innodb_s_lock_os_waits'] = long(row[5]) - results['Innodb_x_lock_os_waits'] = long(row[11]) + results['Innodb_s_lock_spin_waits'] = int(row[2]) + results['Innodb_x_lock_spin_waits'] = int(row[8]) + results['Innodb_s_lock_os_waits'] = int(row[5]) + results['Innodb_x_lock_os_waits'] = int(row[11]) elif line.find('RW-shared spins') == 0 and line.find('; RW-excl spins') == -1: # Post 5.5.17 SHOW ENGINE INNODB STATUS syntax # RW-shared spins 604733, rounds 8107431, OS waits 241268 - results['Innodb_s_lock_spin_waits'] = long(row[2]) - results['Innodb_s_lock_spin_rounds'] = long(row[4]) - results['Innodb_s_lock_os_waits'] = long(row[7]) + results['Innodb_s_lock_spin_waits'] = int(row[2]) + results['Innodb_s_lock_spin_rounds'] = int(row[4]) + results['Innodb_s_lock_os_waits'] = int(row[7]) elif line.find('RW-excl spins') == 0: # Post 5.5.17 SHOW ENGINE INNODB STATUS syntax # RW-excl spins 604733, rounds 8107431, OS waits 241268 - 
results['Innodb_x_lock_spin_waits'] = long(row[2]) - results['Innodb_x_lock_spin_rounds'] = long(row[4]) - results['Innodb_x_lock_os_waits'] = long(row[7]) + results['Innodb_x_lock_spin_waits'] = int(row[2]) + results['Innodb_x_lock_spin_rounds'] = int(row[4]) + results['Innodb_x_lock_os_waits'] = int(row[7]) elif line.find('seconds the semaphore:') > 0: # --Thread 907205 has waited at handler/ha_innodb.cc line 7156 for 1.00 seconds the semaphore: results['Innodb_semaphore_waits'] += 1 - results['Innodb_semaphore_wait_time'] += long(float(row[9])) * 1000 + results['Innodb_semaphore_wait_time'] += int(float(row[9])) * 1000 # TRANSACTIONS elif line.find('Trx id counter') == 0: @@ -121,7 +117,7 @@ def get_stats_from_innodb_status(self, db): txn_seen = True elif line.find('History list length') == 0: # History list length 132 - results['Innodb_history_list_length'] = long(row[3]) + results['Innodb_history_list_length'] = int(row[3]) elif txn_seen and line.find('---TRANSACTION') == 0: # ---TRANSACTION 0, not started, process no 13510, OS thread id 1170446656 results['Innodb_current_transactions'] += 1 @@ -129,76 +125,76 @@ def get_stats_from_innodb_status(self, db): results['Innodb_active_transactions'] += 1 elif line.find('read views open inside InnoDB') > 0: # 1 read views open inside InnoDB - results['Innodb_read_views'] = long(row[0]) + results['Innodb_read_views'] = int(row[0]) elif line.find('mysql tables in use') == 0: # mysql tables in use 2, locked 2 - results['Innodb_tables_in_use'] += long(row[4]) - results['Innodb_locked_tables'] += long(row[6]) + results['Innodb_tables_in_use'] += int(row[4]) + results['Innodb_locked_tables'] += int(row[6]) elif txn_seen and line.find('lock struct(s)') > 0: # 23 lock struct(s), heap size 3024, undo log entries 27 # LOCK WAIT 12 lock struct(s), heap size 3024, undo log entries 5 # LOCK WAIT 2 lock struct(s), heap size 368 if line.find('LOCK WAIT') == 0: - results['Innodb_lock_structs'] += long(row[2]) + results['Innodb_lock_structs'] += int(row[2]) results['Innodb_locked_transactions'] += 1 elif line.find('ROLLING BACK') == 0: # ROLLING BACK 127539 lock struct(s), heap size 15201832, # 4411492 row lock(s), undo log entries 1042488 - results['Innodb_lock_structs'] += long(row[2]) + results['Innodb_lock_structs'] += int(row[2]) else: - results['Innodb_lock_structs'] += long(row[0]) + results['Innodb_lock_structs'] += int(row[0]) # FILE I/O elif line.find(' OS file reads, ') > 0: # 8782182 OS file reads, 15635445 OS file writes, 947800 OS # fsyncs - results['Innodb_os_file_reads'] = long(row[0]) - results['Innodb_os_file_writes'] = long(row[4]) - results['Innodb_os_file_fsyncs'] = long(row[8]) + results['Innodb_os_file_reads'] = int(row[0]) + results['Innodb_os_file_writes'] = int(row[4]) + results['Innodb_os_file_fsyncs'] = int(row[8]) elif line.find('Pending normal aio reads:') == 0: try: if len(row) == 8: # (len(row) == 8) Pending normal aio reads: 0, aio writes: 0, - results['Innodb_pending_normal_aio_reads'] = long(row[4]) - results['Innodb_pending_normal_aio_writes'] = long(row[7]) + results['Innodb_pending_normal_aio_reads'] = int(row[4]) + results['Innodb_pending_normal_aio_writes'] = int(row[7]) elif len(row) == 14: # (len(row) == 14) Pending normal aio reads: 0 [0, 0] , aio writes: 0 [0, 0] , - results['Innodb_pending_normal_aio_reads'] = long(row[4]) - results['Innodb_pending_normal_aio_writes'] = long(row[10]) + results['Innodb_pending_normal_aio_reads'] = int(row[4]) + results['Innodb_pending_normal_aio_writes'] = int(row[10]) elif 
len(row) == 16: # (len(row) == 16) Pending normal aio reads: [0, 0, 0, 0] , aio writes: [0, 0, 0, 0] , if _are_values_numeric(row[4:8]) and _are_values_numeric(row[11:15]): results['Innodb_pending_normal_aio_reads'] = ( - long(row[4]) + long(row[5]) + long(row[6]) + long(row[7]) + int(row[4]) + int(row[5]) + int(row[6]) + int(row[7]) ) results['Innodb_pending_normal_aio_writes'] = ( - long(row[11]) + long(row[12]) + long(row[13]) + long(row[14]) + int(row[11]) + int(row[12]) + int(row[13]) + int(row[14]) ) # (len(row) == 16) Pending normal aio reads: 0 [0, 0, 0, 0] , aio writes: 0 [0, 0] , elif _are_values_numeric(row[4:9]) and _are_values_numeric(row[12:15]): - results['Innodb_pending_normal_aio_reads'] = long(row[4]) - results['Innodb_pending_normal_aio_writes'] = long(row[12]) + results['Innodb_pending_normal_aio_reads'] = int(row[4]) + results['Innodb_pending_normal_aio_writes'] = int(row[12]) else: self.log.warning("Can't parse result line %s", line) elif len(row) == 18: # (len(row) == 18) Pending normal aio reads: 0 [0, 0, 0, 0] , aio writes: 0 [0, 0, 0, 0] , - results['Innodb_pending_normal_aio_reads'] = long(row[4]) - results['Innodb_pending_normal_aio_writes'] = long(row[12]) + results['Innodb_pending_normal_aio_reads'] = int(row[4]) + results['Innodb_pending_normal_aio_writes'] = int(row[12]) elif len(row) == 22: # (len(row) == 22) # Pending normal aio reads: 0 [0, 0, 0, 0, 0, 0, 0, 0] , aio writes: 0 [0, 0, 0, 0] , - results['Innodb_pending_normal_aio_reads'] = long(row[4]) - results['Innodb_pending_normal_aio_writes'] = long(row[16]) + results['Innodb_pending_normal_aio_reads'] = int(row[4]) + results['Innodb_pending_normal_aio_writes'] = int(row[16]) except ValueError as e: self.log.warning("Can't parse result line %s: %s", line, e) elif line.find('ibuf aio reads') == 0: # ibuf aio reads: 0, log i/o's: 0, sync i/o's: 0 # or ibuf aio reads:, log i/o's:, sync i/o's: if len(row) == 10: - results['Innodb_pending_ibuf_aio_reads'] = long(row[3]) - results['Innodb_pending_aio_log_ios'] = long(row[6]) - results['Innodb_pending_aio_sync_ios'] = long(row[9]) + results['Innodb_pending_ibuf_aio_reads'] = int(row[3]) + results['Innodb_pending_aio_log_ios'] = int(row[6]) + results['Innodb_pending_aio_sync_ios'] = int(row[9]) elif len(row) == 7: results['Innodb_pending_ibuf_aio_reads'] = 0 results['Innodb_pending_aio_log_ios'] = 0 @@ -206,11 +202,11 @@ def get_stats_from_innodb_status(self, db): elif line.find('Pending flushes (fsync)') == 0: if len(row) == 4: # Pending flushes (fsync): 0 - results['Innodb_pending_buffer_pool_flushes'] = long(row[3]) + results['Innodb_pending_buffer_pool_flushes'] = int(row[3]) else: # Pending flushes (fsync) log: 0; buffer pool: 0 - results['Innodb_pending_log_flushes'] = long(row[4]) - results['Innodb_pending_buffer_pool_flushes'] = long(row[7]) + results['Innodb_pending_log_flushes'] = int(row[4]) + results['Innodb_pending_buffer_pool_flushes'] = int(row[7]) # INSERT BUFFER AND ADAPTIVE HASH INDEX elif line.find('Ibuf for space 0: size ') == 0: @@ -218,24 +214,24 @@ def get_stats_from_innodb_status(self, db): # had two lines in the output. Newer has just one line, see below. 
# Ibuf for space 0: size 1, free list len 887, seg size 889, is not empty # Ibuf for space 0: size 1, free list len 887, seg size 889, - results['Innodb_ibuf_size'] = long(row[5]) - results['Innodb_ibuf_free_list'] = long(row[9]) - results['Innodb_ibuf_segment_size'] = long(row[12]) + results['Innodb_ibuf_size'] = int(row[5]) + results['Innodb_ibuf_free_list'] = int(row[9]) + results['Innodb_ibuf_segment_size'] = int(row[12]) elif line.find('Ibuf: size ') == 0: # Ibuf: size 1, free list len 4634, seg size 4636, - results['Innodb_ibuf_size'] = long(row[2]) - results['Innodb_ibuf_free_list'] = long(row[6]) - results['Innodb_ibuf_segment_size'] = long(row[9]) + results['Innodb_ibuf_size'] = int(row[2]) + results['Innodb_ibuf_free_list'] = int(row[6]) + results['Innodb_ibuf_segment_size'] = int(row[9]) if line.find('merges') > -1: - results['Innodb_ibuf_merges'] = long(row[10]) + results['Innodb_ibuf_merges'] = int(row[10]) elif line.find(', delete mark ') > 0 and prev_line.find('merged operations:') == 0: # Output of show engine innodb status has changed in 5.5 # merged operations: # insert 593983, delete mark 387006, delete 73092 - results['Innodb_ibuf_merged_inserts'] = long(row[1]) - results['Innodb_ibuf_merged_delete_marks'] = long(row[4]) - results['Innodb_ibuf_merged_deletes'] = long(row[6]) + results['Innodb_ibuf_merged_inserts'] = int(row[1]) + results['Innodb_ibuf_merged_delete_marks'] = int(row[4]) + results['Innodb_ibuf_merged_deletes'] = int(row[6]) results['Innodb_ibuf_merged'] = ( results['Innodb_ibuf_merged_inserts'] + results['Innodb_ibuf_merged_delete_marks'] @@ -243,85 +239,85 @@ def get_stats_from_innodb_status(self, db): ) elif line.find(' merged recs, ') > 0: # 19817685 inserts, 19817684 merged recs, 3552620 merges - results['Innodb_ibuf_merged_inserts'] = long(row[0]) - results['Innodb_ibuf_merged'] = long(row[2]) - results['Innodb_ibuf_merges'] = long(row[5]) + results['Innodb_ibuf_merged_inserts'] = int(row[0]) + results['Innodb_ibuf_merged'] = int(row[2]) + results['Innodb_ibuf_merges'] = int(row[5]) elif line.find('Hash table size ') == 0: # In some versions of InnoDB, the used cells is omitted. # Hash table size 4425293, used cells 4229064, .... # Hash table size 57374437, node heap has 72964 buffer(s) <-- # no used cells - results['Innodb_hash_index_cells_total'] = long(row[3]) - results['Innodb_hash_index_cells_used'] = long(row[6]) if line.find('used cells') > 0 else 0 + results['Innodb_hash_index_cells_total'] = int(row[3]) + results['Innodb_hash_index_cells_used'] = int(row[6]) if line.find('used cells') > 0 else 0 # LOG elif line.find(" log i/o's done, ") > 0: # 3430041 log i/o's done, 17.44 log i/o's/second # 520835887 log i/o's done, 17.28 log i/o's/second, 518724686 # syncs, 2980893 checkpoints - results['Innodb_log_writes'] = long(row[0]) + results['Innodb_log_writes'] = int(row[0]) elif line.find(" pending log writes, ") > 0: # 0 pending log writes, 0 pending chkp writes - results['Innodb_pending_log_writes'] = long(row[0]) - results['Innodb_pending_checkpoint_writes'] = long(row[4]) + results['Innodb_pending_log_writes'] = int(row[0]) + results['Innodb_pending_checkpoint_writes'] = int(row[4]) elif line.find("Log sequence number") == 0: # This number is NOT printed in hex in InnoDB plugin. # Log sequence number 272588624 - results['Innodb_lsn_current'] = long(row[3]) + results['Innodb_lsn_current'] = int(row[3]) elif line.find("Log flushed up to") == 0: # This number is NOT printed in hex in InnoDB plugin. 
# Log flushed up to 272588624 - results['Innodb_lsn_flushed'] = long(row[4]) + results['Innodb_lsn_flushed'] = int(row[4]) elif line.find("Last checkpoint at") == 0: # Last checkpoint at 272588624 - results['Innodb_lsn_last_checkpoint'] = long(row[3]) + results['Innodb_lsn_last_checkpoint'] = int(row[3]) # BUFFER POOL AND MEMORY elif line.find("Total memory allocated") == 0 and line.find("in additional pool allocated") > 0: # Total memory allocated 29642194944; in additional pool allocated 0 # Total memory allocated by read views 96 - results['Innodb_mem_total'] = long(row[3]) - results['Innodb_mem_additional_pool'] = long(row[8]) + results['Innodb_mem_total'] = int(row[3]) + results['Innodb_mem_additional_pool'] = int(row[8]) elif line.find('Adaptive hash index ') == 0: # Adaptive hash index 1538240664 (186998824 + 1351241840) - results['Innodb_mem_adaptive_hash'] = long(row[3]) + results['Innodb_mem_adaptive_hash'] = int(row[3]) elif line.find('Page hash ') == 0: # Page hash 11688584 - results['Innodb_mem_page_hash'] = long(row[2]) + results['Innodb_mem_page_hash'] = int(row[2]) elif line.find('Dictionary cache ') == 0: # Dictionary cache 145525560 (140250984 + 5274576) - results['Innodb_mem_dictionary'] = long(row[2]) + results['Innodb_mem_dictionary'] = int(row[2]) elif line.find('File system ') == 0: # File system 313848 (82672 + 231176) - results['Innodb_mem_file_system'] = long(row[2]) + results['Innodb_mem_file_system'] = int(row[2]) elif line.find('Lock system ') == 0: # Lock system 29232616 (29219368 + 13248) - results['Innodb_mem_lock_system'] = long(row[2]) + results['Innodb_mem_lock_system'] = int(row[2]) elif line.find('Recovery system ') == 0: # Recovery system 0 (0 + 0) - results['Innodb_mem_recovery_system'] = long(row[2]) + results['Innodb_mem_recovery_system'] = int(row[2]) elif line.find('Threads ') == 0: # Threads 409336 (406936 + 2400) - results['Innodb_mem_thread_hash'] = long(row[1]) + results['Innodb_mem_thread_hash'] = int(row[1]) elif line.find("Buffer pool size ") == 0: # The " " after size is necessary to avoid matching the wrong line: # Buffer pool size 1769471 # Buffer pool size, bytes 28991012864 if buffer_id == -1: - results['Innodb_buffer_pool_pages_total'] = long(row[3]) + results['Innodb_buffer_pool_pages_total'] = int(row[3]) elif line.find("Free buffers") == 0: # Free buffers 0 if buffer_id == -1: - results['Innodb_buffer_pool_pages_free'] = long(row[2]) + results['Innodb_buffer_pool_pages_free'] = int(row[2]) elif line.find("Database pages") == 0: # Database pages 1696503 if buffer_id == -1: - results['Innodb_buffer_pool_pages_data'] = long(row[2]) + results['Innodb_buffer_pool_pages_data'] = int(row[2]) elif line.find("Modified db pages") == 0: # Modified db pages 160602 if buffer_id == -1: - results['Innodb_buffer_pool_pages_dirty'] = long(row[3]) + results['Innodb_buffer_pool_pages_dirty'] = int(row[3]) elif line.find("Pages read ahead") == 0: # Must do this BEFORE the next test, otherwise it'll get fooled by this # line from the new plugin: @@ -330,22 +326,22 @@ def get_stats_from_innodb_status(self, db): elif line.find("Pages read") == 0: # Pages read 15240822, created 1770238, written 21705836 if buffer_id == -1: - results['Innodb_pages_read'] = long(row[2]) - results['Innodb_pages_created'] = long(row[4]) - results['Innodb_pages_written'] = long(row[6]) + results['Innodb_pages_read'] = int(row[2]) + results['Innodb_pages_created'] = int(row[4]) + results['Innodb_pages_written'] = int(row[6]) # ROW OPERATIONS elif line.find('Number of rows 
inserted') == 0: # Number of rows inserted 50678311, updated 66425915, deleted # 20605903, read 454561562 - results['Innodb_rows_inserted'] = long(row[4]) - results['Innodb_rows_updated'] = long(row[6]) - results['Innodb_rows_deleted'] = long(row[8]) - results['Innodb_rows_read'] = long(row[10]) + results['Innodb_rows_inserted'] = int(row[4]) + results['Innodb_rows_updated'] = int(row[6]) + results['Innodb_rows_deleted'] = int(row[8]) + results['Innodb_rows_read'] = int(row[10]) elif line.find(" queries inside InnoDB, ") > 0: # 0 queries inside InnoDB, 0 queries in queue - results['Innodb_queries_inside'] = long(row[0]) - results['Innodb_queries_queued'] = long(row[4]) + results['Innodb_queries_inside'] = int(row[0]) + results['Innodb_queries_queued'] = int(row[4]) prev_line = line @@ -357,7 +353,7 @@ def get_stats_from_innodb_status(self, db): # Finally we change back the metrics values to string to make the values # consistent with how they are reported by SHOW GLOBAL STATUS - for metric, value in list(iteritems(results)): + for metric, value in list(results.items()): results[metric] = str(value) return results diff --git a/mysql/datadog_checks/mysql/mysql.py b/mysql/datadog_checks/mysql/mysql.py index a4c2eeb6a4ec2..de45c43013417 100644 --- a/mysql/datadog_checks/mysql/mysql.py +++ b/mysql/datadog_checks/mysql/mysql.py @@ -13,7 +13,6 @@ import pymysql from cachetools import TTLCache -from six import PY3, iteritems, itervalues from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.db import QueryExecutor, QueryManager @@ -96,10 +95,6 @@ from ..stubs import datadog_agent -if PY3: - long = int - - class MySql(AgentCheck): SERVICE_CHECK_NAME = 'mysql.can_connect' SLAVE_SERVICE_CHECK_NAME = 'mysql.replication.slave_running' @@ -726,9 +721,9 @@ def _check_replication_status(self, results): if replica_sql_running is None: replica_sql_running = collect_type('Replica_SQL_Running', results, dict) if replica_io_running: - replica_io_running = any(v.lower().strip() == 'yes' for v in itervalues(replica_io_running)) + replica_io_running = any(v.lower().strip() == 'yes' for v in replica_io_running.values()) if replica_sql_running: - replica_sql_running = any(v.lower().strip() == 'yes' for v in itervalues(replica_sql_running)) + replica_sql_running = any(v.lower().strip() == 'yes' for v in replica_sql_running.values()) binlog_running = results.get('Binlog_enabled', False) # replicas will only be collected if user has PROCESS privileges. @@ -813,7 +808,7 @@ def _is_group_replication_active(self, db): return False def _submit_metrics(self, variables, db_results, tags): - for variable, metric in iteritems(variables): + for variable, metric in variables.items(): if isinstance(metric, list): for m in metric: metric_name, metric_type = m @@ -856,7 +851,7 @@ def _collect_dict(self, metric_type, field_metric_map, query, db, tags): cursor.execute(query) result = cursor.fetchone() if result is not None: - for field, metric in list(iteritems(field_metric_map)): + for field, metric in field_metric_map.items(): # Find the column name in the cursor description to identify the column index # http://www.python.org/dev/peps/pep-0249/ # cursor.description is a tuple of (column_name, ..., ...) 
@@ -903,7 +898,7 @@ def _get_runtime_aurora_tags(self, db): def _collect_system_metrics(self, host, db, tags): pid = None # The server needs to run locally, accessed by TCP or socket - if host in ["localhost", "127.0.0.1", "0.0.0.0"] or db.port == long(0): + if host in ["localhost", "127.0.0.1", "0.0.0.0"] or db.port == int(0): pid = self._get_server_pid(db) if pid: @@ -1021,7 +1016,7 @@ def _get_binary_log_stats(self, db): master_logs = {result[0]: result[1] for result in cursor_results} binary_log_space = 0 - for value in itervalues(master_logs): + for value in master_logs.values(): binary_log_space += value return binary_log_space @@ -1059,7 +1054,7 @@ def _get_replica_stats(self, db, is_mariadb, replication_channel): # MySQL <5.7 does not have Channel_Name. # For MySQL >=5.7 'Channel_Name' is set to an empty string by default channel = replication_channel or replica_result.get('Channel_Name') or 'default' - for key, value in iteritems(replica_result): + for key, value in replica_result.items(): if value is not None: replica_results[key]['channel:{0}'.format(channel)] = value except (pymysql.err.InternalError, pymysql.err.OperationalError) as e: @@ -1161,7 +1156,7 @@ def _query_exec_time_per_schema(self, db): schema_query_avg_run_time = {} for row in cursor.fetchall(): schema_name = str(row[0]) - avg_us = long(row[1]) + avg_us = int(row[1]) # set the tag as the dictionary key schema_query_avg_run_time["schema:{0}".format(schema_name)] = avg_us @@ -1216,7 +1211,7 @@ def _query_size_per_schema(self, db): schema_size = {} for row in cursor.fetchall(): schema_name = str(row[0]) - size = long(row[1]) + size = int(row[1]) # set the tag as the dictionary key schema_size["schema:{0}".format(schema_name)] = size @@ -1241,8 +1236,8 @@ def _query_rows_stats_per_table(self, db): for row in cursor.fetchall(): table_schema = str(row[0]) table_name = str(row[1]) - rows_read_total = long(row[2]) - rows_changed_total = long(row[3]) + rows_read_total = int(row[2]) + rows_changed_total = int(row[3]) # set the tag as the dictionary key table_rows_read_total["schema:{},table:{}".format(table_schema, table_name)] = rows_read_total diff --git a/nginx/datadog_checks/nginx/nginx.py b/nginx/datadog_checks/nginx/nginx.py index 0f7006abfd52f..3dc525a9eea7c 100644 --- a/nginx/datadog_checks/nginx/nginx.py +++ b/nginx/datadog_checks/nginx/nginx.py @@ -4,10 +4,9 @@ import re from datetime import datetime from itertools import chain +from urllib.parse import urljoin, urlparse import simplejson as json -from six import PY3, iteritems, text_type -from six.moves.urllib.parse import urljoin, urlparse from datadog_checks.base import AgentCheck, ConfigurationError, to_native_string from datadog_checks.base.utils.time import get_timestamp @@ -15,9 +14,6 @@ from .const import PLUS_API_ENDPOINTS, PLUS_API_STREAM_ENDPOINTS, TAGGED_KEYS from .metrics import COUNT_METRICS, METRICS_SEND_AS_COUNT, METRICS_SEND_AS_HISTOGRAM, VTS_METRIC_MAP -if PY3: - long = int - if hasattr(datetime, 'fromisoformat'): fromisoformat = datetime.fromisoformat else: @@ -133,7 +129,7 @@ def _get_enabled_endpoints(self): supported_endpoints = self._supported_endpoints(available_endpoints) self.log.debug("Supported endpoints are %s", supported_endpoints) - return chain(iteritems(supported_endpoints)) + return chain(supported_endpoints.items()) except Exception as e: self.log.warning( "Could not determine available endpoints from the API, " @@ -250,13 +246,13 @@ def _get_plus_api_endpoints(self, use_stream=False): Returns all of either stream or default 
endpoints that the integration supports collecting metrics from based on the Plus API version """ - endpoints = iteritems({}) + endpoints = iter([]) available_plus_endpoints = PLUS_API_STREAM_ENDPOINTS if use_stream else PLUS_API_ENDPOINTS for earliest_version, new_endpoints in available_plus_endpoints.items(): if int(self.plus_api_version) >= int(earliest_version): - endpoints = chain(endpoints, iteritems(new_endpoints)) + endpoints = chain(endpoints, new_endpoints.items()) return endpoints def _get_all_plus_api_endpoints(self): @@ -370,10 +366,10 @@ def _flatten_json(cls, metric_base, val, tags): if tags is None: tags = [] tags = tags + [server] - for key, val2 in iteritems(val): + for key, val2 in val.items(): if key in TAGGED_KEYS: metric_name = '%s.%s' % (metric_base, TAGGED_KEYS[key]) - for tag_val, data in iteritems(val2): + for tag_val, data in val2.items(): tag = '%s:%s' % (TAGGED_KEYS[key], tag_val) output.extend(cls._flatten_json(metric_name, data, tags + [tag])) else: @@ -387,10 +383,10 @@ def _flatten_json(cls, metric_base, val, tags): elif isinstance(val, bool): output.append((metric_base, int(val), tags, 'gauge')) - elif isinstance(val, (int, float, long)): + elif isinstance(val, (int, float)): output.append((metric_base, val, tags, 'gauge')) - elif isinstance(val, (text_type, str)) and val[-1] == "Z": + elif isinstance(val, str) and val[-1] == "Z": try: # In the new Plus API, timestamps are now formatted # strings, some include microseconds, some don't... diff --git a/redisdb/datadog_checks/redisdb/redisdb.py b/redisdb/datadog_checks/redisdb/redisdb.py index 4155bcdb03e81..86298169a0b26 100644 --- a/redisdb/datadog_checks/redisdb/redisdb.py +++ b/redisdb/datadog_checks/redisdb/redisdb.py @@ -9,7 +9,6 @@ from copy import deepcopy import redis -from six import PY2, iteritems from datadog_checks.base import AgentCheck, ConfigurationError, ensure_unicode, is_affirmative from datadog_checks.base.utils.common import round_value @@ -266,7 +265,7 @@ def _check_db(self): elif info_name in self.RATE_KEYS: self.rate(self.RATE_KEYS[info_name], info[info_name], tags=tags) - for config_key, value in iteritems(config): + for config_key, value in config.items(): metric_name = self.CONFIG_GAUGE_KEYS.get(config_key) if metric_name is not None: self.gauge(metric_name, value, tags=tags) @@ -404,7 +403,7 @@ def _check_key_lengths(self, conn, tags): lengths[text_key]["key_type"] = key_type # Send the metrics for each db in the redis instance. - for key, total in iteritems(lengths): + for key, total in lengths.items(): # Only send non-zeros if tagged per db. if total["length"] > 0: self.gauge( @@ -416,7 +415,7 @@ def _check_key_lengths(self, conn, tags): # Warn if a key is missing from the entire redis instance. # Send 0 if the key is missing/empty from the entire redis instance. - for key, total in iteritems(lengths_overall): + for key, total in lengths_overall.items(): if total == 0: key_tags = ['key:{}'.format(key)] if instance_db: @@ -552,7 +551,7 @@ def _check_command_stats(self, conn, tags): self.warning('Could not retrieve command stats from Redis. 
INFO COMMANDSTATS only works with Redis >= 2.6.') return - for key, stats in iteritems(command_stats): + for key, stats in command_stats.items(): command = key.split('_', 1)[1] command_tags = tags + ['command:{}'.format(command)] @@ -572,11 +571,8 @@ def _collect_metadata(self, info): self.set_metadata('version', info['redis_version']) -_timer = time.time if PY2 else time.perf_counter - - def _call_and_time(func): - start_time = _timer() + start_time = time.perf_counter() rv = func() - end_time = _timer() + end_time = time.perf_counter() return rv, round_value((end_time - start_time) * 1000, 2) diff --git a/riakcs/datadog_checks/riakcs/riakcs.py b/riakcs/datadog_checks/riakcs/riakcs.py index 5f5f12ec8dffe..8185985da4140 100644 --- a/riakcs/datadog_checks/riakcs/riakcs.py +++ b/riakcs/datadog_checks/riakcs/riakcs.py @@ -8,7 +8,6 @@ import boto3 import simplejson as json from botocore.config import Config -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.config import _is_affirmative @@ -22,7 +21,7 @@ def multidict(ordered_pairs): d[k].append(v) # unpack lists that have only 1 item dict_copy = deepcopy(d) - for k, v in iteritems(dict_copy): + for k, v in dict_copy.items(): if len(v) == 1: d[k] = v[0] return dict(d) @@ -62,7 +61,7 @@ def process_stats(self, stats, tags, metrics): metrics.update(V21_DEFAULT_METRICS) else: metrics = V21_DEFAULT_METRICS - for key, value in iteritems(stats): + for key, value in stats.items(): if key not in metrics: continue suffix = key.rsplit("_", 1)[-1] @@ -72,7 +71,7 @@ def process_stats(self, stats, tags, metrics): # pre 2.1 stats format legends = {len(k): k for k in stats["legend"]} del stats["legend"] - for key, values in iteritems(stats): + for key, values in stats.items(): legend = legends[len(values)] for i, value in enumerate(values): metric_name = "riakcs.{0}.{1}".format(key, legend[i]) diff --git a/sap_hana/datadog_checks/sap_hana/sap_hana.py b/sap_hana/datadog_checks/sap_hana/sap_hana.py index d0f173b6c316e..227b93a411a39 100644 --- a/sap_hana/datadog_checks/sap_hana/sap_hana.py +++ b/sap_hana/datadog_checks/sap_hana/sap_hana.py @@ -18,8 +18,6 @@ from hdbcli.dbapi import Connection as HanaConnection except ImportError: HanaConnection = None -from six import iteritems -from six.moves import zip from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.common import total_time_to_temporal_percent @@ -265,7 +263,7 @@ def query_connection_overview(self): for conn in self.iter_rows(queries.GlobalSystemConnectionsStatus(schema=self._schema)): db_counts[(conn['db_name'], conn['host'], conn['port'])][conn['status'].lower()] += conn['total'] - for (db, host, port), counts in iteritems(db_counts): + for (db, host, port), counts in db_counts.items(): tags = ['db:{}'.format(db), 'hana_port:{}'.format(port)] tags.extend(self._tags) tags.append('hana_host:{}'.format(host)) diff --git a/spark/datadog_checks/spark/constants.py b/spark/datadog_checks/spark/constants.py index d66b3b410977d..4097fce403f4f 100644 --- a/spark/datadog_checks/spark/constants.py +++ b/spark/datadog_checks/spark/constants.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import re -from six import iteritems - # Identifier for cluster master address in `spark.yaml` MASTER_ADDRESS = 'spark_url' DEPRECATED_MASTER_ADDRESS = 'resourcemanager_uri' @@ -137,15 +135,15 @@ } SPARK_DRIVER_METRICS = { - key: (value[0].format('driver'), value[1]) for key, value in 
iteritems(SPARK_EXECUTOR_TEMPLATE_METRICS) + key: (value[0].format('driver'), value[1]) for key, value in SPARK_EXECUTOR_TEMPLATE_METRICS.items() } SPARK_EXECUTOR_METRICS = { - key: (value[0].format('executor'), value[1]) for key, value in iteritems(SPARK_EXECUTOR_TEMPLATE_METRICS) + key: (value[0].format('executor'), value[1]) for key, value in SPARK_EXECUTOR_TEMPLATE_METRICS.items() } SPARK_EXECUTOR_LEVEL_METRICS = { - key: (value[0].format('executor.id'), value[1]) for key, value in iteritems(SPARK_EXECUTOR_TEMPLATE_METRICS) + key: (value[0].format('executor.id'), value[1]) for key, value in SPARK_EXECUTOR_TEMPLATE_METRICS.items() } SPARK_RDD_METRICS = { diff --git a/squid/datadog_checks/squid/squid.py b/squid/datadog_checks/squid/squid.py index 42b2ead2f8406..a11875e6a74bb 100644 --- a/squid/datadog_checks/squid/squid.py +++ b/squid/datadog_checks/squid/squid.py @@ -5,9 +5,7 @@ import re import requests -from six import iteritems -# project from datadog_checks.base import AgentCheck EVENT_TYPE = SOURCE_TYPE_NAME = 'squid' @@ -100,7 +98,7 @@ def check(self, instance): counters = self.get_counters(host, port, tags + custom_tags) # Send these values as rate - for counter, value in iteritems(counters): + for counter, value in counters.items(): self.rate(counter, value, tags=tags + custom_tags) def get_counters(self, host, port, tags): diff --git a/system_core/datadog_checks/system_core/system_core.py b/system_core/datadog_checks/system_core/system_core.py index 8ab0d0a452b9a..979836880a5aa 100644 --- a/system_core/datadog_checks/system_core/system_core.py +++ b/system_core/datadog_checks/system_core/system_core.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import psutil -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.utils.platform import Platform @@ -22,11 +21,11 @@ def check(self, instance): for i, cpu in enumerate(cpu_times): tags = instance_tags + ['core:{0}'.format(i)] - for key, value in iteritems(cpu._asdict()): + for key, value in cpu._asdict().items(): self.rate('system.core.{0}'.format(key), 100.0 * value, tags=tags) total_cpu_times = psutil.cpu_times() - for key, value in iteritems(total_cpu_times._asdict()): + for key, value in total_cpu_times._asdict().items(): self.rate('system.core.{0}.total'.format(key), 100.0 * value / n_cpus, tags=instance_tags) # https://psutil.readthedocs.io/en/latest/#psutil.cpu_freq diff --git a/twemproxy/datadog_checks/twemproxy/twemproxy.py b/twemproxy/datadog_checks/twemproxy/twemproxy.py index 303357af28eb6..b6a2534a5eadc 100644 --- a/twemproxy/datadog_checks/twemproxy/twemproxy.py +++ b/twemproxy/datadog_checks/twemproxy/twemproxy.py @@ -4,7 +4,6 @@ import socket import simplejson as json -from six import iteritems from datadog_checks.base import AgentCheck, ensure_unicode @@ -158,11 +157,11 @@ def parse_json(cls, raw, tags=None): version = parsed.get('version', None) - for key, val in iteritems(parsed): + for key, val in parsed.items(): if isinstance(val, dict): # server pool pool_tags = tags + ['pool:%s' % key] - for server_key, server_val in iteritems(val): + for server_key, server_val in val.items(): if isinstance(server_val, dict): # server server_tags = pool_tags + ['server:%s' % server_key] diff --git a/twistlock/datadog_checks/twistlock/twistlock.py b/twistlock/datadog_checks/twistlock/twistlock.py index e8dd42f60fdb7..f543ee88d733f 100644 --- a/twistlock/datadog_checks/twistlock/twistlock.py +++ 
b/twistlock/datadog_checks/twistlock/twistlock.py @@ -7,7 +7,6 @@ from datetime import datetime, timedelta from dateutil import parser, tz -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.utils.serialization import json @@ -284,7 +283,7 @@ def _report_vuln_info(self, namespace, data, tags): cve_tags += ["package:{}".format(cve['packageName'])] self.gauge('{}.cve.details'.format(namespace), float(1), cve_tags) # Send counts to avoid no-data on zeroes - for severity, count in iteritems(summary): + for severity, count in summary.items(): cve_tags = SEVERITY_TAGS.get(severity, []) + tags self.gauge('{}.cve.count'.format(namespace), float(count), cve_tags) diff --git a/varnish/datadog_checks/varnish/varnish.py b/varnish/datadog_checks/varnish/varnish.py index 57bd381452d11..f617cbf27650e 100644 --- a/varnish/datadog_checks/varnish/varnish.py +++ b/varnish/datadog_checks/varnish/varnish.py @@ -8,16 +8,12 @@ from os import geteuid from packaging.version import Version -from six import PY3, iteritems from six.moves import filter from datadog_checks.base import ConfigurationError from datadog_checks.base.checks import AgentCheck from datadog_checks.base.utils.subprocess_output import get_subprocess_output -if PY3: - long = int - class BackendStatus(object): HEALTHY = 'healthy' @@ -100,11 +96,11 @@ def _end_element(self, name): if name == "stat": m_name = self.normalize(self._current_metric) if self._current_type in ("a", "c"): - self.rate(m_name, long(self._current_value), tags=self.tags) + self.rate(m_name, int(self._current_value), tags=self.tags) elif self._current_type in ("i", "g"): - self.gauge(m_name, long(self._current_value), tags=self.tags) + self.gauge(m_name, int(self._current_value), tags=self.tags) if 'n_purges' in m_name: - self.rate('varnish.n_purgesps', long(self._current_value), tags=self.tags) + self.rate('varnish.n_purgesps', int(self._current_value), tags=self.tags) else: # Unsupported data type, ignore self._reset() @@ -120,7 +116,7 @@ def _char_data(self, data): data = data.strip() if len(data) > 0 and self._current_element != "": if self._current_element == "value": - self._current_value = long(data) + self._current_value = int(data) elif self._current_element == "flag": self._current_type = data else: @@ -243,7 +239,7 @@ def _parse_varnishstat(self, output, varnishstat_format): json_output = json.loads(output) if "counters" in json_output: json_output = json_output["counters"] - for name, metric in iteritems(json_output): + for name, metric in json_output.items(): if not isinstance(metric, dict): # skip 'timestamp' field continue @@ -254,11 +250,11 @@ def _parse_varnishstat(self, output, varnishstat_format): value = metric.get("value", 0) if metric.get("flag") in ("a", "c"): - self.rate(metric_name, long(value), tags=self.tags) + self.rate(metric_name, int(value), tags=self.tags) elif metric.get("flag") in ("g", "i"): - self.gauge(metric_name, long(value), tags=self.tags) + self.gauge(metric_name, int(value), tags=self.tags) if 'n_purges' in self.normalize(name, prefix="varnish"): - self.rate('varnish.n_purgesps', long(value), tags=self.tags) + self.rate('varnish.n_purgesps', int(value), tags=self.tags) elif 'flag' not in metric: self.log.warning("Could not determine the type of metric %s, skipping submission", metric_name) self.log.debug("Raw metric %s is missing the `flag` field", str(metric)) @@ -367,7 +363,7 @@ def _submit_backend_service_checks(self, backends_by_status): if backends_by_status is None: return - for status, 
backends in iteritems(backends_by_status): + for status, backends in backends_by_status.items(): check_status = BackendStatus.to_check_status(status) for backend, message in backends: service_checks_tags = ['backend:%s' % backend] + self.custom_tags diff --git a/vsphere/datadog_checks/vsphere/api_rest.py b/vsphere/datadog_checks/vsphere/api_rest.py index 5de5d17b78f8d..cdf6df833c0c4 100644 --- a/vsphere/datadog_checks/vsphere/api_rest.py +++ b/vsphere/datadog_checks/vsphere/api_rest.py @@ -6,7 +6,6 @@ from typing import Any, Dict, Iterator, List, Set # noqa: F401 from pyVmomi import vim -from six import iteritems from datadog_checks.base.log import CheckLoggingAdapter # noqa: F401 from datadog_checks.base.utils.http import RequestsWrapper @@ -24,7 +23,7 @@ 'ClusterComputeResource': vim.ClusterComputeResource, } -MOR_TYPE_MAPPING_TO_STRING = {v: k for k, v in iteritems(MOR_TYPE_MAPPING_FROM_STRING)} +MOR_TYPE_MAPPING_TO_STRING = {v: k for k, v in MOR_TYPE_MAPPING_FROM_STRING.items()} class VSphereRestAPI(object): diff --git a/vsphere/datadog_checks/vsphere/config.py b/vsphere/datadog_checks/vsphere/config.py index 3cda46f780e52..366b091f3117d 100644 --- a/vsphere/datadog_checks/vsphere/config.py +++ b/vsphere/datadog_checks/vsphere/config.py @@ -6,7 +6,6 @@ from typing import Any, Dict, List # noqa: F401 from pyVmomi import vim -from six import iteritems, string_types from datadog_checks.base import ConfigurationError, is_affirmative from datadog_checks.base.log import CheckLoggingAdapter # noqa: F401 @@ -208,9 +207,7 @@ def _parse_resource_filters(self, all_resource_filters): ) # Check required fields and their types - for field, field_type in iteritems( - {'resource': string_types, 'property': string_types, 'type': string_types, 'patterns': list} - ): + for field, field_type in {'resource': str, 'property': str, 'type': str, 'patterns': list}.items(): if field not in resource_filter: self.log.warning( "Ignoring filter %r because it doesn't contain a %s field.", resource_filter, field @@ -282,7 +279,7 @@ def _parse_metric_regex_filters(self, all_metric_filters): # type: (MetricFilterConfig) -> MetricFilters allowed_resource_types = [MOR_TYPE_AS_STRING[k] for k in self.collected_resource_types] metric_filters = {} - for resource_type, filters in iteritems(all_metric_filters): + for resource_type, filters in all_metric_filters.items(): if resource_type not in allowed_resource_types: self.log.warning( "Ignoring metric_filter for resource '%s'. When collection_type is '%s', it should be one of '%s'", @@ -293,7 +290,7 @@ def _parse_metric_regex_filters(self, all_metric_filters): continue metric_filters[resource_type] = filters - return {k: [re.compile(r) for r in v] for k, v in iteritems(metric_filters)} + return {k: [re.compile(r) for r in v] for k, v in metric_filters.items()} def _normalize_event_resource_filters(self, filters): return [filter.lower() for filter in filters] diff --git a/vsphere/datadog_checks/vsphere/legacy/mor_cache.py b/vsphere/datadog_checks/vsphere/legacy/mor_cache.py index 276fa653a609f..2edb5081cf18b 100644 --- a/vsphere/datadog_checks/vsphere/legacy/mor_cache.py +++ b/vsphere/datadog_checks/vsphere/legacy/mor_cache.py @@ -4,8 +4,6 @@ import threading import time -from six import iteritems - from datadog_checks.vsphere.legacy.common import REALTIME_RESOURCES @@ -88,7 +86,7 @@ def mors(self, key): Generator returning all the mors in the cache for the given instance key. 
""" with self._mor_lock: - for k, v in iteritems(self._mor.get(key, {})): + for k, v in self._mor.get(key, {}).items(): yield k, v def mors_batch(self, key, batch_size, max_historical_metrics=None): @@ -111,7 +109,7 @@ def mors_batch(self, key, batch_size, max_historical_metrics=None): batch = {} nb_hist_metrics = 0 - for mor_name, mor in iteritems(mors_dict): + for mor_name, mor in mors_dict.items(): if mor['mor_type'] not in REALTIME_RESOURCES and mor.get('metrics'): # Those metrics are historical, let's make sure we don't have too # many of them in the same batch. @@ -182,7 +180,7 @@ def purge(self, key, ttl): with self._mor_lock: # Don't change the dict during iteration! # First collect the names of the Mors to remove... - for name, mor in iteritems(self._mor[key]): + for name, mor in self._mor[key].items(): age = now - mor['creation_time'] if age > ttl: mors_to_purge.append(name) diff --git a/vsphere/datadog_checks/vsphere/utils.py b/vsphere/datadog_checks/vsphere/utils.py index 136b041ed304b..99298b575cced 100644 --- a/vsphere/datadog_checks/vsphere/utils.py +++ b/vsphere/datadog_checks/vsphere/utils.py @@ -4,7 +4,6 @@ from typing import Any, Dict, List, Optional, Type # noqa: F401 from pyVmomi import vim -from six import iteritems from datadog_checks.base import to_string from datadog_checks.vsphere.constants import ( @@ -238,7 +237,7 @@ def get_mapped_instance_tag(metric_name): tag cannot be guessed by looking at the api results and has to be inferred using documentation or experience. This method acts as a utility to map a metric_name to the meaning of its instance tag. """ - for prefix, tag_key in iteritems(METRIC_TO_INSTANCE_TAG_MAPPING): + for prefix, tag_key in METRIC_TO_INSTANCE_TAG_MAPPING.items(): if metric_name.startswith(prefix): return tag_key return 'instance' diff --git a/vsphere/datadog_checks/vsphere/vsphere.py b/vsphere/datadog_checks/vsphere/vsphere.py index 80acdcb990ad9..f50cfdf73743c 100644 --- a/vsphere/datadog_checks/vsphere/vsphere.py +++ b/vsphere/datadog_checks/vsphere/vsphere.py @@ -11,7 +11,6 @@ from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Type, cast # noqa: F401 from pyVmomi import vim, vmodl -from six import iteritems from datadog_checks.base import AgentCheck, is_affirmative, to_string from datadog_checks.base.checks.libs.timer import Timer @@ -182,7 +181,7 @@ def collect_tags(self, infrastructure_data): resource_filters_without_tags = [f for f in self._config.resource_filters if not isinstance(f, TagFilter)] filtered_infra_data = { mor: props - for mor, props in iteritems(infrastructure_data) + for mor, props in infrastructure_data.items() if isinstance(mor, tuple(self._config.collected_resource_types)) and is_resource_collected_by_filters(mor, infrastructure_data, resource_filters_without_tags) } @@ -234,7 +233,7 @@ def refresh_infrastructure_cache(self): all_tags = self.collect_tags(infrastructure_data) self.infrastructure_cache.set_all_tags(all_tags) - for mor, properties in iteritems(infrastructure_data): + for mor, properties in infrastructure_data.items(): if not isinstance(mor, tuple(self._config.collected_resource_types)): # Do nothing for the resource types we do not collect continue @@ -487,7 +486,7 @@ def make_query_specs(self): counters = self.metrics_metadata_cache.get_metadata(resource_type) metric_ids = [] # type: List[vim.PerformanceManager.MetricId] is_historical_batch = metric_type == HISTORICAL - for counter_key, metric_name in iteritems(counters): + for counter_key, metric_name in 
counters.items(): # PerformanceManager.MetricId `instance` kwarg: # - An asterisk (*) to specify all instances of the metric for the specified counterId # - Double-quotes ("") to specify aggregated statistics @@ -504,7 +503,7 @@ def make_query_specs(self): for batch in self.make_batch(mors, metric_ids, resource_type, is_historical_batch=is_historical_batch): query_specs = [] - for mor, metrics in iteritems(batch): + for mor, metrics in batch.items(): query_spec = vim.PerformanceManager.QuerySpec() # type: vim.PerformanceManager.QuerySpec query_spec.entity = mor query_spec.metricId = metrics diff --git a/vsphere/tests/legacy/utils.py b/vsphere/tests/legacy/utils.py index bee31bf3dc758..5842f9817fded 100644 --- a/vsphere/tests/legacy/utils.py +++ b/vsphere/tests/legacy/utils.py @@ -7,7 +7,6 @@ from mock import MagicMock, Mock from pyVmomi import vim -from six import iteritems HERE = os.path.abspath(os.path.dirname(__file__)) @@ -90,7 +89,7 @@ def assertMOR(check, instance, name=None, spec=None, tags=None, count=None, subs instance_name = instance['name'] candidates = [] - mor_list = [mor for _, mors in iteritems(check.mor_objects_queue._objects_queue[instance_name]) for mor in mors] + mor_list = [mor for mors in check.mor_objects_queue._objects_queue[instance_name].values() for mor in mors] for mor in mor_list: if name is not None and name != mor['hostname']: diff --git a/yarn/datadog_checks/yarn/yarn.py b/yarn/datadog_checks/yarn/yarn.py index 461a02dd0f620..c6d94672f5469 100644 --- a/yarn/datadog_checks/yarn/yarn.py +++ b/yarn/datadog_checks/yarn/yarn.py @@ -1,9 +1,9 @@ # (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from urllib.parse import urljoin, urlsplit, urlunsplit + from requests.exceptions import ConnectionError, HTTPError, InvalidURL, SSLError, Timeout -from six import iteritems -from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.errors import ConfigurationError @@ -198,7 +198,7 @@ def check(self, instance): app_tags = {} filtered_app_tags = {} - for dd_prefix, yarn_key in iteritems(app_tags): + for dd_prefix, yarn_key in app_tags.items(): if yarn_key in self._ALLOWED_APPLICATION_TAGS: filtered_app_tags[dd_prefix] = yarn_key app_tags = filtered_app_tags @@ -292,7 +292,7 @@ def _yarn_app_metrics(self, rm_address, app_tags, addl_tags): def _get_app_tags(self, app_json, app_tags): split_app_tags = self.instance.get('split_yarn_application_tags', DEFAULT_SPLIT_YARN_APPLICATION_TAGS) tags = [] - for dd_tag, yarn_key in iteritems(app_tags): + for dd_tag, yarn_key in app_tags.items(): try: val = app_json[yarn_key] if val: @@ -416,7 +416,7 @@ def _set_yarn_metrics_from_json(self, tags, metrics_json, yarn_metrics): """ Parse the JSON response and set the metrics """ - for dict_path, metric in iteritems(yarn_metrics): + for dict_path, metric in yarn_metrics.items(): metric_name, metric_type = metric metric_value = self._get_value_from_json(dict_path, metrics_json) @@ -465,7 +465,7 @@ def _rest_request_to_json(self, url, object_path, tags, *args, **kwargs): # Add kwargs as arguments if kwargs: - query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(kwargs)]) + query = '&'.join(['{}={}'.format(key, value) for key, value in kwargs.items()]) url = urljoin(url, '?' 
+ query) try: diff --git a/zk/datadog_checks/zk/zk.py b/zk/datadog_checks/zk/zk.py index 05d9493e1889b..dfa63e9415cb8 100644 --- a/zk/datadog_checks/zk/zk.py +++ b/zk/datadog_checks/zk/zk.py @@ -61,15 +61,12 @@ import struct from collections import defaultdict from contextlib import closing +from io import StringIO from packaging.version import Version -from six import PY3, StringIO, iteritems from datadog_checks.base import AgentCheck, ensure_bytes, ensure_unicode, is_affirmative -if PY3: - long = int - class ZKConnectionFailure(Exception): """Raised when we are unable to connect or get the output of a command.""" @@ -226,7 +223,7 @@ def report_instance_mode(self, mode): tags = self.base_tags + ['mode:%s' % mode] self.gauge('zookeeper.instances', 1, tags=tags) gauges[mode] = 1 - for k, v in iteritems(gauges): + for k, v in gauges.items(): gauge_name = 'zookeeper.instances.%s' % k self.gauge(gauge_name, v, tags=self.base_tags) @@ -309,15 +306,15 @@ def parse_stat(self, buf): _, value = buf.readline().split(':') # Fixme: This metric name is wrong. It should be removed in a major version of the agent # See https://github.com/DataDog/integrations-core/issues/816 - metrics.append(ZKMetric('zookeeper.bytes_received', long(value.strip()))) - metrics.append(ZKMetric('zookeeper.packets.received', long(value.strip()), "rate")) + metrics.append(ZKMetric('zookeeper.bytes_received', int(value.strip()))) + metrics.append(ZKMetric('zookeeper.packets.received', int(value.strip()), "rate")) # Sent: 1324 _, value = buf.readline().split(':') # Fixme: This metric name is wrong. It should be removed in a major version of the agent # See https://github.com/DataDog/integrations-core/issues/816 - metrics.append(ZKMetric('zookeeper.bytes_sent', long(value.strip()))) - metrics.append(ZKMetric('zookeeper.packets.sent', long(value.strip()), "rate")) + metrics.append(ZKMetric('zookeeper.bytes_sent', int(value.strip()))) + metrics.append(ZKMetric('zookeeper.packets.sent', int(value.strip()), "rate")) if has_connections_val: # Connections: 1 @@ -330,12 +327,12 @@ def parse_stat(self, buf): # Outstanding: 0 _, value = buf.readline().split(':') - metrics.append(ZKMetric('zookeeper.outstanding_requests', long(value.strip()))) + metrics.append(ZKMetric('zookeeper.outstanding_requests', int(value.strip()))) # Zxid: 0x1034799c7 _, value = buf.readline().split(':') # Parse as a 64 bit hex int - zxid = long(value.strip(), 16) + zxid = int(value.strip(), 16) # convert to bytes zxid_bytes = struct.pack('>q', zxid) # the higher order 4 bytes is the epoch @@ -353,7 +350,7 @@ def parse_stat(self, buf): # Node count: 487 _, value = buf.readline().split(':') - metrics.append(ZKMetric('zookeeper.nodes', long(value.strip()))) + metrics.append(ZKMetric('zookeeper.nodes', int(value.strip()))) return metrics, tags, mode, version From 969a2aefd8d085593c1a9ac0317161b741a3f8e9 Mon Sep 17 00:00:00 2001 From: Zhengda Lu Date: Wed, 18 Sep 2024 12:48:00 -0400 Subject: [PATCH 21/23] [mongo] bump base package version (#18583) * bump base package version * add changelog * use 36.11.0 * remove file --- mongo/changelog.d/18583.added | 1 + mongo/pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 mongo/changelog.d/18583.added diff --git a/mongo/changelog.d/18583.added b/mongo/changelog.d/18583.added new file mode 100644 index 0000000000000..73092246d5ada --- /dev/null +++ b/mongo/changelog.d/18583.added @@ -0,0 +1 @@ +Bump datadog-checks-base dependency diff --git a/mongo/pyproject.toml b/mongo/pyproject.toml index 
de71c7d49ce1a..87eb3c784e024 100644 --- a/mongo/pyproject.toml +++ b/mongo/pyproject.toml @@ -29,7 +29,7 @@ classifiers = [ "Private :: Do Not Upload", ] dependencies = [ - "datadog-checks-base>=36.7.0", + "datadog-checks-base>=36.11.0", ] dynamic = [ "version", From ed1085fc604b5fc99a528cacf5b1e55a07860758 Mon Sep 17 00:00:00 2001 From: rahulkaukuntla <144174402+rahulkaukuntla@users.noreply.github.com> Date: Wed, 18 Sep 2024 16:53:54 -0400 Subject: [PATCH 22/23] [PLINT-508] Fixed excluded host tags for property metrics in vSphere (#18601) * accounting for excluded host tags for property metrics * added a changelog * lint * applying suggestion * addressing Sarah's comments --- vsphere/changelog.d/18601.fixed | 1 + vsphere/datadog_checks/vsphere/vsphere.py | 7 ++- vsphere/tests/test_unit.py | 63 ++++++++++++++++++----- 3 files changed, 56 insertions(+), 15 deletions(-) create mode 100644 vsphere/changelog.d/18601.fixed diff --git a/vsphere/changelog.d/18601.fixed b/vsphere/changelog.d/18601.fixed new file mode 100644 index 0000000000000..62efc32805c0e --- /dev/null +++ b/vsphere/changelog.d/18601.fixed @@ -0,0 +1 @@ +Fixed excluded host tags for property metrics diff --git a/vsphere/datadog_checks/vsphere/vsphere.py b/vsphere/datadog_checks/vsphere/vsphere.py index f50cfdf73743c..6c137ce2ff1df 100644 --- a/vsphere/datadog_checks/vsphere/vsphere.py +++ b/vsphere/datadog_checks/vsphere/vsphere.py @@ -901,7 +901,12 @@ def submit_property_metrics( ) return - base_tags = self._config.base_tags + resource_tags + base_tags = [] + if self._config.excluded_host_tags: + base_tags.extend([t for t in resource_tags if t.split(":", 1)[0] in self._config.excluded_host_tags]) + else: + base_tags.extend(resource_tags) + base_tags.extend(self._config.base_tags) if resource_type == vim.VirtualMachine: object_properties = self._config.object_properties_to_collect_by_mor.get(resource_metric_suffix, []) diff --git a/vsphere/tests/test_unit.py b/vsphere/tests/test_unit.py index fb6ce7133eb8c..0f3654809764c 100644 --- a/vsphere/tests/test_unit.py +++ b/vsphere/tests/test_unit.py @@ -2358,7 +2358,7 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog base_tags_vm1 = base_tags + ['vsphere_host:host1'] base_tags_vm3 = base_tags + ['vsphere_host:host2'] - realtime_instance['excluded_host_tags'] = ['vsphere_host'] + realtime_instance['excluded_host_tags'] = ['vsphere_host', 'vsphere_type', 'vsphere_folder'] check = VSphereCheck('vsphere', {}, [realtime_instance]) caplog.set_level(logging.DEBUG) dd_run_check(check) @@ -2373,8 +2373,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog ) assert ( "Could not submit property metric- no metric data: name=`vm.guest.guestFullName`, " - "value=`None`, hostname=`vm1`, base tags=`['vcenter_server:FAKE', 'vsphere_host:host1', " - "'vsphere_folder:unknown', 'vsphere_type:vm']` additional tags=`{}`" + "value=`None`, hostname=`vm1`, base tags=`['vsphere_host:host1', 'vsphere_folder:unknown', " + "'vsphere_type:vm', 'vcenter_server:FAKE']` additional tags=`{}`" ) in caplog.text aggregator.assert_metric( @@ -2433,8 +2433,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog ) assert ( "Could not submit property metric- no metric data: name=`vm.guest.toolsRunningStatus`, " - "value=`None`, hostname=`vm1`, base tags=`['vcenter_server:FAKE', 'vsphere_host:host1', " - "'vsphere_folder:unknown', 'vsphere_type:vm']` additional tags=`{}`" + "value=`None`, hostname=`vm1`, base 
tags=`['vsphere_host:host1', 'vsphere_folder:unknown', " + "'vsphere_type:vm', 'vcenter_server:FAKE']` additional tags=`{}`" ) in caplog.text aggregator.assert_metric( @@ -2514,8 +2514,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog assert ( "Submit property metric: name=`vm.config.memoryAllocation.limit`, value=`-1.0`, " - "hostname=`vm1`, tags=`['vcenter_server:FAKE', 'vsphere_host:host1', " - "'vsphere_folder:unknown', 'vsphere_type:vm']`, count=`False`" + "hostname=`vm1`, tags=`['vsphere_host:host1', 'vsphere_folder:unknown', " + "'vsphere_type:vm', 'vcenter_server:FAKE']`, count=`False`" ) in caplog.text aggregator.assert_metric( @@ -2526,8 +2526,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog assert ( "Could not submit property metric- unexpected metric value: " "name=`vm.config.cpuAllocation.overheadLimit`, value=`None`, hostname=`vm1`, " - "base tags=`['vcenter_server:FAKE', 'vsphere_host:host1', 'vsphere_folder:unknown', " - "'vsphere_type:vm']` additional tags=`{}`" + "base tags=`['vsphere_host:host1', 'vsphere_folder:unknown', " + "'vsphere_type:vm', 'vcenter_server:FAKE']` additional tags=`{}`" ) in caplog.text aggregator.assert_metric( @@ -2538,8 +2538,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog assert ( "Could not submit property metric- unexpected metric value: " "name=`vm.config.memoryAllocation.overheadLimit`, value=`None`, hostname=`vm1`, " - "base tags=`['vcenter_server:FAKE', 'vsphere_host:host1', 'vsphere_folder:unknown', " - "'vsphere_type:vm']` additional tags=`{}`" + "base tags=`['vsphere_host:host1', 'vsphere_folder:unknown', " + "'vsphere_type:vm', 'vcenter_server:FAKE']` additional tags=`{}`" ) in caplog.text # VM 3 @@ -2599,8 +2599,8 @@ def test_vm_property_metrics(aggregator, realtime_instance, dd_run_check, caplog ) assert ( "Could not submit property metric- unexpected metric value: name=`vm.summary.config.memorySizeMB`, " - "value=`None`, hostname=`vm3`, base tags=`['vcenter_server:FAKE', 'vsphere_host:host2', " - "'vsphere_folder:unknown', 'vsphere_type:vm']` additional tags=`{}`" + "value=`None`, hostname=`vm3`, base tags=`['vsphere_host:host2', " + "'vsphere_folder:unknown', 'vsphere_type:vm', 'vcenter_server:FAKE']` additional tags=`{}`" ) in caplog.text aggregator.assert_metric( @@ -2733,7 +2733,7 @@ def test_host_property_metrics(aggregator, realtime_instance, dd_run_check, capl assert ( "Could not submit property metric- no metric data: " "name=`host.hardware.cpuPowerManagementInfo.currentPolicy`, value=`None`, " - "hostname=`host2`, base tags=`['vcenter_server:FAKE', 'vsphere_type:host']` " + "hostname=`host2`, base tags=`['vsphere_type:host', 'vcenter_server:FAKE']` " "additional tags=`{}`" ) in caplog.text @@ -3181,6 +3181,41 @@ def test_property_metrics_invalid_ip_route_config_gateway( ) +def test_property_metrics_excluded_host_tags( + aggregator, realtime_instance, dd_run_check, service_instance, vm_properties_ex, datadog_agent +): + realtime_instance['collect_property_metrics'] = True + + service_instance.content.propertyCollector.RetrievePropertiesEx = vm_properties_ex + + realtime_instance['excluded_host_tags'] = ['vsphere_host', 'vsphere_folder'] + check = VSphereCheck('vsphere', {}, [realtime_instance]) + dd_run_check(check) + + aggregator.assert_metric( + 'vsphere.vm.summary.quickStats.uptimeSeconds', + count=1, + value=12184573.0, + tags=['vcenter_server:FAKE', 'vsphere_folder:unknown', 'vsphere_host:host1'], + 
hostname='vm1', + ) + datadog_agent.assert_external_tags( + 'vm1', + {'vsphere': ['vcenter_server:FAKE', 'vsphere_type:vm']}, + ) + aggregator.assert_metric( + 'vsphere.host.hardware.cpuPowerManagementInfo.currentPolicy', + count=1, + value=1, + tags=['currentPolicy:Balanced', 'vcenter_server:FAKE'], + hostname='host1', + ) + datadog_agent.assert_external_tags( + 'host1', + {'vsphere': ['vcenter_server:FAKE', 'vsphere_type:host']}, + ) + + @pytest.mark.parametrize( ('max_query_metrics', 'metrics_per_query', 'max_historical_metrics', 'expected_batch_num'), [ From 25c7fbea4e3bd01a1112553602c478bd04a4800a Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Thu, 19 Sep 2024 06:42:12 +0200 Subject: [PATCH 23/23] Drop 'six' from some more checks. (#18616) * Drop 'six' from some more checks. Here we focus mostly on the different compatibility crutches in use by integrations * fix import * fix another import --- .../activemq_xml/activemq_xml.py | 3 +- ambari/datadog_checks/ambari/ambari.py | 13 +++--- btrfs/datadog_checks/btrfs/btrfs.py | 4 +- ceph/datadog_checks/ceph/ceph.py | 3 +- .../datadog_checks/cisco_aci/aci_metrics.py | 10 ++--- .../datadog_checks/cisco_aci/capacity.py | 10 ++--- cisco_aci/datadog_checks/cisco_aci/cisco.py | 6 +-- cisco_aci/datadog_checks/cisco_aci/fabric.py | 13 ++---- cisco_aci/datadog_checks/cisco_aci/ndm.py | 18 +++----- cisco_aci/datadog_checks/cisco_aci/tags.py | 6 +-- cisco_aci/datadog_checks/cisco_aci/tenant.py | 6 +-- .../datadog_checks/clickhouse/clickhouse.py | 6 +-- .../cloud_foundry_api/cloud_foundry_api.py | 2 +- .../datadog_checks/cloud_foundry_api/utils.py | 2 +- cockroachdb/tests/legacy/common.py | 4 +- consul/datadog_checks/consul/consul.py | 11 +++-- couch/datadog_checks/couch/couch.py | 29 ++++++------ .../datadog_checks/couchbase/couchbase.py | 5 +-- elastic/datadog_checks/elastic/config.py | 3 +- elastic/datadog_checks/elastic/elastic.py | 27 ++++++----- envoy/datadog_checks/envoy/check.py | 3 +- envoy/datadog_checks/envoy/envoy.py | 2 +- envoy/datadog_checks/envoy/parser.py | 2 - esxi/datadog_checks/esxi/check.py | 5 +-- esxi/datadog_checks/esxi/utils.py | 3 +- etcd/datadog_checks/etcd/etcd.py | 2 +- fluentd/datadog_checks/fluentd/fluentd.py | 3 +- fly_io/datadog_checks/fly_io/check.py | 2 +- .../gitlab_runner/gitlab_runner.py | 2 +- glusterfs/datadog_checks/glusterfs/check.py | 4 +- .../datadog_checks/go_expvar/go_expvar.py | 6 +-- .../hdfs_datanode/hdfs_datanode.py | 8 ++-- .../hdfs_namenode/hdfs_namenode.py | 8 ++-- .../datadog_checks/http_check/http_check.py | 9 +--- .../collectors/channel_metric_collector.py | 6 +-- .../collectors/queue_metric_collector.py | 8 ++-- ibm_mq/datadog_checks/ibm_mq/config.py | 6 +-- ibm_mq/datadog_checks/ibm_mq/ibm_mq.py | 4 +- ibm_was/datadog_checks/ibm_was/ibm_was.py | 5 +-- iis/datadog_checks/iis/iis.py | 10 ++--- kong/datadog_checks/kong/kong.py | 3 +- .../kube_controller_manager.py | 3 +- kube_dns/datadog_checks/kube_dns/kube_dns.py | 3 +- kubelet/datadog_checks/kubelet/cadvisor.py | 5 +-- kubelet/datadog_checks/kubelet/kubelet.py | 13 +++--- kubelet/datadog_checks/kubelet/prometheus.py | 12 +++-- .../kubernetes_state/kubernetes_state.py | 36 +++++++-------- lighttpd/datadog_checks/lighttpd/lighttpd.py | 3 +- .../linux_proc_extras/linux_proc_extras.py | 4 +- mapr/datadog_checks/mapr/mapr.py | 4 +- marathon/datadog_checks/marathon/marathon.py | 5 +-- .../marklogic/parsers/request.py | 4 +- .../marklogic/parsers/status.py | 6 +-- .../marklogic/parsers/storage.py | 6 +-- .../datadog_checks/mesos_slave/mesos_slave.py 
| 7 ++- .../mongo/collectors/replica.py | 3 +- mongo/datadog_checks/mongo/utils.py | 2 +- network/datadog_checks/network/check_bsd.py | 21 ++++----- network/datadog_checks/network/check_linux.py | 37 +++++++-------- .../datadog_checks/network/check_solaris.py | 9 +--- .../datadog_checks/network/check_windows.py | 7 +-- network/datadog_checks/network/network.py | 29 ++++-------- .../datadog_checks/openstack/openstack.py | 9 ++-- .../openstack_controller/legacy/api.py | 16 +++---- .../legacy/openstack_controller_legacy.py | 21 +++++---- .../datadog_checks/pgbouncer/pgbouncer.py | 2 +- php_fpm/datadog_checks/php_fpm/php_fpm.py | 45 +++++++++---------- postgres/datadog_checks/postgres/config.py | 17 +------ postgres/datadog_checks/postgres/postgres.py | 3 +- process/datadog_checks/process/process.py | 5 +-- rabbitmq/datadog_checks/rabbitmq/rabbitmq.py | 11 +++-- silk/datadog_checks/silk/check.py | 3 +- snmp/datadog_checks/snmp/snmp.py | 4 +- .../datadog_checks/sqlserver/connection.py | 4 +- statsd/datadog_checks/statsd/statsd.py | 3 +- tls/datadog_checks/tls/tls.py | 5 +-- voltdb/datadog_checks/voltdb/check.py | 3 +- voltdb/datadog_checks/voltdb/client.py | 2 +- voltdb/datadog_checks/voltdb/config.py | 3 +- vsphere/datadog_checks/vsphere/api.py | 3 +- vsphere/datadog_checks/vsphere/cache.py | 3 +- .../vsphere/legacy/vsphere_legacy.py | 4 +- vsphere/tests/legacy/test_mor_cache.py | 1 - .../windows_service/windows_service.py | 3 +- 84 files changed, 257 insertions(+), 414 deletions(-) diff --git a/activemq_xml/datadog_checks/activemq_xml/activemq_xml.py b/activemq_xml/datadog_checks/activemq_xml/activemq_xml.py index 6c274675dc844..18a6d2ccdd00b 100644 --- a/activemq_xml/datadog_checks/activemq_xml/activemq_xml.py +++ b/activemq_xml/datadog_checks/activemq_xml/activemq_xml.py @@ -4,7 +4,6 @@ from xml.etree import ElementTree import requests -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.config import _is_affirmative @@ -95,7 +94,7 @@ def _process_data(self, data, el_type, tags, max_elements, detailed_elements): continue el_tags = tags + ["{0}:{1}".format(el_type, name)] - for attr_name, alias in iteritems(TOPIC_QUEUE_METRICS): + for attr_name, alias in TOPIC_QUEUE_METRICS.items(): metric_name = "activemq.{0}.{1}".format(el_type, alias) value = stats.get(attr_name, 0) self.gauge(metric_name, value, tags=el_tags) diff --git a/ambari/datadog_checks/ambari/ambari.py b/ambari/datadog_checks/ambari/ambari.py index e84ca963606c1..902bf894667d1 100644 --- a/ambari/datadog_checks/ambari/ambari.py +++ b/ambari/datadog_checks/ambari/ambari.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from requests.exceptions import ConnectionError, HTTPError, Timeout -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.constants import ServiceCheck @@ -96,7 +95,7 @@ def get_host_metrics(self, clusters): continue metrics = self.flatten_host_metrics(host_metrics) - for metric_name, value in iteritems(metrics): + for metric_name, value in metrics.items(): metric_tags = self.base_tags + [cluster_tag] if isinstance(value, float): self._submit_gauge(metric_name, value, metric_tags, hostname) @@ -111,7 +110,7 @@ def get_service_status_and_metrics(self, clusters, collect_service_metrics, coll for cluster in clusters: tags = self.base_tags + [CLUSTER_TAG_TEMPLATE.format(cluster)] - for service, components in iteritems(self.included_services): + for service, components in 
self.included_services.items(): service_tags = tags + [SERVICE_TAG + service.lower()] if collect_service_metrics: @@ -144,7 +143,7 @@ def get_component_metrics(self, cluster, service, base_tags, component_included) component_metrics_endpoint = common.create_endpoint(self.base_url, cluster, service, COMPONENT_METRICS_QUERY) components_response = self._make_request(component_metrics_endpoint) - component_included = {k.upper(): v for k, v in iteritems(component_included)} + component_included = {k.upper(): v for k, v in component_included.items()} if components_response is None or 'items' not in components_response: self.log.warning("No components found for service %s.", service) @@ -171,7 +170,7 @@ def get_component_metrics(self, cluster, service, base_tags, component_included) metrics = self.flatten_service_metrics(component_metrics[header], header) component_tag = COMPONENT_TAG + component_name.lower() - for metric_name, value in iteritems(metrics): + for metric_name, value in metrics.items(): metric_tags = base_tags + [component_tag] if isinstance(value, float): self._submit_gauge(metric_name, value, metric_tags) @@ -206,7 +205,7 @@ def _submit_service_checks(self, name, value, tags, message=None): @classmethod def flatten_service_metrics(cls, metric_dict, prefix): flat_metrics = {} - for raw_metric_name, metric_value in iteritems(metric_dict): + for raw_metric_name, metric_value in metric_dict.items(): if isinstance(metric_value, dict): flat_metrics.update(cls.flatten_service_metrics(metric_value, prefix)) else: @@ -217,7 +216,7 @@ def flatten_service_metrics(cls, metric_dict, prefix): @classmethod def flatten_host_metrics(cls, metric_dict, prefix=""): flat_metrics = {} - for raw_metric_name, metric_value in iteritems(metric_dict): + for raw_metric_name, metric_value in metric_dict.items(): metric_name = '{}.{}'.format(prefix, raw_metric_name) if prefix else raw_metric_name if raw_metric_name == "boottime": flat_metrics["boottime"] = metric_value diff --git a/btrfs/datadog_checks/btrfs/btrfs.py b/btrfs/datadog_checks/btrfs/btrfs.py index c469b511dd5bf..3c5d0543ea23d 100644 --- a/btrfs/datadog_checks/btrfs/btrfs.py +++ b/btrfs/datadog_checks/btrfs/btrfs.py @@ -11,8 +11,6 @@ from collections import defaultdict import psutil -from six import iteritems -from six.moves import range from datadog_checks.base import AgentCheck @@ -175,7 +173,7 @@ def check(self, _): if len(btrfs_devices) == 0: raise Exception("No btrfs device found") - for device, mountpoint in iteritems(btrfs_devices): + for device, mountpoint in btrfs_devices.items(): for flags, total_bytes, used_bytes in self.get_usage(mountpoint): replication_type, usage_type = FLAGS_MAPPER[flags] tags = [ diff --git a/ceph/datadog_checks/ceph/ceph.py b/ceph/datadog_checks/ceph/ceph.py index 535bdb9e2ba09..e52d497db48f7 100644 --- a/ceph/datadog_checks/ceph/ceph.py +++ b/ceph/datadog_checks/ceph/ceph.py @@ -7,7 +7,6 @@ import re import simplejson as json -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.config import _is_affirmative @@ -149,7 +148,7 @@ def _extract_metrics(self, raw, tags): # so we won't send the metric osd.pct_used if 'checks' in raw['health_detail']: checks = raw['health_detail']['checks'] - for check_name, check_detail in iteritems(checks): + for check_name, check_detail in checks.items(): if check_name == 'OSD_NEARFULL': health['num_near_full_osds'] = len(check_detail['detail']) if check_name == 'OSD_FULL': diff --git a/cisco_aci/datadog_checks/cisco_aci/aci_metrics.py 
b/cisco_aci/datadog_checks/cisco_aci/aci_metrics.py index 92ded035e1542..5a254eac9ab1f 100644 --- a/cisco_aci/datadog_checks/cisco_aci/aci_metrics.py +++ b/cisco_aci/datadog_checks/cisco_aci/aci_metrics.py @@ -2,8 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six import iteritems - METRIC_PREFIX = "cisco_aci" FABRIC_PREFIX = METRIC_PREFIX + ".fabric" @@ -106,11 +104,11 @@ def make_tenant_metrics(): tenant_metrics = {"tenant": {}, "application": {}, "endpoint_group": {}} - for cisco_metric, metric_map in iteritems(metrics): + for cisco_metric, metric_map in metrics.items(): tenant_metrics["tenant"][cisco_metric] = {} tenant_metrics["application"][cisco_metric] = {} tenant_metrics["endpoint_group"][cisco_metric] = {} - for sub_metric, dd_metric in iteritems(metric_map): + for sub_metric, dd_metric in metric_map.items(): dd_tenant_metric = dd_metric.format(TENANT_PREFIX) tenant_metrics["tenant"][cisco_metric][sub_metric] = dd_tenant_metric dd_app_metric = dd_metric.format(APPLICATION_PREFIX) @@ -118,10 +116,10 @@ def make_tenant_metrics(): dd_epg_metric = dd_metric.format(ENDPOINT_GROUP_PREFIX) tenant_metrics["endpoint_group"][cisco_metric][sub_metric] = dd_epg_metric - for cisco_metric, metric_map in iteritems(endpoint_metrics): + for cisco_metric, metric_map in endpoint_metrics.items(): if not tenant_metrics.get("endpoint_group", {}).get(cisco_metric): tenant_metrics["endpoint_group"][cisco_metric] = {} - for sub_metric, dd_metric in iteritems(metric_map): + for sub_metric, dd_metric in metric_map.items(): dd_epg_metric = dd_metric.format(TENANT_PREFIX) tenant_metrics["endpoint_group"][cisco_metric][sub_metric] = dd_epg_metric diff --git a/cisco_aci/datadog_checks/cisco_aci/capacity.py b/cisco_aci/datadog_checks/cisco_aci/capacity.py index 1fe537388a4a5..692e6c77816c8 100644 --- a/cisco_aci/datadog_checks/cisco_aci/capacity.py +++ b/cisco_aci/datadog_checks/cisco_aci/capacity.py @@ -2,8 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six import iteritems - from . 
import aci_metrics, exceptions, helpers @@ -49,7 +47,7 @@ def collect(self): self.log.info("finished collecting capacity data") def _get_eqpt_capacity(self): - for c, metric_dict in iteritems(aci_metrics.EQPT_CAPACITY_METRICS): + for c, metric_dict in aci_metrics.EQPT_CAPACITY_METRICS.items(): data = self.api.get_eqpt_capacity(c) for d in data: dn = d.get('attributes', {}).get('dn') @@ -63,14 +61,14 @@ def _get_eqpt_capacity(self): child_attrs = child.get(c, {}).get('attributes') if not child_attrs or type(child_attrs) is not dict: continue - for cisco_metric, dd_metric in iteritems(metric_dict): + for cisco_metric, dd_metric in metric_dict.items(): value = child_attrs.get(cisco_metric) if not value: continue self.gauge(dd_metric, value, tags=tags, hostname=hostname) def _get_contexts(self): - for c, metric_dict in iteritems(aci_metrics.CAPACITY_CONTEXT_METRICS): + for c, metric_dict in aci_metrics.CAPACITY_CONTEXT_METRICS.items(): dd_metric = metric_dict.get("metric_name") utilized_metric_name = dd_metric + ".utilized" # These Values are, for some reason, hardcoded in the UI @@ -105,7 +103,7 @@ def _get_apic_capacity_limits(self): def _get_apic_capacity_metrics(self): tags = self.user_tags + self.check_tags - for c, opts in iteritems(aci_metrics.APIC_CAPACITY_METRICS): + for c, opts in aci_metrics.APIC_CAPACITY_METRICS.items(): dd_metric = opts.get("metric_name") data = self.api.get_apic_capacity_metrics(c, query=opts.get("query_string")) if c == "fabricNode": diff --git a/cisco_aci/datadog_checks/cisco_aci/cisco.py b/cisco_aci/datadog_checks/cisco_aci/cisco.py index 4cfee34753db7..ee17248e34f11 100644 --- a/cisco_aci/datadog_checks/cisco_aci/cisco.py +++ b/cisco_aci/datadog_checks/cisco_aci/cisco.py @@ -1,8 +1,6 @@ # (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six import iteritems - from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.config import _is_affirmative from datadog_checks.base.utils.containers import hash_mutable @@ -146,7 +144,7 @@ def submit_metrics(self, metrics, tags, instance=None, obj_type="gauge", hostnam instance = {} user_tags = instance.get('tags', []) - for mname, mval in iteritems(metrics): + for mname, mval in metrics.items(): tags_to_send = [] if mval: if hostname: @@ -162,7 +160,7 @@ def submit_metrics(self, metrics, tags, instance=None, obj_type="gauge", hostnam def get_external_host_tags(self): external_host_tags = [] - for hostname, tags in iteritems(self.external_host_tags): + for hostname, tags in self.external_host_tags.items(): host_tags = tags + self.check_tags external_host_tags.append((hostname, {SOURCE_TYPE: host_tags})) return external_host_tags diff --git a/cisco_aci/datadog_checks/cisco_aci/fabric.py b/cisco_aci/datadog_checks/cisco_aci/fabric.py index c73baf0eb695a..1a99ebbb3425b 100644 --- a/cisco_aci/datadog_checks/cisco_aci/fabric.py +++ b/cisco_aci/datadog_checks/cisco_aci/fabric.py @@ -1,15 +1,10 @@ # (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) - - -from six import PY3, iteritems +import time from datadog_checks.base.utils.serialization import json -if PY3: - import time - from . 
import aci_metrics, exceptions, helpers, ndm VENDOR_CISCO = 'cisco' @@ -43,7 +38,7 @@ def __init__(self, check, api, instance, namespace): self.event_platform_event = check.event_platform_event def ndm_enabled(self): - return PY3 and self.send_ndm_metadata + return self.send_ndm_metadata def collect(self): fabric_pods = self.api.get_fabric_pods() @@ -170,10 +165,10 @@ def submit_fabric_metric(self, stats, tags, obj_type, hostname=None): continue metrics = {} - for n, ms in iteritems(aci_metrics.FABRIC_METRICS): + for n, ms in aci_metrics.FABRIC_METRICS.items(): if n not in name: continue - for cisco_metric, dd_metric in iteritems(ms): + for cisco_metric, dd_metric in ms.items(): mname = dd_metric.format(self.get_fabric_type(obj_type)) mval = s.get(name, {}).get("attributes", {}).get(cisco_metric) json_attrs = s.get(name, {}).get("attributes", {}) diff --git a/cisco_aci/datadog_checks/cisco_aci/ndm.py b/cisco_aci/datadog_checks/cisco_aci/ndm.py index 850a1c4ae6173..5681350d6b39f 100644 --- a/cisco_aci/datadog_checks/cisco_aci/ndm.py +++ b/cisco_aci/datadog_checks/cisco_aci/ndm.py @@ -2,17 +2,13 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) - -from six import PY3 - -if PY3: - from datadog_checks.cisco_aci.models import ( - DeviceMetadata, - InterfaceMetadata, - NetworkDevicesMetadata, - Node, - PhysIf, - ) +from datadog_checks.cisco_aci.models import ( + DeviceMetadata, + InterfaceMetadata, + NetworkDevicesMetadata, + Node, + PhysIf, +) VENDOR_CISCO = 'cisco' PAYLOAD_METADATA_BATCH_SIZE = 100 diff --git a/cisco_aci/datadog_checks/cisco_aci/tags.py b/cisco_aci/datadog_checks/cisco_aci/tags.py index 2696c960f23ce..c9b1fab5e59de 100644 --- a/cisco_aci/datadog_checks/cisco_aci/tags.py +++ b/cisco_aci/datadog_checks/cisco_aci/tags.py @@ -4,8 +4,6 @@ import re -from six import iteritems - from datadog_checks.base.utils.containers import hash_mutable from . import exceptions, helpers @@ -103,7 +101,7 @@ def _tenant_mapper(self, edpt): application_meta = [] application_meta_map = self._edpt_tags_map(edpt) - for k, v in iteritems(application_meta_map): + for k, v in application_meta_map.items(): application_meta.append(k + ":" + v) tenant_name = application_meta_map.get("tenant") app_name = application_meta_map.get("application") @@ -112,7 +110,7 @@ def _tenant_mapper(self, edpt): # adding meta tags endpoint_meta = [] endpoint_meta_map = self._get_epg_meta_tags_map(tenant_name, app_name, epg_name) - for k, v in iteritems(endpoint_meta_map): + for k, v in endpoint_meta_map.items(): endpoint_meta.append(k + ":" + v) # adding application tags diff --git a/cisco_aci/datadog_checks/cisco_aci/tenant.py b/cisco_aci/datadog_checks/cisco_aci/tenant.py index f906dd4aa38ea..02b8b7dede03e 100644 --- a/cisco_aci/datadog_checks/cisco_aci/tenant.py +++ b/cisco_aci/datadog_checks/cisco_aci/tenant.py @@ -6,8 +6,6 @@ import re import time -from six import iteritems - from . 
import exceptions, helpers @@ -112,10 +110,10 @@ def submit_raw_obj(self, raw_stats, tags, obj_type): tenant_metrics = self.tenant_metrics.get(obj_type, {}) - for n, ms in iteritems(tenant_metrics): + for n, ms in tenant_metrics.items(): if n not in name: continue - for cisco_metric, dd_metric in iteritems(ms): + for cisco_metric, dd_metric in ms.items(): mval = s.get(name, {}).get("attributes", {}).get(cisco_metric) json_attrs = s.get(name, {}).get("attributes", {}) if mval and helpers.check_metric_can_be_zero(cisco_metric, mval, json_attrs): diff --git a/clickhouse/datadog_checks/clickhouse/clickhouse.py b/clickhouse/datadog_checks/clickhouse/clickhouse.py index 85e4af4d02266..e640169d9e749 100644 --- a/clickhouse/datadog_checks/clickhouse/clickhouse.py +++ b/clickhouse/datadog_checks/clickhouse/clickhouse.py @@ -2,7 +2,6 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import clickhouse_driver -from six import raise_from from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.base.utils.db import QueryManager @@ -119,10 +118,7 @@ def connect(self): self._error_sanitizer.clean(self._error_sanitizer.scrub(str(e))) ) self.service_check(self.SERVICE_CHECK_CONNECT, self.CRITICAL, message=error, tags=self._tags) - - # When an exception is raised in the context of another one, both will be printed. To avoid - # this we set the context to None. https://www.python.org/dev/peps/pep-0409/ - raise_from(type(e)(error), None) + raise type(e)(error) from None else: self.service_check(self.SERVICE_CHECK_CONNECT, self.OK, tags=self._tags) self._client = client diff --git a/cloud_foundry_api/datadog_checks/cloud_foundry_api/cloud_foundry_api.py b/cloud_foundry_api/datadog_checks/cloud_foundry_api/cloud_foundry_api.py index f7d8cadae870c..219f156a7ac85 100644 --- a/cloud_foundry_api/datadog_checks/cloud_foundry_api/cloud_foundry_api.py +++ b/cloud_foundry_api/datadog_checks/cloud_foundry_api/cloud_foundry_api.py @@ -5,10 +5,10 @@ import json import time from typing import Any, Dict, Generator, Tuple # noqa: F401 +from urllib.parse import urlparse from requests.exceptions import HTTPError, RequestException from semver import VersionInfo -from six.moves.urllib_parse import urlparse from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException, ConfigurationError diff --git a/cloud_foundry_api/datadog_checks/cloud_foundry_api/utils.py b/cloud_foundry_api/datadog_checks/cloud_foundry_api/utils.py index 193bb135baac0..6adc2ae883187 100644 --- a/cloud_foundry_api/datadog_checks/cloud_foundry_api/utils.py +++ b/cloud_foundry_api/datadog_checks/cloud_foundry_api/utils.py @@ -3,9 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from datetime import datetime from typing import Any, Dict # noqa: F401 +from urllib.parse import urljoin from dateutil import parser, tz -from six.moves.urllib_parse import urljoin def get_next_url(payload, version): diff --git a/cockroachdb/tests/legacy/common.py b/cockroachdb/tests/legacy/common.py index 81ad062f6812f..94bda6b0382f3 100644 --- a/cockroachdb/tests/legacy/common.py +++ b/cockroachdb/tests/legacy/common.py @@ -1,14 +1,12 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six import itervalues - from datadog_checks.cockroachdb.metrics import METRIC_MAP from datadog_checks.dev.utils import assert_service_checks def assert_check(aggregator): - for metric in itervalues(METRIC_MAP): + for metric in METRIC_MAP.values(): aggregator.assert_metric('cockroachdb.{}'.format(metric), at_least=0) assert aggregator.metrics_asserted_pct > 80, 'Missing metrics {}'.format(aggregator.not_asserted()) diff --git a/consul/datadog_checks/consul/consul.py b/consul/datadog_checks/consul/consul.py index cccd45fee7812..585197fb62b48 100644 --- a/consul/datadog_checks/consul/consul.py +++ b/consul/datadog_checks/consul/consul.py @@ -8,11 +8,10 @@ from itertools import islice from multiprocessing.pool import ThreadPool from time import time as timestamp +from urllib.parse import urljoin import requests from requests import HTTPError -from six import iteritems, iterkeys, itervalues -from six.moves.urllib.parse import urljoin from datadog_checks.base import ConfigurationError, OpenMetricsBaseCheck, is_affirmative from datadog_checks.base.utils.serialization import json @@ -309,7 +308,7 @@ def _cull_services_list(self, services): ) self.warning(log_line) - services = {s: services[s] for s in list(islice(iterkeys(allowed_services), 0, self.max_services))} + services = {s: services[s] for s in list(islice(allowed_services, 0, self.max_services))} return services @@ -381,7 +380,7 @@ def check(self, _): elif STATUS_SEVERITY[status] > STATUS_SEVERITY[sc[sc_id]['status']]: sc[sc_id]['status'] = status - for s in itervalues(sc): + for s in sc.values(): self.service_check(HEALTH_CHECK, s['status'], tags=main_tags + s['tags']) except Exception as e: @@ -437,7 +436,7 @@ def check(self, _): nodes_with_service[service] if self.thread_pool is None else nodes_with_service[service].get(), ) - for node, service_status in iteritems(nodes_to_service_status): + for node, service_status in nodes_to_service_status.items(): # For every node discovered for included services, gauge the following: # `consul.catalog.services_up` : Total services registered on node # `consul.catalog.services_passing` : Total passing services on node @@ -455,7 +454,7 @@ def check(self, _): tags=main_tags + node_tags, ) - for node_status, count in iteritems(nodes_per_service_tag_counts): + for node_status, count in nodes_per_service_tag_counts.items(): service_tags = [ 'consul_{}_service_tag:{}'.format(node_status.service_name, tag) for tag in node_status.service_tags_set diff --git a/couch/datadog_checks/couch/couch.py b/couch/datadog_checks/couch/couch.py index ad00f4105cbe3..37235cd84b2aa 100644 --- a/couch/datadog_checks/couch/couch.py +++ b/couch/datadog_checks/couch/couch.py @@ -5,10 +5,9 @@ from __future__ import division import math +from urllib.parse import quote, urljoin import requests -from six import iteritems -from six.moves.urllib.parse import quote, urljoin from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException, ConfigurationError @@ -115,14 +114,14 @@ def __init__(self, agent_check): def _create_metric(self, data, tags=None): overall_stats = data.get('stats', {}) - for key, stats in iteritems(overall_stats): - for metric, val in iteritems(stats): + for key, stats in overall_stats.items(): + for metric, val in stats.items(): if val['current'] is not None: metric_name = '.'.join(['couchdb', key, metric]) self.gauge(metric_name, val['current'], tags=tags) - for db_name, db_stats 
in iteritems(data.get('databases', {})): - for name, val in iteritems(db_stats): + for db_name, db_stats in data.get('databases', {}).items(): + for name, val in db_stats.items(): if name in ['doc_count', 'disk_size'] and val is not None: metric_name = '.'.join(['couchdb', 'by_db', name]) metric_tags = list(tags) @@ -204,10 +203,10 @@ def __init__(self, agent_check): self.instance = agent_check.instance def _build_metrics(self, data, tags, prefix='couchdb'): - for key, value in iteritems(data): + for key, value in data.items(): if "type" in value: if value["type"] == "histogram": - for metric, histo_value in iteritems(value["value"]): + for metric, histo_value in value["value"].items(): if metric == "histogram": continue elif metric == "percentile": @@ -221,7 +220,7 @@ def _build_metrics(self, data, tags, prefix='couchdb'): self._build_metrics(value, tags, "{0}.{1}".format(prefix, key)) def _build_db_metrics(self, data, tags): - for key, value in iteritems(data['sizes']): + for key, value in data['sizes'].items(): self.gauge("couchdb.by_db.{0}_size".format(key), value, tags) for key in ['doc_del_count', 'doc_count']: @@ -233,18 +232,18 @@ def _build_dd_metrics(self, info, tags): ddtags.append("design_document:{0}".format(info['name'])) ddtags.append("language:{0}".format(data['language'])) - for key, value in iteritems(data['sizes']): + for key, value in data['sizes'].items(): self.gauge("couchdb.by_ddoc.{0}_size".format(key), value, ddtags) - for key, value in iteritems(data['updates_pending']): + for key, value in data['updates_pending'].items(): self.gauge("couchdb.by_ddoc.{0}_updates_pending".format(key), value, ddtags) self.gauge("couchdb.by_ddoc.waiting_clients", data['waiting_clients'], ddtags) def _build_system_metrics(self, data, tags, prefix='couchdb.erlang'): - for key, value in iteritems(data): + for key, value in data.items(): if key == "message_queues": - for queue, val in iteritems(value): + for queue, val in value.items(): queue_tags = list(tags) queue_tags.append("queue:{0}".format(queue)) if isinstance(val, dict): @@ -257,7 +256,7 @@ def _build_system_metrics(self, data, tags, prefix='couchdb.erlang'): else: self.gauge("{0}.{1}.size".format(prefix, key), val, queue_tags) elif key == "distribution": - for node, metrics in iteritems(value): + for node, metrics in value.items(): dist_tags = list(tags) dist_tags.append("node:{0}".format(node)) self._build_system_metrics(metrics, dist_tags, "{0}.{1}".format(prefix, key)) @@ -305,7 +304,7 @@ def _build_active_tasks_metrics(self, data, tags, prefix='couchdb.active_tasks') if task.get(metric) is not None: self.gauge("{0}.view_compaction.{1}".format(prefix, metric), task[metric], rtags) - for metric, count in iteritems(counts): + for metric, count in counts.items(): if metric == "database_compaction": metric = "db_compaction" self.gauge("{0}.{1}.count".format(prefix, metric), count, tags) diff --git a/couchbase/datadog_checks/couchbase/couchbase.py b/couchbase/datadog_checks/couchbase/couchbase.py index 97281b8304eb0..b74703d838528 100644 --- a/couchbase/datadog_checks/couchbase/couchbase.py +++ b/couchbase/datadog_checks/couchbase/couchbase.py @@ -7,10 +7,9 @@ import re import time +from urllib.parse import urljoin import requests -from six import string_types -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.couchbase.couchbase_consts import ( @@ -96,7 +95,7 @@ def _create_metrics(self, data): norm_metric_name = 
self.camel_case_to_joined_lower(metric_name)
                 if norm_metric_name in QUERY_STATS:
                     # for query times, the unit is part of the value, we need to extract it
-                    if isinstance(val, string_types):
+                    if isinstance(val, str):
                         val = self.extract_seconds_value(val)
 
                     full_metric_name = 'couchbase.query.{}'.format(self.camel_case_to_joined_lower(norm_metric_name))
diff --git a/elastic/datadog_checks/elastic/config.py b/elastic/datadog_checks/elastic/config.py
index 1e79c236b4ee6..18b111307e1a9 100644
--- a/elastic/datadog_checks/elastic/config.py
+++ b/elastic/datadog_checks/elastic/config.py
@@ -2,8 +2,7 @@
 # All rights reserved
 # Licensed under a 3-clause BSD style license (see LICENSE)
 from collections import namedtuple
-
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 from datadog_checks.base import ConfigurationError, is_affirmative
 
diff --git a/elastic/datadog_checks/elastic/elastic.py b/elastic/datadog_checks/elastic/elastic.py
index aa41cf8eff95f..cb0b1cbd012de 100644
--- a/elastic/datadog_checks/elastic/elastic.py
+++ b/elastic/datadog_checks/elastic/elastic.py
@@ -6,10 +6,9 @@
 from collections import defaultdict, namedtuple
 from copy import deepcopy
 from itertools import product
+from urllib.parse import urljoin, urlparse
 
 import requests
-from six import iteritems, itervalues
-from six.moves.urllib.parse import urljoin, urlparse
 
 from datadog_checks.base import AgentCheck, is_affirmative, to_string
 
@@ -262,13 +261,13 @@ def _get_index_metrics(self, admin_forwarder, version, base_tags):
             index_data['health_reverse'] = dd_health.reverse_status
 
             # Ensure that index_data does not contain None values
-            for key, value in list(iteritems(index_data)):
+            for key, value in list(index_data.items()):
                 if value is None:
                     del index_data[key]
                     self.log.debug("The index %s has no metric data for %s", idx['index'], key)
 
             tags = base_tags + ['index_name:' + idx['index']]
-            for metric, desc in iteritems(index_stats_for_version(version)):
+            for metric, desc in index_stats_for_version(version).items():
                 self._process_metric(index_data, metric, *desc, tags=tags)
         self._get_index_search_stats(admin_forwarder, base_tags)
 
@@ -282,7 +281,7 @@ def _get_template_metrics(self, admin_forwarder, base_tags):
 
         filtered_templates = [t for t in template_resp if not t['name'].startswith(TEMPLATE_EXCLUSION_LIST)]
 
-        for metric, desc in iteritems(TEMPLATE_METRICS):
+        for metric, desc in TEMPLATE_METRICS.items():
             self._process_metric({'templates': filtered_templates}, metric, *desc, tags=base_tags)
 
     def _get_index_search_stats(self, admin_forwarder, base_tags):
@@ -294,7 +293,7 @@ def _get_index_search_stats(self, admin_forwarder, base_tags):
         # The health we can get from /_cluster/health if we pass level=indices query param. 
Reference: # https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html#cluster-health-api-query-params # noqa: E501 indices = self._get_data(self._join_url('/_stats/search', admin_forwarder))['indices'] - for (idx_name, data), (m_name, path) in product(iteritems(indices), INDEX_SEARCH_STATS): + for (idx_name, data), (m_name, path) in product(indices.items(), INDEX_SEARCH_STATS): tags = base_tags + ['index_name:' + idx_name] self._process_metric(data, m_name, 'gauge', path, tags=tags) @@ -362,7 +361,7 @@ def _process_pending_tasks_data(self, data, base_tags): p_tasks[task.get('priority')] += 1 average_time_in_queue += task.get('time_in_queue_millis', 0) - total = sum(itervalues(p_tasks)) + total = sum(p_tasks.values()) node_data = { 'pending_task_total': total, 'pending_tasks_priority_high': p_tasks['high'], @@ -377,7 +376,7 @@ def _process_pending_tasks_data(self, data, base_tags): self._process_metric(node_data, metric, *desc, tags=base_tags) def _process_stats_data(self, data, stats_metrics, base_tags): - for node_data in itervalues(data.get('nodes', {})): + for node_data in data.get('nodes', {}).values(): metric_hostname = None metrics_tags = list(base_tags) @@ -396,18 +395,18 @@ def _process_stats_data(self, data, stats_metrics, base_tags): metric_hostname = node_data[k] break - for metric, desc in iteritems(stats_metrics): + for metric, desc in stats_metrics.items(): self._process_metric(node_data, metric, *desc, tags=metrics_tags, hostname=metric_hostname) def _process_pshard_stats_data(self, data, pshard_stats_metrics, base_tags): - for metric, desc in iteritems(pshard_stats_metrics): + for metric, desc in pshard_stats_metrics.items(): pshard_tags = base_tags if desc[1].startswith('_all.'): pshard_tags = pshard_tags + ['index_name:_all'] self._process_metric(data, metric, *desc, tags=pshard_tags) # process index-level metrics if self._config.cluster_stats and self._config.detailed_index_stats: - for metric, desc in iteritems(pshard_stats_metrics): + for metric, desc in pshard_stats_metrics.items(): if desc[1].startswith('_all.'): for index in data['indices']: self.log.debug("Processing index %s", index) @@ -449,7 +448,7 @@ def _process_health_data(self, data, version, base_tags, service_check_tags): ): self.event(self._create_event(current_status, tags=base_tags)) - for metric, desc in iteritems(health_stats_for_version(version)): + for metric, desc in health_stats_for_version(version).items(): self._process_metric(data, metric, *desc, tags=base_tags) # Process the service check @@ -473,12 +472,12 @@ def _process_health_data(self, data, version, base_tags, service_check_tags): self.service_check(self.SERVICE_CHECK_CLUSTER_STATUS, dd_health.status, message=msg, tags=service_check_tags) def _process_policy_data(self, data, version, base_tags): - for policy, policy_data in iteritems(data): + for policy, policy_data in data.items(): repo = policy_data.get('policy', {}).get('repository', 'unknown') tags = base_tags + ['policy:{}'.format(policy), 'repository:{}'.format(repo)] slm_stats = slm_stats_for_version(version) - for metric, desc in iteritems(slm_stats): + for metric, desc in slm_stats.items(): self._process_metric(policy_data, metric, *desc, tags=tags) def _process_cat_allocation_data(self, admin_forwarder, version, base_tags): diff --git a/envoy/datadog_checks/envoy/check.py b/envoy/datadog_checks/envoy/check.py index dbc3576ca20b3..c825e5e48c11c 100644 --- a/envoy/datadog_checks/envoy/check.py +++ b/envoy/datadog_checks/envoy/check.py @@ -3,8 +3,7 @@ # 
Licensed under a 3-clause BSD style license (see LICENSE) import re from collections import defaultdict - -from six.moves.urllib.parse import urljoin, urlparse, urlunparse +from urllib.parse import urljoin, urlparse, urlunparse from datadog_checks.base import AgentCheck, OpenMetricsBaseCheckV2 diff --git a/envoy/datadog_checks/envoy/envoy.py b/envoy/datadog_checks/envoy/envoy.py index 747432d4a5950..b656fa34cfa2a 100644 --- a/envoy/datadog_checks/envoy/envoy.py +++ b/envoy/datadog_checks/envoy/envoy.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import re from collections import defaultdict +from urllib.parse import urljoin import requests from six import PY2 -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative diff --git a/envoy/datadog_checks/envoy/parser.py b/envoy/datadog_checks/envoy/parser.py index 24e745e6f39ce..aa5be1f6dfd1b 100644 --- a/envoy/datadog_checks/envoy/parser.py +++ b/envoy/datadog_checks/envoy/parser.py @@ -6,8 +6,6 @@ from math import isnan from typing import Any, Dict, List, Tuple # noqa: F401 -from six.moves import range, zip - from .errors import UnknownMetric, UnknownTags from .metrics import LEGACY_TAG_OVERWRITE, METRIC_PREFIX, METRIC_TREE, MOD_METRICS diff --git a/esxi/datadog_checks/esxi/check.py b/esxi/datadog_checks/esxi/check.py index 6876c46976a6c..2a4d162a9bcb6 100644 --- a/esxi/datadog_checks/esxi/check.py +++ b/esxi/datadog_checks/esxi/check.py @@ -11,7 +11,6 @@ import socks from pyVim import connect from pyVmomi import vim, vmodl -from six import iteritems from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.common import to_string @@ -132,7 +131,7 @@ def _validate_excluded_host_tags(self, excluded_host_tags): def _parse_metric_regex_filters(self, all_metric_filters): allowed_resource_types = RESOURCE_TYPE_TO_NAME.values() metric_filters = {} - for resource_type, filters in iteritems(all_metric_filters): + for resource_type, filters in all_metric_filters.items(): if resource_type not in allowed_resource_types: self.log.warning( "Ignoring metric_filter for resource '%s'. It should be one of '%s'", @@ -142,7 +141,7 @@ def _parse_metric_regex_filters(self, all_metric_filters): continue metric_filters[resource_type] = filters - return {k: [re.compile(r) for r in v] for k, v in iteritems(metric_filters)} + return {k: [re.compile(r) for r in v] for k, v in metric_filters.items()} def _parse_resource_filters(self, all_resource_filters): # Keep a list of resource filters ids (tuple of resource, property and type) that are already registered. diff --git a/esxi/datadog_checks/esxi/utils.py b/esxi/datadog_checks/esxi/utils.py index d4540881af139..bc2134e3c62ac 100644 --- a/esxi/datadog_checks/esxi/utils.py +++ b/esxi/datadog_checks/esxi/utils.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from pyVmomi import vim -from six import iteritems from datadog_checks.base import to_string @@ -76,7 +75,7 @@ def get_mapped_instance_tag(metric_name): tag cannot be guessed by looking at the api results and has to be inferred using documentation or experience. This method acts as a utility to map a metric_name to the meaning of its instance tag. 
""" - for prefix, tag_key in iteritems(METRIC_TO_INSTANCE_TAG_MAPPING): + for prefix, tag_key in METRIC_TO_INSTANCE_TAG_MAPPING.items(): if metric_name.startswith(prefix): return tag_key return 'instance' diff --git a/etcd/datadog_checks/etcd/etcd.py b/etcd/datadog_checks/etcd/etcd.py index e219ff0575f29..a01ff4ea12373 100644 --- a/etcd/datadog_checks/etcd/etcd.py +++ b/etcd/datadog_checks/etcd/etcd.py @@ -1,7 +1,7 @@ # (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import ConfigurationError, OpenMetricsBaseCheck, is_affirmative diff --git a/fluentd/datadog_checks/fluentd/fluentd.py b/fluentd/datadog_checks/fluentd/fluentd.py index f05e6cc2a116f..246cc2bfbdb84 100644 --- a/fluentd/datadog_checks/fluentd/fluentd.py +++ b/fluentd/datadog_checks/fluentd/fluentd.py @@ -4,8 +4,7 @@ # Licensed under Simplified BSD License (see LICENSE) import re - -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.utils.subprocess_output import get_subprocess_output diff --git a/fly_io/datadog_checks/fly_io/check.py b/fly_io/datadog_checks/fly_io/check.py index 840accdd57a02..7b44f5942e72d 100644 --- a/fly_io/datadog_checks/fly_io/check.py +++ b/fly_io/datadog_checks/fly_io/check.py @@ -2,7 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from six.moves.urllib.parse import quote_plus +from urllib.parse import quote_plus from datadog_checks.base import OpenMetricsBaseCheckV2 diff --git a/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py b/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py index 0e28fbaa39e54..54c625e03cc86 100644 --- a/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py +++ b/gitlab_runner/datadog_checks/gitlab_runner/gitlab_runner.py @@ -3,9 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from copy import deepcopy +from urllib.parse import urlparse import requests -from six.moves.urllib.parse import urlparse from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.errors import CheckException diff --git a/glusterfs/datadog_checks/glusterfs/check.py b/glusterfs/datadog_checks/glusterfs/check.py index 71ff1cdcf6acb..64f84d5fe1dd7 100644 --- a/glusterfs/datadog_checks/glusterfs/check.py +++ b/glusterfs/datadog_checks/glusterfs/check.py @@ -12,8 +12,6 @@ import subprocess from typing import Dict, List # noqa: F401 -from six import iteritems - from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.config import is_affirmative @@ -180,7 +178,7 @@ def submit_metrics(self, payload, prefix, metric_mapping, tags): Parse a payload with a given metric_mapping and submit metric for valid values. 
Some values contain measurements like `GiB` which should be removed and only submitted if consistent """ - for key, metric in iteritems(metric_mapping): + for key, metric in metric_mapping.items(): if key in payload: value = payload[key] diff --git a/go_expvar/datadog_checks/go_expvar/go_expvar.py b/go_expvar/datadog_checks/go_expvar/go_expvar.py index f26658bc38a3f..280d4adbf7783 100644 --- a/go_expvar/datadog_checks/go_expvar/go_expvar.py +++ b/go_expvar/datadog_checks/go_expvar/go_expvar.py @@ -5,9 +5,7 @@ import re from collections import defaultdict - -from six import iteritems -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import AgentCheck @@ -269,7 +267,7 @@ def items(self, object): for new_key, new_content in enumerate(object): yield str(new_key), new_content elif isinstance(object, dict): - for new_key, new_content in iteritems(object): + for new_key, new_content in object.items(): yield str(new_key), new_content else: self.log.warning("Could not parse this object, check the json served by the expvar") diff --git a/hdfs_datanode/datadog_checks/hdfs_datanode/hdfs_datanode.py b/hdfs_datanode/datadog_checks/hdfs_datanode/hdfs_datanode.py index 709b3f73540f3..f5fc7712adccd 100644 --- a/hdfs_datanode/datadog_checks/hdfs_datanode/hdfs_datanode.py +++ b/hdfs_datanode/datadog_checks/hdfs_datanode/hdfs_datanode.py @@ -1,10 +1,10 @@ # (C) Datadog, Inc. 2018-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from urllib.parse import urljoin + from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from simplejson import JSONDecodeError -from six import iteritems -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck @@ -87,7 +87,7 @@ def _hdfs_datanode_metrics(self, beans, tags): self.log.debug("Bean name retrieved: %s", bean_name) - for metric, (metric_name, metric_type) in iteritems(self.HDFS_METRICS): + for metric, (metric_name, metric_type) in self.HDFS_METRICS.items(): metric_value = bean.get(metric) if metric_value is not None: self._set_metric(metric_name, metric_type, metric_value, tags) @@ -110,7 +110,7 @@ def _rest_request_to_json(self, url, object_path, query_params, tags): # Add query_params as arguments if query_params: - query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(query_params)]) + query = '&'.join(['{}={}'.format(key, value) for key, value in query_params.items()]) url = urljoin(url, '?' 
+ query) self.log.debug('Attempting to connect to "%s"', url) diff --git a/hdfs_namenode/datadog_checks/hdfs_namenode/hdfs_namenode.py b/hdfs_namenode/datadog_checks/hdfs_namenode/hdfs_namenode.py index 9fd30a0c1dcc2..259703128e68c 100644 --- a/hdfs_namenode/datadog_checks/hdfs_namenode/hdfs_namenode.py +++ b/hdfs_namenode/datadog_checks/hdfs_namenode/hdfs_namenode.py @@ -3,10 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from __future__ import division +from urllib.parse import urljoin + from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from simplejson import JSONDecodeError -from six import iteritems -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck from datadog_checks.base.utils.common import compute_percent @@ -113,7 +113,7 @@ def _hdfs_namenode_metrics(self, beans, metrics, tags): if bean_name != bean_name: raise Exception("Unexpected bean name {}".format(bean_name)) - for metric, (metric_name, metric_type) in iteritems(metrics): + for metric, (metric_name, metric_type) in metrics.items(): metric_value = bean.get(metric) if metric_value is not None: @@ -146,7 +146,7 @@ def _rest_request_to_json(self, url, object_path, query_params, tags=None): # Add query_params as arguments if query_params: - query = '&'.join(['{}={}'.format(key, value) for key, value in iteritems(query_params)]) + query = '&'.join(['{}={}'.format(key, value) for key, value in query_params.items()]) url = urljoin(url, '?' + query) self.log.debug('Attempting to connect to "%s"', url) diff --git a/http_check/datadog_checks/http_check/http_check.py b/http_check/datadog_checks/http_check/http_check.py index 0bc665d290e9e..a3b93d3da6517 100644 --- a/http_check/datadog_checks/http_check/http_check.py +++ b/http_check/datadog_checks/http_check/http_check.py @@ -8,22 +8,17 @@ import socket import time from datetime import datetime +from urllib.parse import urlparse import requests from cryptography import x509 from requests import Response # noqa: F401 -from six import PY2, string_types -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck, ensure_unicode, is_affirmative from .config import DEFAULT_EXPECTED_CODE, from_instance from .utils import get_ca_certs_path -# Apply thread-safety fix, see https://bugs.python.org/issue7980 -if PY2: - import _strptime # noqa - DEFAULT_EXPIRE_DAYS_WARNING = 14 DEFAULT_EXPIRE_DAYS_CRITICAL = 7 DEFAULT_EXPIRE_WARNING = DEFAULT_EXPIRE_DAYS_WARNING * 24 * 3600 @@ -132,7 +127,7 @@ def send_status_down(loginfo, down_msg): persist=True, stream=stream, json=data if method.upper() in DATA_METHODS and isinstance(data, dict) else None, - data=data if method.upper() in DATA_METHODS and isinstance(data, string_types) else None, + data=data if method.upper() in DATA_METHODS and isinstance(data, str) else None, ) except ( socket.timeout, diff --git a/ibm_mq/datadog_checks/ibm_mq/collectors/channel_metric_collector.py b/ibm_mq/datadog_checks/ibm_mq/collectors/channel_metric_collector.py index 61cf572d918ed..18145329f8ef2 100644 --- a/ibm_mq/datadog_checks/ibm_mq/collectors/channel_metric_collector.py +++ b/ibm_mq/datadog_checks/ibm_mq/collectors/channel_metric_collector.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from typing import Callable, Dict, List # noqa: F401 -from six import iteritems - from datadog_checks.base import AgentCheck, to_string from datadog_checks.base.log import CheckLoggingAdapter # noqa: F401 @@ -168,7 +166,7 @@ def 
_submit_channel_status(self, queue_manager, search_channel_name, tags, chann def _submit_metrics_from_properties(self, channel_info, channel_name, metrics_map, tags): # type: (Dict, str, Dict[str, int], List[str] ) -> None - for metric_name, pymqi_type in iteritems(metrics_map): + for metric_name, pymqi_type in metrics_map.items(): metric_full_name = '{}.channel.{}'.format(metrics.METRIC_PREFIX, metric_name) if pymqi_type not in channel_info: self.log.debug("metric '%s' not found in channel: %s", metric_name, channel_name) @@ -181,7 +179,7 @@ def _submit_channel_count(self, channel_name, channel_status, channel_tags): self.log.warning("Status `%s` not found for channel `%s`", channel_status, channel_name) channel_status = STATUS_MQCHS_UNKNOWN - for status, status_label in iteritems(CHANNEL_STATUS_NAME_MAPPING): + for status, status_label in CHANNEL_STATUS_NAME_MAPPING.items(): status_active = int(status == channel_status) self.gauge( self.CHANNEL_COUNT_CHECK, diff --git a/ibm_mq/datadog_checks/ibm_mq/collectors/queue_metric_collector.py b/ibm_mq/datadog_checks/ibm_mq/collectors/queue_metric_collector.py index 2e7413fe3c68f..89ea372bcc564 100644 --- a/ibm_mq/datadog_checks/ibm_mq/collectors/queue_metric_collector.py +++ b/ibm_mq/datadog_checks/ibm_mq/collectors/queue_metric_collector.py @@ -4,8 +4,6 @@ import logging # noqa: F401 from typing import Any, Callable, Dict, List, Set # noqa: F401 -from six import iteritems - from datadog_checks.base import AgentCheck, to_string from datadog_checks.base.types import ServiceCheck # noqa: F401 from datadog_checks.ibm_mq.metrics import GAUGE @@ -150,7 +148,7 @@ def queue_manager_stats(self, queue_manager, tags): """ Get stats from the queue manager """ - for mname, pymqi_value in iteritems(metrics.queue_manager_metrics()): + for mname, pymqi_value in metrics.queue_manager_metrics().items(): try: m = queue_manager.inquire(pymqi_value) mname = '{}.queue_manager.{}'.format(metrics.METRIC_PREFIX, mname) @@ -198,7 +196,7 @@ def queue_stats(self, queue_manager, queue_name, tags): return enriched_tags def _submit_queue_stats(self, queue_info, queue_name, tags): - for metric_suffix, mq_attr in iteritems(metrics.queue_metrics()): + for metric_suffix, mq_attr in metrics.queue_metrics().items(): metric_name = '{}.queue.{}'.format(metrics.METRIC_PREFIX, metric_suffix) if callable(mq_attr): metric_value = mq_attr(queue_info) @@ -235,7 +233,7 @@ def get_pcf_queue_status_metrics(self, queue_manager, queue_name, tags): else: # Response is a list. It likely has only one member in it. 
for queue_info in response: - for mname, values in iteritems(metrics.pcf_metrics()): + for mname, values in metrics.pcf_metrics().items(): metric_name = '{}.queue.{}'.format(metrics.METRIC_PREFIX, mname) try: if callable(values): diff --git a/ibm_mq/datadog_checks/ibm_mq/config.py b/ibm_mq/datadog_checks/ibm_mq/config.py index 281ccab9c2dcd..b4a4385ce577a 100644 --- a/ibm_mq/datadog_checks/ibm_mq/config.py +++ b/ibm_mq/datadog_checks/ibm_mq/config.py @@ -6,7 +6,6 @@ import re from dateutil.tz import UTC -from six import PY2, iteritems from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.base.constants import ServiceCheck @@ -161,9 +160,6 @@ def __init__(self, instance, init_config): pattern = instance.get('queue_manager_process', init_config.get('queue_manager_process', '')) if pattern: - if PY2: - raise ConfigurationError('The `queue_manager_process` option is only supported on Agent 7') - pattern = pattern.replace('', re.escape(self.queue_manager_name)) self.queue_manager_process_pattern = re.compile(pattern) @@ -183,7 +179,7 @@ def _compile_tag_re(self): Compile regex strings from queue_tag_re option and return list of compiled regex/tag pairs """ queue_tag_list = [] - for regex_str, tags in iteritems(self._queue_tag_re): + for regex_str, tags in self._queue_tag_re.items(): try: queue_tag_list.append([re.compile(regex_str), [t.strip() for t in tags.split(',')]]) except TypeError: diff --git a/ibm_mq/datadog_checks/ibm_mq/ibm_mq.py b/ibm_mq/datadog_checks/ibm_mq/ibm_mq.py index 68e43342da3b4..9f72f4d110234 100644 --- a/ibm_mq/datadog_checks/ibm_mq/ibm_mq.py +++ b/ibm_mq/datadog_checks/ibm_mq/ibm_mq.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import threading -from six import iteritems - from datadog_checks.base import AgentCheck from datadog_checks.ibm_mq.collectors.stats_collector import StatsCollector from datadog_checks.ibm_mq.metrics import COUNT, GAUGE @@ -121,7 +119,7 @@ def _collect_metadata(self, queue_manager): def send_metrics_from_properties(self, properties, metrics_map, prefix, tags): # type: (Dict, Dict, str, List[str]) -> None - for metric_name, (pymqi_type, metric_type) in iteritems(metrics_map): + for metric_name, (pymqi_type, metric_type) in metrics_map.items(): metric_full_name = '{}.{}'.format(prefix, metric_name) if pymqi_type not in properties: self.log.debug("MQ type `%s` not found in properties for metric `%s` and tags `%s`", metric_name, tags) diff --git a/ibm_was/datadog_checks/ibm_was/ibm_was.py b/ibm_was/datadog_checks/ibm_was/ibm_was.py index a5e47623beeb5..1ec764b3e9e47 100644 --- a/ibm_was/datadog_checks/ibm_was/ibm_was.py +++ b/ibm_was/datadog_checks/ibm_was/ibm_was.py @@ -5,7 +5,6 @@ import requests from lxml import etree -from six import iteritems from datadog_checks.base import AgentCheck, ConfigurationError, ensure_unicode, is_affirmative @@ -63,7 +62,7 @@ def check(self, _): server_tags = ['server:{}'.format(server.get('name'))] server_tags.extend(node_tags) - for category, prefix in iteritems(self.metric_categories): + for category, prefix in self.metric_categories.items(): if self.collect_stats.get(category): self.log.debug("Collecting %s stats", category) stats = self.get_node_from_name(server, category) @@ -151,7 +150,7 @@ def append_custom_queries(self): def setup_configured_stats(self): collect_stats = {} - for category, prefix in iteritems(metrics.METRIC_CATEGORIES): + for category, prefix in metrics.METRIC_CATEGORIES.items(): if 
is_affirmative(self.instance.get('collect_{}_stats'.format(prefix), True)): collect_stats[category] = True return collect_stats diff --git a/iis/datadog_checks/iis/iis.py b/iis/datadog_checks/iis/iis.py index 195ec391bf41e..97c6fd8295cfc 100644 --- a/iis/datadog_checks/iis/iis.py +++ b/iis/datadog_checks/iis/iis.py @@ -1,8 +1,6 @@ # (C) Datadog, Inc. 2010-present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) -from six import PY3, iteritems - from datadog_checks.base import PDHBaseCheck, is_affirmative from .service_check import app_pool_service_check, site_service_check @@ -49,7 +47,7 @@ class IIS(PDHBaseCheck): APP_POOL = 'app_pool' def __new__(cls, name, init_config, instances): - if PY3 and not is_affirmative(instances[0].get('use_legacy_check_version', False)): + if not is_affirmative(instances[0].get('use_legacy_check_version', False)): from .check import IISCheckV2 return IISCheckV2(name, init_config, instances) @@ -84,7 +82,7 @@ def check(self, _): self.log.debug( "Unknown IIS counter: %s. Falling back to default submission.", counter.english_class_name ) - for instance_name, val in iteritems(counter_values): + for instance_name, val in counter_values.items(): tags = list(self._tags.get(self.instance_hash, [])) if not counter.is_single_instance(): @@ -102,7 +100,7 @@ def collect_sites(self, dd_name, metric_func, counter, counter_values): namespace = self.SITE remaining_sites = self._remaining_data[namespace] - for site_name, value in iteritems(counter_values): + for site_name, value in counter_values.items(): is_single_instance = counter.is_single_instance() if ( not is_single_instance @@ -139,7 +137,7 @@ def collect_app_pools(self, dd_name, metric_func, counter, counter_values): namespace = self.APP_POOL remaining_app_pools = self._remaining_data[namespace] - for app_pool_name, value in iteritems(counter_values): + for app_pool_name, value in counter_values.items(): is_single_instance = counter.is_single_instance() if ( not is_single_instance diff --git a/kong/datadog_checks/kong/kong.py b/kong/datadog_checks/kong/kong.py index 75c2ea5e345b2..ff5eb45768710 100644 --- a/kong/datadog_checks/kong/kong.py +++ b/kong/datadog_checks/kong/kong.py @@ -1,9 +1,10 @@ # (C) Datadog, Inc. 
2010-present # All rights reserved # Licensed under Simplified BSD License (see LICENSE) +from urllib.parse import urlparse + import simplejson as json from six import PY2 -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck, ConfigurationError diff --git a/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py b/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py index b8780f79a8398..70f372d78fdfd 100644 --- a/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py +++ b/kube_controller_manager/datadog_checks/kube_controller_manager/kube_controller_manager.py @@ -5,7 +5,6 @@ import re import requests -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.checks.kube_leader import KubeLeaderElectionMixin @@ -169,7 +168,7 @@ def check(self, instance): transformers[limiter + "_rate_limiter_use"] = self.rate_limiter_use queues = self.DEFAULT_QUEUES + instance.get("extra_queues", []) for queue in queues: - for metric, func in iteritems(self.QUEUE_METRICS_TRANSFORMERS): + for metric, func in self.QUEUE_METRICS_TRANSFORMERS.items(): transformers[queue + metric] = func # Support new metrics (introduced in v1.14.0) diff --git a/kube_dns/datadog_checks/kube_dns/kube_dns.py b/kube_dns/datadog_checks/kube_dns/kube_dns.py index 2efb8a8831144..799fd7586c50d 100644 --- a/kube_dns/datadog_checks/kube_dns/kube_dns.py +++ b/kube_dns/datadog_checks/kube_dns/kube_dns.py @@ -6,7 +6,6 @@ from copy import deepcopy import requests -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck @@ -108,7 +107,7 @@ def submit_as_gauge_and_monotonic_count(self, metric_suffix, metric, scraper_con # Explicit shallow copy of the instance tags _tags = list(scraper_config['custom_tags']) - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): _tags.append('{}:{}'.format(label_name, label_value)) # submit raw metric self.gauge(metric_name, sample[self.SAMPLE_VALUE], _tags) diff --git a/kubelet/datadog_checks/kubelet/cadvisor.py b/kubelet/datadog_checks/kubelet/cadvisor.py index 274699b2145cf..18175f5c482c8 100644 --- a/kubelet/datadog_checks/kubelet/cadvisor.py +++ b/kubelet/datadog_checks/kubelet/cadvisor.py @@ -6,10 +6,9 @@ import logging import numbers from fnmatch import fnmatch +from urllib.parse import urlparse import requests -from six import iteritems -from six.moves.urllib.parse import urlparse from datadog_checks.base.utils.tagging import tagger @@ -118,7 +117,7 @@ def _publish_raw_metrics(self, metric, dat, tags, is_pod, depth=0): self.gauge(metric, float(dat), tags) elif isinstance(dat, dict): - for k, v in iteritems(dat): + for k, v in dat.items(): self._publish_raw_metrics(metric + '.%s' % k.lower(), v, tags, is_pod, depth + 1) elif isinstance(dat, list): diff --git a/kubelet/datadog_checks/kubelet/kubelet.py b/kubelet/datadog_checks/kubelet/kubelet.py index ac4d8f9826ef2..b028e067609c3 100644 --- a/kubelet/datadog_checks/kubelet/kubelet.py +++ b/kubelet/datadog_checks/kubelet/kubelet.py @@ -7,11 +7,10 @@ import sys from collections import defaultdict from copy import deepcopy +from urllib.parse import urlparse import requests from kubeutil import get_connection_info -from six import iteritems -from six.moves.urllib.parse import urlparse from datadog_checks.base import 
AgentCheck, OpenMetricsBaseCheck from datadog_checks.base.checks.kubelet_base.base import KubeletBase, KubeletCredentials, urljoin @@ -532,9 +531,9 @@ def _report_pods_running(self, pods, instance_tags): tags += instance_tags hash_tags = tuple(sorted(tags)) pods_tag_counter[hash_tags] += 1 - for tags, count in iteritems(pods_tag_counter): + for tags, count in pods_tag_counter.items(): self.gauge(self.NAMESPACE + '.pods.running', count, list(tags)) - for tags, count in iteritems(containers_tag_counter): + for tags, count in containers_tag_counter.items(): self.gauge(self.NAMESPACE + '.containers.running', count, list(tags)) def _report_container_spec_metrics(self, pod_list, instance_tags): @@ -578,14 +577,14 @@ def _report_container_spec_metrics(self, pod_list, instance_tags): tags += instance_tags try: - for resource, value_str in iteritems(ctr.get('resources', {}).get('requests', {})): + for resource, value_str in ctr.get('resources', {}).get('requests', {}).items(): value = self.parse_quantity(value_str) self.gauge('{}.{}.requests'.format(self.NAMESPACE, resource), value, tags) except (KeyError, AttributeError) as e: self.log.debug("Unable to retrieve container requests for %s: %s", c_name, e) try: - for resource, value_str in iteritems(ctr.get('resources', {}).get('limits', {})): + for resource, value_str in ctr.get('resources', {}).get('limits', {}).items(): value = self.parse_quantity(value_str) self.gauge('{}.{}.limits'.format(self.NAMESPACE, resource), value, tags) except (KeyError, AttributeError) as e: @@ -730,7 +729,7 @@ def append_pod_tags_to_volume_metrics(self, metric, scraper_config, hostname=Non # Determine the tags to send tags = self._metric_tags(metric.name, val, sample, scraper_config, hostname=custom_hostname) pvc_name, kube_ns = None, None - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == "persistentvolumeclaim": pvc_name = label_value elif label_name == "namespace": diff --git a/kubelet/datadog_checks/kubelet/prometheus.py b/kubelet/datadog_checks/kubelet/prometheus.py index 7c5d53c0e7937..fa2537a598226 100644 --- a/kubelet/datadog_checks/kubelet/prometheus.py +++ b/kubelet/datadog_checks/kubelet/prometheus.py @@ -5,8 +5,6 @@ from copy import deepcopy -from six import iteritems - from datadog_checks.base.checks.kubelet_base.base import urljoin from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.utils.tagging import tagger @@ -294,7 +292,7 @@ def _process_container_metric(self, type, metric_name, metric, scraper_config, l return samples = self._sum_values_by_context(metric, self._get_entity_id_if_container_metric) - for c_id, sample in iteritems(samples): + for c_id, sample in samples.items(): pod_uid = self._get_pod_uid(sample[self.SAMPLE_LABELS]) if self.pod_list_utils.is_excluded(c_id, pod_uid): continue @@ -340,7 +338,7 @@ def _process_pod_rate(self, metric_name, metric, scraper_config, labels=None): return samples = self._sum_values_by_context(metric, self._get_pod_uid_if_pod_metric) - for pod_uid, sample in iteritems(samples): + for pod_uid, sample in samples.items(): pod = get_pod_by_uid(pod_uid, self.pod_list) namespace = pod.get('metadata', {}).get('namespace', None) if self.pod_list_utils.is_namespace_excluded(namespace): @@ -372,7 +370,7 @@ def _process_usage_metric(self, m_name, metric, cache, scraper_config, labels=No seen_keys = {k: False for k in cache} samples = self._sum_values_by_context(metric, 
self._get_entity_id_if_container_metric) - for c_id, sample in iteritems(samples): + for c_id, sample in samples.items(): c_name = get_container_label(sample[self.SAMPLE_LABELS], 'name') if not c_name: continue @@ -407,7 +405,7 @@ def _process_usage_metric(self, m_name, metric, cache, scraper_config, labels=No self.gauge(m_name, val, tags) # purge the cache - for k, seen in iteritems(seen_keys): + for k, seen in seen_keys.items(): if not seen: del cache[k] @@ -418,7 +416,7 @@ def _process_limit_metric(self, m_name, metric, cache, scraper_config, pct_m_nam for each sample in the metric and reports the usage_pct """ samples = self._latest_value_by_context(metric, self._get_entity_id_if_container_metric) - for c_id, sample in iteritems(samples): + for c_id, sample in samples.items(): limit = sample[self.SAMPLE_VALUE] pod_uid = self._get_pod_uid(sample[self.SAMPLE_LABELS]) if self.pod_list_utils.is_excluded(c_id, pod_uid): diff --git a/kubernetes_state/datadog_checks/kubernetes_state/kubernetes_state.py b/kubernetes_state/datadog_checks/kubernetes_state/kubernetes_state.py index 28e4fd78d6a00..f74ff28cf8d77 100644 --- a/kubernetes_state/datadog_checks/kubernetes_state/kubernetes_state.py +++ b/kubernetes_state/datadog_checks/kubernetes_state/kubernetes_state.py @@ -7,8 +7,6 @@ from collections import Counter, defaultdict from copy import deepcopy -from six import iteritems - from datadog_checks.base.checks.openmetrics import OpenMetricsBaseCheck from datadog_checks.base.config import is_affirmative from datadog_checks.base.errors import CheckException @@ -175,18 +173,18 @@ def check(self, instance): self.process(scraper_config, metric_transformers=self.METRIC_TRANSFORMERS) # Logic for Cron Jobs - for job_tags, job in iteritems(self.failed_cron_job_counts): + for job_tags, job in self.failed_cron_job_counts.items(): self.monotonic_count(scraper_config['namespace'] + '.job.failed', job.count, list(job_tags)) job.set_previous_and_reset_current_ts() - for job_tags, job in iteritems(self.succeeded_cron_job_counts): + for job_tags, job in self.succeeded_cron_job_counts.items(): self.monotonic_count(scraper_config['namespace'] + '.job.succeeded', job.count, list(job_tags)) job.set_previous_and_reset_current_ts() # Logic for Jobs - for job_tags, job_count in iteritems(self.job_succeeded_count): + for job_tags, job_count in self.job_succeeded_count.items(): self.monotonic_count(scraper_config['namespace'] + '.job.succeeded', job_count, list(job_tags)) - for job_tags, job_count in iteritems(self.job_failed_count): + for job_tags, job_count in self.job_failed_count.items(): self.monotonic_count(scraper_config['namespace'] + '.job.failed', job_count, list(job_tags)) def _filter_metric(self, metric, scraper_config): @@ -508,7 +506,7 @@ def _condition_to_tag_check(self, sample, base_sc_name, mapping, scraper_config, metric_name = scraper_config['namespace'] + '.node.by_condition' metric_tags = [] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): metric_tags += self._build_tags(label_name, label_value, scraper_config) self.gauge( metric_name, @@ -632,7 +630,7 @@ def kube_pod_status_phase(self, metric, scraper_config): ) status_phase_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE] - for tags, count in iteritems(status_phase_counter): + for tags, count in status_phase_counter.items(): self.gauge(metric_name, count, tags=list(tags)) def _submit_metric_kube_pod_container_status_reason( @@ -650,7 +648,7 @@ def 
_submit_metric_kube_pod_container_status_reason( else: continue - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == "reason": continue @@ -687,7 +685,7 @@ def kube_cronjob_next_schedule_time(self, metric, scraper_config): for sample in metric.samples: on_schedule = int(sample[self.SAMPLE_VALUE]) - curr_time tags = [] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): tags += self._build_tags(label_name, label_value, scraper_config) tags += scraper_config['custom_tags'] @@ -703,7 +701,7 @@ def kube_job_complete(self, metric, scraper_config): service_check_name = scraper_config['namespace'] + '.job.complete' for sample in metric.samples: tags = [] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == 'job' or label_name == 'job_name': tags += self._get_job_tags(label_name, label_value, scraper_config) else: @@ -714,7 +712,7 @@ def kube_job_failed(self, metric, scraper_config): service_check_name = scraper_config['namespace'] + '.job.complete' for sample in metric.samples: tags = [] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == 'job' or label_name == 'job_name': tags += self._get_job_tags(label_name, label_value, scraper_config) else: @@ -725,7 +723,7 @@ def kube_job_status_failed(self, metric, scraper_config): for sample in metric.samples: job_ts = None tags = [] + scraper_config['custom_tags'] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == 'job' or label_name == 'job_name': tags += self._get_job_tags(label_name, label_value, scraper_config) job_ts = self._extract_job_timestamp(label_value) @@ -742,7 +740,7 @@ def kube_job_status_succeeded(self, metric, scraper_config): for sample in metric.samples: job_ts = None tags = [] + scraper_config['custom_tags'] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name == 'job' or label_name == 'job_name': tags += self._get_job_tags(label_name, label_value, scraper_config) job_ts = self._extract_job_timestamp(label_value) @@ -780,7 +778,7 @@ def kube_node_status_condition(self, metric, scraper_config): ) by_condition_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE] - for tags, count in iteritems(by_condition_counter): + for tags, count in by_condition_counter.items(): self.gauge(metric_name, count, tags=list(tags)) def kube_node_status_ready(self, metric, scraper_config): @@ -850,7 +848,7 @@ def kube_node_spec_unschedulable(self, metric, scraper_config): if metric.type in METRIC_TYPES: for sample in metric.samples: tags = [] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): tags += self._build_tags(label_name, label_value, scraper_config) tags += scraper_config['custom_tags'] status = statuses[int(sample[self.SAMPLE_VALUE])] # value can be 0 or 1 @@ -920,7 +918,7 @@ def sum_values_by_tags(self, metric, scraper_config): tags = self._tags_for_count(sample, config, scraper_config) object_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE] - 
for tags, count in iteritems(object_counter): + for tags, count in object_counter.items(): self.gauge(metric_name, count, tags=list(tags)) def count_objects_by_tags(self, metric, scraper_config): @@ -933,7 +931,7 @@ def count_objects_by_tags(self, metric, scraper_config): tags = self._tags_for_count(sample, config, scraper_config) object_counter[tuple(sorted(tags))] += 1 - for tags, count in iteritems(object_counter): + for tags, count in object_counter.items(): self.gauge(metric_name, count, tags=list(tags)) def _tags_for_count(self, sample, count_config, scraper_config): @@ -980,7 +978,7 @@ def _metric_tags(self, metric_name, val, sample, scraper_config, hostname=None): custom_tags = scraper_config['custom_tags'] _tags = list(custom_tags) _tags += scraper_config['_metric_tags'] - for label_name, label_value in iteritems(sample[self.SAMPLE_LABELS]): + for label_name, label_value in sample[self.SAMPLE_LABELS].items(): if label_name not in scraper_config['exclude_labels']: _tags += self._build_tags(label_name, label_value, scraper_config) return self._finalize_tags_to_submit( diff --git a/lighttpd/datadog_checks/lighttpd/lighttpd.py b/lighttpd/datadog_checks/lighttpd/lighttpd.py index b3b05a8ac0cfb..0c9201891f20c 100644 --- a/lighttpd/datadog_checks/lighttpd/lighttpd.py +++ b/lighttpd/datadog_checks/lighttpd/lighttpd.py @@ -3,8 +3,7 @@ # Licensed under Simplified BSD License (see LICENSE) import re - -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import AgentCheck diff --git a/linux_proc_extras/datadog_checks/linux_proc_extras/linux_proc_extras.py b/linux_proc_extras/datadog_checks/linux_proc_extras/linux_proc_extras.py index d44a735bd01cf..18228bd74154f 100644 --- a/linux_proc_extras/datadog_checks/linux_proc_extras/linux_proc_extras.py +++ b/linux_proc_extras/datadog_checks/linux_proc_extras/linux_proc_extras.py @@ -5,8 +5,6 @@ from collections import defaultdict -from six import iteritems - from datadog_checks.base import AgentCheck from datadog_checks.base.utils.subprocess_output import get_subprocess_output @@ -53,7 +51,7 @@ def set_paths(self): "interrupts_info": "interrupts", } - for key, path in iteritems(self.proc_path_map): + for key, path in self.proc_path_map.items(): self.proc_path_map[key] = "{procfs}/{path}".format(procfs=proc_location, path=path) def get_inode_info(self): diff --git a/mapr/datadog_checks/mapr/mapr.py b/mapr/datadog_checks/mapr/mapr.py index 295b4e6364e53..994a17302a65f 100644 --- a/mapr/datadog_checks/mapr/mapr.py +++ b/mapr/datadog_checks/mapr/mapr.py @@ -6,8 +6,6 @@ import os import re -from six import iteritems - from datadog_checks.base import AgentCheck, ensure_unicode, is_affirmative from datadog_checks.base.errors import CheckException @@ -191,7 +189,7 @@ def should_collect_metric(self, metric_name): def submit_metric(self, metric): metric_name = metric['metric'] tags = copy.deepcopy(self.custom_tags) - for k, v in iteritems(metric['tags']): + for k, v in metric['tags'].items(): if k == 'clustername': tags.append("{}:{}".format('mapr_cluster', v)) if not self._disable_legacy_cluster_tag: diff --git a/marathon/datadog_checks/marathon/marathon.py b/marathon/datadog_checks/marathon/marathon.py index 5bacbf863f2c9..c5edf1d3bd393 100644 --- a/marathon/datadog_checks/marathon/marathon.py +++ b/marathon/datadog_checks/marathon/marathon.py @@ -2,10 +2,9 @@ # (C) graemej 2014 # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from urllib.parse import urljoin import 
requests -from six import iteritems -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck @@ -217,7 +216,7 @@ def process_queues(self, url, acs_url, tags=None, label_tags=None, group=None): queued.add(queue['app']['id']) - for m_type, sub_metric in iteritems(self.QUEUE_METRICS): + for m_type, sub_metric in self.QUEUE_METRICS.items(): if isinstance(sub_metric, list): for attr, name in sub_metric: try: diff --git a/marklogic/datadog_checks/marklogic/parsers/request.py b/marklogic/datadog_checks/marklogic/parsers/request.py index 8537926a95e77..7c54d3621d5b6 100644 --- a/marklogic/datadog_checks/marklogic/parsers/request.py +++ b/marklogic/datadog_checks/marklogic/parsers/request.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from typing import Any, Dict, Generator, List, Tuple # noqa: F401 -from six import iteritems - from .common import build_metric_to_submit, is_metric @@ -22,7 +20,7 @@ def _parse_request_metrics(data, tags): # type: (Dict[str, Any], List[str]) -> Generator[Tuple, None, None] list_summary = data['request-default-list']['list-summary'] - for key, value in iteritems(list_summary): + for key, value in list_summary.items(): if is_metric(value): metric = build_metric_to_submit("requests.{}".format(key), value, tags) if metric is not None: diff --git a/marklogic/datadog_checks/marklogic/parsers/status.py b/marklogic/datadog_checks/marklogic/parsers/status.py index 50af6190ee4fa..20ca0459773fa 100644 --- a/marklogic/datadog_checks/marklogic/parsers/status.py +++ b/marklogic/datadog_checks/marklogic/parsers/status.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from typing import Any, Dict, Generator, List, Tuple # noqa: F401 -from six import iteritems - from ..constants import RESOURCE_TYPES from .common import build_metric_to_submit, is_metric @@ -26,7 +24,7 @@ def parse_per_resource_status_metrics(resource_type, data, tags): def parse_summary_status_base_metrics(data, tags): # type: (Dict[str, Any], List[str]) -> Generator[Tuple, None, None] relations = data['local-cluster-status']['status-relations'] - for key, resource_data in iteritems(relations): + for key, resource_data in relations.items(): if not key.endswith('-status'): continue resource_type = resource_data['typeref'] @@ -40,7 +38,7 @@ def parse_summary_status_base_metrics(data, tags): def _parse_status_metrics(metric_prefix, metrics, tags): # type: (str, Dict[str, Any], List[str]) -> Generator[Tuple, None, None] - for key, data in iteritems(metrics): + for key, data in metrics.items(): if key in ['rate-properties', 'load-properties']: prop_type = key[: key.index('-properties')] total_key = 'total-' + prop_type diff --git a/marklogic/datadog_checks/marklogic/parsers/storage.py b/marklogic/datadog_checks/marklogic/parsers/storage.py index 2d131e371a305..aa5e1d51b84ba 100644 --- a/marklogic/datadog_checks/marklogic/parsers/storage.py +++ b/marklogic/datadog_checks/marklogic/parsers/storage.py @@ -3,8 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) from typing import Any, Dict, Generator, List, Tuple # noqa: F401 -from six import iteritems - from .common import build_metric_to_submit, is_metric @@ -39,7 +37,7 @@ def _parse_storage_metrics(data, tags, include_location_forest): host_tags.append('marklogic_host_name:{}'.format(hosts_meta[host_id])) for location_data in host_data['locations']['location']: location_tags = host_tags + ['storage_path:{}'.format(location_data['path'])] - for host_key, host_value in 
iteritems(location_data): + for host_key, host_value in location_data.items(): if host_key == 'location-forests': location_value = host_value['location-forest'] for forest_data in location_value: @@ -48,7 +46,7 @@ def _parse_storage_metrics(data, tags, include_location_forest): "forest_name:{}".format(forest_data['nameref']), ] if include_location_forest: - for forest_key, forest_value in iteritems(forest_data): + for forest_key, forest_value in forest_data.items(): if forest_key == 'disk-size': metric = build_metric_to_submit( "forests.storage.{}".format(forest_key), forest_value, tags=forest_tags diff --git a/mesos_slave/datadog_checks/mesos_slave/mesos_slave.py b/mesos_slave/datadog_checks/mesos_slave/mesos_slave.py index 49f381e45ca84..4b53468797345 100644 --- a/mesos_slave/datadog_checks/mesos_slave/mesos_slave.py +++ b/mesos_slave/datadog_checks/mesos_slave/mesos_slave.py @@ -6,10 +6,9 @@ Collects metrics from mesos slave node. """ +from urllib.parse import urlparse from requests.exceptions import Timeout -from six import iteritems -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck, ConfigurationError @@ -157,7 +156,7 @@ def _process_stats_info(self, url, tags): self.STATS_METRICS, ] for m in metrics: - for key_name, (metric_name, metric_func) in iteritems(m): + for key_name, (metric_name, metric_func) in m.items(): if key_name in stats_metrics: metric_func(self, metric_name, stats_metrics[key_name], tags=stats_tags) self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=stats_tags) @@ -222,5 +221,5 @@ def _process_tasks(self, tasks, state_metrics, tags): task_tags = ['task_name:' + t['name']] task_tags.extend(tags) self.service_check(t['name'] + '.ok', self.TASK_STATUS[t['state']], tags=task_tags) - for key_name, (metric_name, metric_func) in iteritems(self.TASK_METRICS): + for key_name, (metric_name, metric_func) in self.TASK_METRICS.items(): metric_func(self, metric_name, t['resources'][key_name], tags=task_tags) diff --git a/mongo/datadog_checks/mongo/collectors/replica.py b/mongo/datadog_checks/mongo/collectors/replica.py index 95404d3dedfcd..bc0eec37afc89 100644 --- a/mongo/datadog_checks/mongo/collectors/replica.py +++ b/mongo/datadog_checks/mongo/collectors/replica.py @@ -3,8 +3,7 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import time - -from six.moves.urllib.parse import urlsplit +from urllib.parse import urlsplit from datadog_checks.mongo.api import MongoApi from datadog_checks.mongo.collectors.base import MongoCollector diff --git a/mongo/datadog_checks/mongo/utils.py b/mongo/datadog_checks/mongo/utils.py index 24a84ca025c0f..f2c72e3303a94 100644 --- a/mongo/datadog_checks/mongo/utils.py +++ b/mongo/datadog_checks/mongo/utils.py @@ -2,9 +2,9 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import re +from urllib.parse import quote_plus, unquote_plus, urlencode, urlunparse import pymongo -from six.moves.urllib.parse import quote_plus, unquote_plus, urlencode, urlunparse def build_connection_string(hosts, scheme, username=None, password=None, database=None, options=None): diff --git a/network/datadog_checks/network/check_bsd.py b/network/datadog_checks/network/check_bsd.py index fd76bd9b447c5..545694c05ad4a 100644 --- a/network/datadog_checks/network/check_bsd.py +++ b/network/datadog_checks/network/check_bsd.py @@ -2,17 +2,12 @@ # All rights reserved # Licensed under Simplified BSD License (see LICENSE) -from six import PY3, iteritems - from 
datadog_checks.base.utils.platform import Platform from datadog_checks.base.utils.subprocess_output import SubprocessOutputEmptyError, get_subprocess_output from . import Network from .const import BSD_TCP_METRICS -if PY3: - long = int - class BSDNetwork(Network): def __init__(self, name, init_config, instances): @@ -80,15 +75,15 @@ def check(self, _): current = iface # Filter inactive interfaces - if self.parse_long(x[-5]) or self.parse_long(x[-2]): + if self.parse_int(x[-5]) or self.parse_int(x[-2]): iface = current metrics = { - 'bytes_rcvd': self.parse_long(x[-5]), - 'bytes_sent': self.parse_long(x[-2]), - 'packets_in.count': self.parse_long(x[-7]), - 'packets_in.error': self.parse_long(x[-6]), - 'packets_out.count': self.parse_long(x[-4]), - 'packets_out.error': self.parse_long(x[-3]), + 'bytes_rcvd': self.parse_int(x[-5]), + 'bytes_sent': self.parse_int(x[-2]), + 'packets_in.count': self.parse_int(x[-7]), + 'packets_in.error': self.parse_int(x[-6]), + 'packets_out.count': self.parse_int(x[-4]), + 'packets_out.error': self.parse_int(x[-3]), } self.submit_devicemetrics(iface, metrics, custom_tags) except SubprocessOutputEmptyError: @@ -139,7 +134,7 @@ def check(self, _): # udp6 0 0 :::41458 :::* metrics = self.parse_cx_state(lines[2:], self.tcp_states['netstat'], 5) - for metric, value in iteritems(metrics): + for metric, value in metrics.items(): self.gauge(metric, value, tags=custom_tags) except SubprocessOutputEmptyError: self.log.exception("Error collecting connection states.") diff --git a/network/datadog_checks/network/check_linux.py b/network/datadog_checks/network/check_linux.py index dc4d1683c213b..986003b763052 100644 --- a/network/datadog_checks/network/check_linux.py +++ b/network/datadog_checks/network/check_linux.py @@ -4,8 +4,6 @@ import os import socket -from six import PY3, iteritems - from datadog_checks.base import is_affirmative from datadog_checks.base.utils.common import pattern_filter from datadog_checks.base.utils.subprocess_output import SubprocessOutputEmptyError, get_subprocess_output @@ -19,9 +17,6 @@ except ImportError: from datadog_checks.base.stubs import datadog_agent -if PY3: - long = int - class LinuxNetwork(Network): def __init__(self, name, init_config, instances): @@ -89,7 +84,7 @@ def check(self, _): self.histogram('system.net.tcp.recv_q', recvq, custom_tags + ["state:" + state]) self.histogram('system.net.tcp.send_q', sendq, custom_tags + ["state:" + state]) - for metric, value in iteritems(metrics): + for metric, value in metrics.items(): self.gauge(metric, value, tags=custom_tags) except OSError as e: @@ -107,7 +102,7 @@ def check(self, _): # udp6 0 0 :::41458 :::* metrics = self.parse_cx_state(lines[2:], self.tcp_states['netstat'], 5) - for metric, value in iteritems(metrics): + for metric, value in metrics.items(): self.gauge(metric, value, tags=custom_tags) if self._collect_cx_queues: @@ -136,17 +131,17 @@ def check(self, _): cols = line.split(':', 1) x = cols[1].split() # Filter inactive interfaces - if self.parse_long(x[0]) or self.parse_long(x[8]): + if self.parse_int(x[0]) or self.parse_int(x[8]): iface = cols[0].strip() metrics = { - 'bytes_rcvd': self.parse_long(x[0]), - 'bytes_sent': self.parse_long(x[8]), - 'packets_in.count': self.parse_long(x[1]), - 'packets_in.drop': self.parse_long(x[3]), - 'packets_in.error': self.parse_long(x[2]) + self.parse_long(x[3]), - 'packets_out.count': self.parse_long(x[9]), - 'packets_out.drop': self.parse_long(x[11]), - 'packets_out.error': self.parse_long(x[10]) + self.parse_long(x[11]), + 
'bytes_rcvd': self.parse_int(x[0]), + 'bytes_sent': self.parse_int(x[8]), + 'packets_in.count': self.parse_int(x[1]), + 'packets_in.drop': self.parse_int(x[3]), + 'packets_in.error': self.parse_int(x[2]) + self.parse_int(x[3]), + 'packets_out.count': self.parse_int(x[9]), + 'packets_out.drop': self.parse_int(x[11]), + 'packets_out.error': self.parse_int(x[10]) + self.parse_int(x[11]), } self.submit_devicemetrics(iface, metrics, custom_tags) self._handle_ethtool_stats(iface, custom_tags) @@ -251,14 +246,14 @@ def check(self, _): for met in nstat_metrics_names[k]: if met in netstat_data.get(k, {}): self.submit_netmetric( - nstat_metrics_names[k][met], self.parse_long(netstat_data[k][met]), tags=custom_tags + nstat_metrics_names[k][met], self.parse_int(netstat_data[k][met]), tags=custom_tags ) for k in nstat_metrics_gauge_names: for met in nstat_metrics_gauge_names[k]: if met in netstat_data.get(k, {}): self._submit_netmetric_gauge( - nstat_metrics_gauge_names[k][met], self.parse_long(netstat_data[k][met]), tags=custom_tags + nstat_metrics_gauge_names[k][met], self.parse_int(netstat_data[k][met]), tags=custom_tags ) # Get the conntrack -S information @@ -460,7 +455,7 @@ def _submit_ena_metrics(self, iface, vals_by_metric, tags): assert m in allowed count = 0 - for metric, val in iteritems(vals_by_metric): + for metric, val in vals_by_metric.items(): self.log.debug("Submitting system.net.%s", metric) self.gauge('system.net.%s' % metric, val, tags=metric_tags) count += 1 @@ -480,9 +475,9 @@ def _submit_ethtool_metrics(self, iface, ethtool_metrics, base_tags): base_tags_with_device.append('device:{}'.format(iface)) count = 0 - for ethtool_tag, metric_map in iteritems(ethtool_metrics): + for ethtool_tag, metric_map in ethtool_metrics.items(): tags = base_tags_with_device + [ethtool_tag] - for metric, val in iteritems(metric_map): + for metric, val in metric_map.items(): self.log.debug("Submitting system.net.%s", metric) self.monotonic_count('system.net.%s' % metric, val, tags=tags) count += 1 diff --git a/network/datadog_checks/network/check_solaris.py b/network/datadog_checks/network/check_solaris.py index 62be4697b88ce..da5868ec0f4e8 100644 --- a/network/datadog_checks/network/check_solaris.py +++ b/network/datadog_checks/network/check_solaris.py @@ -2,16 +2,11 @@ # All rights reserved # Licensed under Simplified BSD License (see LICENSE) -from six import PY3, iteritems - from datadog_checks.base.utils.subprocess_output import SubprocessOutputEmptyError, get_subprocess_output from . import Network from .const import SOLARIS_TCP_METRICS -if PY3: - long = int - class SolarisNetwork(Network): def __init__(self, name, init_config, instances): @@ -25,7 +20,7 @@ def check(self, instance): try: netstat, _, _ = get_subprocess_output(["kstat", "-p", "link:0:"], self.log) metrics_by_interface = self._parse_solaris_netstat(netstat) - for interface, metrics in iteritems(metrics_by_interface): + for interface, metrics in metrics_by_interface.items(): self.submit_devicemetrics(interface, metrics, custom_tags) except SubprocessOutputEmptyError: self.log.exception("Error collecting kstat stats.") @@ -136,7 +131,7 @@ def _parse_solaris_netstat(self, netstat_output): # Add it to this interface's list of metrics. 
metrics = metrics_by_interface.get(iface, {}) - metrics[ddname] = self.parse_long(cols[1]) + metrics[ddname] = self.parse_int(cols[1]) metrics_by_interface[iface] = metrics return metrics_by_interface diff --git a/network/datadog_checks/network/check_windows.py b/network/datadog_checks/network/check_windows.py index 0115a4588fb3a..fa56342dc77b7 100644 --- a/network/datadog_checks/network/check_windows.py +++ b/network/datadog_checks/network/check_windows.py @@ -7,13 +7,10 @@ from ctypes.wintypes import DWORD import psutil -from six import PY3, iteritems from . import Network Iphlpapi = windll.Iphlpapi -if PY3: - long = int class TCPSTATS(Structure): @@ -82,7 +79,7 @@ def _cx_state_psutil(self, tags=None): else: metrics[metric] += 1 - for metric, value in iteritems(metrics): + for metric, value in metrics.items(): self.gauge(metric, value, tags=tags) def _cx_counters_psutil(self, tags=None): @@ -90,7 +87,7 @@ def _cx_counters_psutil(self, tags=None): Collect metrics about interfaces counters using psutil """ tags = [] if tags is None else tags - for iface, counters in iteritems(psutil.net_io_counters(pernic=True)): + for iface, counters in psutil.net_io_counters(pernic=True).items(): metrics = { 'bytes_rcvd': counters.bytes_recv, 'bytes_sent': counters.bytes_sent, diff --git a/network/datadog_checks/network/network.py b/network/datadog_checks/network/network.py index ef20c46657bc1..b9f6d5cf97cac 100644 --- a/network/datadog_checks/network/network.py +++ b/network/datadog_checks/network/network.py @@ -7,35 +7,24 @@ """ import re +import shutil import socket import psutil -from six import PY3, iteritems, itervalues from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.errors import CheckException from datadog_checks.base.utils.platform import Platform +# fcntl only available on Unix systems try: import fcntl except ImportError: fcntl = None -if PY3: - long = int - -# Use a different find_executable implementation depending on Python version, -# because we want to avoid depending on distutils. 
-if PY3: - import shutil - - def find_executable(name): - return shutil.which(name) - -else: - # Fallback to distutils for Python 2 as shutil.which was added on Python 3.3 - from distutils.spawn import find_executable +def find_executable(name): + return shutil.which(name) class Network(AgentCheck): @@ -257,7 +246,7 @@ def submit_devicemetrics(self, iface, vals_by_metric, tags): ) count = 0 - for metric, val in iteritems(vals_by_metric): + for metric, val in vals_by_metric.items(): self.rate('system.net.%s' % metric, val, tags=metric_tags) count += 1 self.log.debug("tracked %s network metrics for interface %s", count, iface) @@ -273,9 +262,9 @@ def get_expected_metrics(self): ] return expected_metrics - def parse_long(self, v): + def parse_int(self, v): try: - return long(v) + return int(v) except ValueError: return 0 @@ -291,7 +280,7 @@ def submit_regexed_values(self, output, regex_list, tags): for regex, metric in regex_list: value = re.match(regex, line) if value: - self.submit_netmetric(metric, self.parse_long(value.group(1)), tags=tags) + self.submit_netmetric(metric, self.parse_int(value.group(1)), tags=tags) def is_collect_cx_state_runnable(self, proc_location): """ @@ -325,7 +314,7 @@ def get_net_proc_base_location(proc_location): return net_proc_base_location def _get_metrics(self): - return {val: 0 for val in itervalues(self.cx_state_gauge)} + return {val: 0 for val in self.cx_state_gauge.values()} def parse_cx_state(self, lines, tcp_states, state_col, protocol=None, ip_version=None): """ diff --git a/openstack/datadog_checks/openstack/openstack.py b/openstack/datadog_checks/openstack/openstack.py index c8a3bfae19c44..e4f141197b98f 100644 --- a/openstack/datadog_checks/openstack/openstack.py +++ b/openstack/datadog_checks/openstack/openstack.py @@ -8,11 +8,10 @@ import re import time from datetime import datetime, timedelta +from urllib.parse import urljoin import requests import simplejson as json -from six import iteritems -from six.moves.urllib.parse import urljoin from datadog_checks.base import AgentCheck, is_affirmative @@ -852,7 +851,7 @@ def get_stats_for_single_hypervisor(self, hyp_id, instance, host_tags=None, cust else: self.service_check(self.HYPERVISOR_SC, AgentCheck.OK, tags=service_check_tags) - for label, val in iteritems(hyp): + for label, val in hyp.items(): if label in NOVA_HYPERVISOR_METRICS: metric_label = "openstack.nova.{0}".format(label) self.gauge(metric_label, val, tags=tags) @@ -1196,7 +1195,7 @@ def check(self, instance): # The scopes we iterate over should all be OpenStackProjectScope # instances projects = [] - for _, scope in iteritems(scope_map): + for _, scope in scope_map.items(): # Store the scope on the object so we don't have to keep passing it around self._current_scope = scope @@ -1408,7 +1407,7 @@ def get_external_host_tags(self): """ self.log.debug("Collecting external_host_tags now") external_host_tags = [] - for k, v in iteritems(self.external_host_tags): + for k, v in self.external_host_tags.items(): external_host_tags.append((k, {SOURCE_TYPE: v})) self.log.debug("Sending external_host_tags: %s", external_host_tags) diff --git a/openstack_controller/datadog_checks/openstack_controller/legacy/api.py b/openstack_controller/datadog_checks/openstack_controller/legacy/api.py index ce758ccb9c0eb..6d115ef6e0163 100644 --- a/openstack_controller/datadog_checks/openstack_controller/legacy/api.py +++ b/openstack_controller/datadog_checks/openstack_controller/legacy/api.py @@ -4,11 +4,10 @@ import copy from os import environ +from urllib.parse 
import urljoin import requests from openstack import connection -from six import PY3 -from six.moves.urllib.parse import urljoin from .exceptions import ( AuthenticationNeeded, @@ -215,15 +214,10 @@ def get_os_hypervisors_detail(self): return self.connection.list_hypervisors() def get_os_hypervisor_uptime(self, hypervisor): - if PY3: - if hypervisor.uptime is None: - self._check_authentication() - self.connection.compute.get_hypervisor_uptime(hypervisor) - return hypervisor.uptime - else: - # Hypervisor uptime is not available in openstacksdk 0.24.0. - self.logger.warning("Hypervisor uptime is not available with this version of openstacksdk") - raise NotImplementedError() + if hypervisor.uptime is None: + self._check_authentication() + self.connection.compute.get_hypervisor_uptime(hypervisor) + return hypervisor.uptime def get_os_aggregates(self): # Each aggregate is missing the 'uuid' attribute compared to what is returned by SimpleApi diff --git a/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py b/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py index 2f567008fe4aa..0be77873e84dc 100644 --- a/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py +++ b/openstack_controller/datadog_checks/openstack_controller/legacy/openstack_controller_legacy.py @@ -10,7 +10,6 @@ import requests from openstack.config.loader import OpenStackConfig -from six import iteritems, itervalues from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.common import pattern_filter @@ -213,7 +212,7 @@ def collect_hypervisors_metrics( """ # Create a dictionary with hypervisor hostname as key and the list of project names as value hyp_project_names = defaultdict(set) - for server in itervalues(servers): + for server in servers.values(): hypervisor_hostname = server.get('hypervisor_hostname') if not hypervisor_hostname: self.log.debug( @@ -276,7 +275,7 @@ def get_stats_for_single_hypervisor( if not collect_hypervisor_metrics: return - for label, val in iteritems(hyp): + for label, val in hyp.items(): if label in NOVA_HYPERVISOR_METRICS: metric_label = "openstack.nova.{}".format(label) self.gauge(metric_label, val, tags=tags) @@ -365,7 +364,7 @@ def populate_servers_cache(self, projects, exclude_server_id_rules): # "tenant_id can also be requested which is alias of project_id but that is not # recommended to use as that will be removed in future." 
tenant_to_name = {} - for name, p in iteritems(projects): + for name, p in projects.items(): tenant_to_name[p.get('id')] = name cached_servers = self.servers_cache.get('servers') @@ -379,7 +378,7 @@ def populate_servers_cache(self, projects, exclude_server_id_rules): # Filter out excluded servers servers = {} - for updated_server_id, updated_server in iteritems(updated_servers): + for updated_server_id, updated_server in updated_servers.items(): if not any(re.match(rule, updated_server_id) for rule in exclude_server_id_rules): servers[updated_server_id] = updated_server @@ -717,7 +716,7 @@ def check(self, instance): projects = self.get_projects(include_project_name_rules, exclude_project_name_rules) if collect_project_metrics: - for project in itervalues(projects): + for project in projects.values(): self.collect_project_limit(project, custom_tags) servers = self.populate_servers_cache(projects, exclude_server_id_rules) @@ -733,16 +732,16 @@ def check(self, instance): if collect_server_diagnostic_metrics or collect_server_flavor_metrics: if collect_server_diagnostic_metrics: self.log.debug("Fetching stats from %s server(s)", len(servers)) - for server in itervalues(servers): + for server in servers.values(): self.collect_server_diagnostic_metrics(server, tags=custom_tags, use_shortname=use_shortname) if collect_server_flavor_metrics: - if len(servers) >= 1 and 'flavor_id' in next(itervalues(servers)): + if len(servers) >= 1 and 'flavor_id' in next(iter(servers.values())): self.log.debug("Fetching server flavors") # If flavors are not part of servers detail (new in version 2.47) then we need to fetch them flavors = self.get_flavors() else: flavors = None - for server in itervalues(servers): + for server in servers.values(): self.collect_server_flavor_metrics( server, flavors, tags=custom_tags, use_shortname=use_shortname ) @@ -790,7 +789,7 @@ def get_external_host_tags(self): """ self.log.debug("Collecting external_host_tags") external_host_tags = [] - for k, v in iteritems(self.external_host_tags): + for k, v in self.external_host_tags.items(): external_host_tags.append((k, {SOURCE_TYPE: v})) self.log.debug("Sending external_host_tags: %s", external_host_tags) @@ -831,7 +830,7 @@ def get_projects(self, include_project_name_rules, exclude_project_name_rules): filtered_project_names = pattern_filter( list(project_by_name), whitelist=include_project_name_rules, blacklist=exclude_project_name_rules ) - result = {name: v for (name, v) in iteritems(project_by_name) if name in filtered_project_names} + result = {name: v for (name, v) in project_by_name.items() if name in filtered_project_names} return result # Neutron Proxy Methods diff --git a/pgbouncer/datadog_checks/pgbouncer/pgbouncer.py b/pgbouncer/datadog_checks/pgbouncer/pgbouncer.py index 8a95e0d5f07c0..8401d06a81a2c 100644 --- a/pgbouncer/datadog_checks/pgbouncer/pgbouncer.py +++ b/pgbouncer/datadog_checks/pgbouncer/pgbouncer.py @@ -3,10 +3,10 @@ # Licensed under Simplified BSD License (see LICENSE) import re import time +from urllib.parse import urlparse import psycopg2 as pg from psycopg2 import extras as pgextras -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.pgbouncer.metrics import ( diff --git a/php_fpm/datadog_checks/php_fpm/php_fpm.py b/php_fpm/datadog_checks/php_fpm/php_fpm.py index 5aab48ccab25c..44096e33a6eb3 100644 --- a/php_fpm/datadog_checks/php_fpm/php_fpm.py +++ b/php_fpm/datadog_checks/php_fpm/php_fpm.py @@ -5,36 +5,31 @@ 
import random import socket import time - -from six import PY3, StringIO, iteritems, string_types -from six.moves.urllib.parse import urlparse +from io import StringIO +from urllib.parse import urlparse from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.time import get_precise_time -if PY3: - # Flup package does not exist anymore so what's needed is vendored - # flup.client.fcgi_app.FCGIApp flup-py3 version 1.0.3 - from .vendor.fcgi_app import FCGIApp - - def get_connection(self): - if self._connect is not None: - if isinstance(self._connect, string_types): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.connect(self._connect) - elif hasattr(socket, 'create_connection'): - sock = socket.create_connection(self._connect) - else: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect(self._connect) - return sock +# Flup package does not exist anymore so what's needed is vendored +# flup.client.fcgi_app.FCGIApp flup-py3 version 1.0.3 +from .vendor.fcgi_app import FCGIApp - FCGIApp._getConnection = get_connection -else: - # flup version 1.0.3.dev-20110405 - from .vendor.fcgi_app_py2 import FCGIApp + +def get_connection(self): + if self._connect is not None: + if isinstance(self._connect, str): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.connect(self._connect) + elif hasattr(socket, 'create_connection'): + sock = socket.create_connection(self._connect) + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.connect(self._connect) + return sock +FCGIApp._getConnection = get_connection # Relax param filtering FCGIApp._environPrefixes.extend(('DOCUMENT_', 'SCRIPT_')) DEFAULT_TIMEOUT = 10 @@ -142,13 +137,13 @@ def _process_status(self, status_url, tags, http_host, use_fastcgi): self.gauge(self.STATUS_DURATION_NAME, check_duration, tags=metric_tags) - for key, mname in iteritems(self.GAUGES): + for key, mname in self.GAUGES.items(): if key not in data: self.log.warning("Gauge metric %s is missing from FPM status", key) continue self.gauge(mname, int(data[key]), tags=metric_tags) - for key, mname in iteritems(self.MONOTONIC_COUNTS): + for key, mname in self.MONOTONIC_COUNTS.items(): if key not in data: self.log.warning("Counter metric %s is missing from FPM status", key) continue diff --git a/postgres/datadog_checks/postgres/config.py b/postgres/datadog_checks/postgres/config.py index e1af379df5b1e..77a9849fcb123 100644 --- a/postgres/datadog_checks/postgres/config.py +++ b/postgres/datadog_checks/postgres/config.py @@ -4,8 +4,6 @@ # https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS from typing import Optional -from six import PY2, PY3, iteritems - from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.base.utils.aws import rds_parse_tags_from_endpoint from datadog_checks.base.utils.db.utils import get_agent_host_tags @@ -51,7 +49,7 @@ def __init__(self, instance, init_config, check): ) self.application_name = instance.get('application_name', 'datadog-agent') - if not self.isascii(self.application_name): + if not self.application_name.isascii(): raise ConfigurationError("Application name can include only ASCII characters: %s", self.application_name) self.query_timeout = int(instance.get('query_timeout', 5000)) @@ -220,7 +218,7 @@ def _get_custom_metrics(custom_metrics): m['query'] = m['query'] % '{metrics_columns}' try: - for ref, (_, mtype) in iteritems(m['metrics']): + for ref, (_, mtype) in m['metrics'].items(): 
cap_mtype = mtype.upper() if cap_mtype not in ('RATE', 'GAUGE', 'MONOTONIC'): raise ConfigurationError( @@ -233,17 +231,6 @@ def _get_custom_metrics(custom_metrics): raise Exception('Error processing custom metric `{}`: {}'.format(m, e)) return custom_metrics - @staticmethod - def isascii(application_name): - if PY3: - return application_name.isascii() - elif PY2: - try: - application_name.encode('ascii') - return True - except UnicodeEncodeError: - return False - @staticmethod def _aws_managed_authentication(aws, password): if 'managed_authentication' not in aws: diff --git a/postgres/datadog_checks/postgres/postgres.py b/postgres/datadog_checks/postgres/postgres.py index 76d7e8bb2101e..03d77018b93ee 100644 --- a/postgres/datadog_checks/postgres/postgres.py +++ b/postgres/datadog_checks/postgres/postgres.py @@ -9,7 +9,6 @@ import psycopg2 from cachetools import TTLCache -from six import iteritems from datadog_checks.base import AgentCheck from datadog_checks.base.utils.db import QueryExecutor @@ -595,7 +594,7 @@ def _query_scope(self, cursor, scope, instance_tags, is_custom_metrics, dbname=N tags = copy.copy(instance_tags) # Add tags from descriptors. - tags += [("%s:%s" % (k, v)) for (k, v) in iteritems(desc_map)] + tags += [("%s:%s" % (k, v)) for (k, v) in desc_map.items()] # Submit metrics to the Agent. for column, value in zip(cols, column_values): diff --git a/process/datadog_checks/process/process.py b/process/datadog_checks/process/process.py index 467f40b5e9708..8df86cd8119b8 100644 --- a/process/datadog_checks/process/process.py +++ b/process/datadog_checks/process/process.py @@ -10,7 +10,6 @@ from collections import defaultdict import psutil -from six import iteritems from datadog_checks.base import AgentCheck, is_affirmative from datadog_checks.base.utils.platform import Platform @@ -464,7 +463,7 @@ def check(self, _): self.last_pid_cache_ts[self.name] = 0 self.process_list_cache.reset() - for attr, mname in iteritems(ATTR_TO_METRIC): + for attr, mname in ATTR_TO_METRIC.items(): vals = [x for x in proc_state[attr] if x is not None] # skip [] if vals: @@ -480,7 +479,7 @@ def check(self, _): if mname in ['ioread_bytes', 'iowrite_bytes']: self.monotonic_count('system.processes.{}_count'.format(mname), sum_vals, tags=tags) - for attr, mname in iteritems(ATTR_TO_METRIC_RATE): + for attr, mname in ATTR_TO_METRIC_RATE.items(): vals = [x for x in proc_state[attr] if x is not None] if vals: self.rate('system.processes.{}'.format(mname), sum(vals), tags=tags) diff --git a/rabbitmq/datadog_checks/rabbitmq/rabbitmq.py b/rabbitmq/datadog_checks/rabbitmq/rabbitmq.py index 20ed0e56517d9..f52d2d9c8d62f 100644 --- a/rabbitmq/datadog_checks/rabbitmq/rabbitmq.py +++ b/rabbitmq/datadog_checks/rabbitmq/rabbitmq.py @@ -5,10 +5,9 @@ import re import time from collections import defaultdict +from urllib.parse import quote_plus, urljoin, urlparse from requests.exceptions import RequestException -from six import iteritems -from six.moves.urllib.parse import quote_plus, urljoin, urlparse from datadog_checks.base import AgentCheck, is_affirmative, to_native_string @@ -88,8 +87,8 @@ def _get_config(self, instance): NODE_TYPE: {'explicit': instance.get('nodes', []), 'regexes': instance.get('nodes_regexes', [])}, } - for object_type, filters in iteritems(specified): - for _, filter_objects in iteritems(filters): + for object_type, filters in specified.items(): + for _, filter_objects in filters.items(): if type(filter_objects) != list: raise TypeError("{0} / {0}_regexes parameter must be a 
list".format(object_type)) @@ -499,10 +498,10 @@ def get_connections_stat(self, instance, base_url, object_type, vhosts, limit_vh # 'state' does not exist for direct type connections. connection_states[conn.get('state', 'direct')] += 1 - for vhost, nb_conn in iteritems(stats): + for vhost, nb_conn in stats.items(): self.gauge('rabbitmq.connections', nb_conn, tags=['{}_vhost:{}'.format(TAG_PREFIX, vhost)] + custom_tags) - for conn_state, nb_conn in iteritems(connection_states): + for conn_state, nb_conn in connection_states.items(): self.gauge( 'rabbitmq.connections.state', nb_conn, diff --git a/silk/datadog_checks/silk/check.py b/silk/datadog_checks/silk/check.py index 75e2bd68986d1..ba54f7203cc87 100644 --- a/silk/datadog_checks/silk/check.py +++ b/silk/datadog_checks/silk/check.py @@ -2,8 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from copy import deepcopy - -from six.moves.urllib.parse import urljoin, urlparse +from urllib.parse import urljoin, urlparse from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.utils.time import get_timestamp diff --git a/snmp/datadog_checks/snmp/snmp.py b/snmp/datadog_checks/snmp/snmp.py index 40dd1e2f8417b..99dd29279da8a 100644 --- a/snmp/datadog_checks/snmp/snmp.py +++ b/snmp/datadog_checks/snmp/snmp.py @@ -14,8 +14,6 @@ from concurrent import futures from typing import Any, DefaultDict, Dict, List, Optional, Pattern, Tuple # noqa: F401 -from six import iteritems - from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative from datadog_checks.base.errors import CheckException from datadog_checks.snmp.utils import extract_value @@ -515,7 +513,7 @@ def report_metrics( self.log.debug('Ignoring metric %s', name) continue if isinstance(metric, ParsedTableMetric): - for index, val in iteritems(results[name]): + for index, val in results[name].items(): metric_tags = tags + self.get_index_tags(index, results, metric.index_tags, metric.column_tags) self.submit_metric( name, val, metric.forced_type, metric_tags, metric.options, metric.extract_value_pattern diff --git a/sqlserver/datadog_checks/sqlserver/connection.py b/sqlserver/datadog_checks/sqlserver/connection.py index 32af256ab57e0..525b852a626b2 100644 --- a/sqlserver/datadog_checks/sqlserver/connection.py +++ b/sqlserver/datadog_checks/sqlserver/connection.py @@ -5,8 +5,6 @@ import socket from contextlib import closing, contextmanager -from six import raise_from - from datadog_checks.base import AgentCheck, ConfigurationError from datadog_checks.base.log import get_check_logger from datadog_checks.sqlserver.cursor import CommenterCursorWrapper @@ -316,7 +314,7 @@ def open_db_connections(self, db_key, db_name=None, is_default=True, key_prefix= if is_default: # the message that is raised here (along with the exception stack trace) # is what will be seen in the agent status output. 
- raise_from(SQLConnectionError(check_err_message), None) + raise SQLConnectionError(check_err_message) from None else: # if not the default db, we should still log this exception # to give the customer an opportunity to fix the issue diff --git a/statsd/datadog_checks/statsd/statsd.py b/statsd/datadog_checks/statsd/statsd.py index a8bf7530252b3..9a9a4eddc5f1b 100644 --- a/statsd/datadog_checks/statsd/statsd.py +++ b/statsd/datadog_checks/statsd/statsd.py @@ -4,8 +4,7 @@ import re import socket - -from six import BytesIO +from io import BytesIO from datadog_checks.base import AgentCheck, ensure_bytes, ensure_unicode diff --git a/tls/datadog_checks/tls/tls.py b/tls/datadog_checks/tls/tls.py index 872f0ffb073fd..0f167ff6b4950 100644 --- a/tls/datadog_checks/tls/tls.py +++ b/tls/datadog_checks/tls/tls.py @@ -4,10 +4,9 @@ import socket import ssl from datetime import datetime +from urllib.parse import urlparse import service_identity -from six import text_type -from six.moves.urllib.parse import urlparse from datadog_checks.base import AgentCheck, is_affirmative @@ -149,7 +148,7 @@ def validate_certificate(self, cert): validator, host_type = self.validation_data try: - validator(cert, text_type(self._server_hostname)) + validator(cert, str(self._server_hostname)) except service_identity.VerificationError: message = 'The {} on the certificate does not match the given host'.format(host_type) self.log.debug(message) diff --git a/voltdb/datadog_checks/voltdb/check.py b/voltdb/datadog_checks/voltdb/check.py index 1452a9dc0ddc2..c868d06967763 100644 --- a/voltdb/datadog_checks/voltdb/check.py +++ b/voltdb/datadog_checks/voltdb/check.py @@ -4,7 +4,6 @@ from typing import Any, List, Optional, cast # noqa: F401 import requests # noqa: F401 -from six import raise_from from datadog_checks.base import AgentCheck from datadog_checks.base.utils.db import QueryManager @@ -52,7 +51,7 @@ def _raise_for_status_with_details(self, response): pass else: message += ' (details: {})'.format(details) - raise_from(Exception(message), exc) + raise Exception(message) from exc def _fetch_version(self): # type: () -> Optional[str] diff --git a/voltdb/datadog_checks/voltdb/client.py b/voltdb/datadog_checks/voltdb/client.py index cc276ed602cc8..29efb4a241a04 100644 --- a/voltdb/datadog_checks/voltdb/client.py +++ b/voltdb/datadog_checks/voltdb/client.py @@ -3,9 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import json from typing import Callable, Union # noqa: F401 +from urllib.parse import urljoin import requests -from six.moves.urllib.parse import urljoin class Client(object): diff --git a/voltdb/datadog_checks/voltdb/config.py b/voltdb/datadog_checks/voltdb/config.py index d827d2866073a..483ef1408d296 100644 --- a/voltdb/datadog_checks/voltdb/config.py +++ b/voltdb/datadog_checks/voltdb/config.py @@ -2,8 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) from typing import Callable, List, Optional # noqa: F401 - -from six.moves.urllib.parse import urlparse +from urllib.parse import urlparse from datadog_checks.base import ConfigurationError, is_affirmative diff --git a/vsphere/datadog_checks/vsphere/api.py b/vsphere/datadog_checks/vsphere/api.py index d11995233b89f..4d5e0de2dcda1 100644 --- a/vsphere/datadog_checks/vsphere/api.py +++ b/vsphere/datadog_checks/vsphere/api.py @@ -8,7 +8,6 @@ from pyVim import connect from pyVmomi import vim, vmodl -from six import itervalues from datadog_checks.base.log import CheckLoggingAdapter # noqa: F401 from 
datadog_checks.vsphere.config import VSphereConfig # noqa: F401 @@ -286,7 +285,7 @@ def get_infrastructure(self): # at this point they are custom pyvmomi objects and the attribute keys are not resolved. attribute_keys = {x.key: x.name for x in self._fetch_all_attributes()} - for props in itervalues(infrastructure_data): + for props in infrastructure_data.values(): mor_attributes = [] if self.config.collect_property_metrics: all_properties = {} diff --git a/vsphere/datadog_checks/vsphere/cache.py b/vsphere/datadog_checks/vsphere/cache.py index 1e2e6ec2741a7..687b8825b5248 100644 --- a/vsphere/datadog_checks/vsphere/cache.py +++ b/vsphere/datadog_checks/vsphere/cache.py @@ -6,7 +6,6 @@ from typing import Any, Dict, Generator, Iterator, List, Type # noqa: F401 from pyVmomi import vim # noqa: F401 -from six import iterkeys from datadog_checks.vsphere.types import CounterId, MetricName, ResourceTags # noqa: F401 @@ -140,7 +139,7 @@ def get_mor_props(self, mor, default=None): def get_mors(self, resource_type): # type: (Type[vim.ManagedEntity]) -> Iterator[vim.ManagedEntity] - return iterkeys(self._mors.get(resource_type, {})) + return iter(self._mors.get(resource_type, {}).keys()) def set_mor_props(self, mor, mor_data): # type: (vim.ManagedEntity, Dict[str, Any]) -> None diff --git a/vsphere/datadog_checks/vsphere/legacy/vsphere_legacy.py b/vsphere/datadog_checks/vsphere/legacy/vsphere_legacy.py index 8eca20328eeb9..158a87723ad01 100644 --- a/vsphere/datadog_checks/vsphere/legacy/vsphere_legacy.py +++ b/vsphere/datadog_checks/vsphere/legacy/vsphere_legacy.py @@ -17,8 +17,6 @@ vim, # pylint: disable=E0611 vmodl, # pylint: disable=E0611 ) -from six import itervalues -from six.moves import range from datadog_checks.base import AgentCheck, ensure_unicode, to_string from datadog_checks.base.checks.libs.thread_pool import SENTINEL, Pool @@ -951,7 +949,7 @@ def collect_metrics(self, instance): batch_size = self.batch_morlist_size or n_mors for batch in mors_batch_method(i_key, batch_size, max_historical_metrics): query_specs = [] - for mor in itervalues(batch): + for mor in batch.values(): if mor['mor_type'] == 'vm': vm_count += 1 if mor['mor_type'] not in REALTIME_RESOURCES and ('metrics' not in mor or not mor['metrics']): diff --git a/vsphere/tests/legacy/test_mor_cache.py b/vsphere/tests/legacy/test_mor_cache.py index fb1c12c61b40d..76da1c3ca8dfd 100644 --- a/vsphere/tests/legacy/test_mor_cache.py +++ b/vsphere/tests/legacy/test_mor_cache.py @@ -5,7 +5,6 @@ import pytest from mock import MagicMock -from six.moves import range from datadog_checks.vsphere.legacy.mor_cache import MorCache, MorNotFoundError diff --git a/windows_service/datadog_checks/windows_service/windows_service.py b/windows_service/datadog_checks/windows_service/windows_service.py index fc387f761022b..3e2ed672fb1e2 100644 --- a/windows_service/datadog_checks/windows_service/windows_service.py +++ b/windows_service/datadog_checks/windows_service/windows_service.py @@ -7,7 +7,6 @@ import pywintypes import win32service import winerror -from six import raise_from from datadog_checks.base import AgentCheck @@ -42,7 +41,7 @@ def _init_patterns(self): pattern = self.name self._name_re = re.compile(pattern, SERVICE_PATTERN_FLAGS) except re.error as e: - raise_from(Exception("Regular expression syntax error in '{}': {}".format(pattern, str(e))), None) + raise Exception("Regular expression syntax error in '{}': {}".format(pattern, str(e))) from None def match(self, service_view): if self.name is not None:
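The hunks above all apply the same mechanical Python 3 cleanups: `six.iteritems`, `itervalues`, and `iterkeys` become the built-in dict methods, `six.moves.urllib.parse` imports move to the standard-library `urllib.parse`, `six.raise_from` becomes native `raise ... from` syntax, `text_type`/`string_types` collapse to `str`, and the `long`-based `parse_long` helper becomes an `int`-based `parse_int`. The sketch below is illustrative only and is not part of any patch in this series; its module and function names are hypothetical, chosen to show the before/after shape of these replacements on Python 3.

# Illustrative sketch (hypothetical names) of the six-removal patterns applied above.
from urllib.parse import urljoin  # was: from six.moves.urllib.parse import urljoin


def submit_metrics(metrics, tags):
    # was: for name, value in iteritems(metrics)
    for name, value in metrics.items():
        print("{} {} {}".format(name, value, tags))


def parse_int(value):
    # was: parse_long, which relied on the Python 2 `long` type
    try:
        return int(value)
    except ValueError:
        return 0


def build_status_url(base_url, path):
    try:
        return urljoin(base_url, path)
    except Exception as exc:
        # was: raise_from(RuntimeError(...), exc)
        raise RuntimeError("could not build status URL") from exc


if __name__ == "__main__":
    submit_metrics({"bytes_rcvd": parse_int("123"), "bytes_sent": parse_int("n/a")}, ["iface:eth0"])
    print(build_status_url("http://localhost:8080/", "v2/queues"))

On Python 3 these are drop-in equivalents, which is why the hunks are uniform across checks and need no behavioral changes.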