Skip to content
This repository is currently being migrated. It's locked while the migration is in progress.

Prefer builtins over some typing aliases #1001

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 8 additions & 10 deletions baseplate/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,7 @@
from types import TracebackType
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterator
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Tuple
Expand Down Expand Up @@ -182,7 +180,7 @@ class RequestContext:

def __init__(
self,
context_config: Dict[str, Any],
context_config: dict[str, Any],
prefix: Optional[str] = None,
span: Optional["Span"] = None,
wrapped: Optional["RequestContext"] = None,
Expand Down Expand Up @@ -279,9 +277,9 @@ def __init__(self, app_config: Optional[config.RawConfig] = None) -> None:
...

"""
self.observers: List[BaseplateObserver] = []
self.observers: list[BaseplateObserver] = []
self._metrics_client: Optional[metrics.Client] = None
self._context_config: Dict[str, Any] = {}
self._context_config: dict[str, Any] = {}
self._app_config = app_config or {}

self.service_name = self._app_config.get("baseplate.service_name")
Expand Down Expand Up @@ -377,7 +375,7 @@ def configure_observers(self) -> None:
"The following observers are unconfigured and won't run: %s", ", ".join(skipped)
)

def configure_context(self, context_spec: Dict[str, Any]) -> None:
def configure_context(self, context_spec: dict[str, Any]) -> None:
"""Add a number of objects to each request's context object.

Configure and attach multiple clients to the
Expand Down Expand Up @@ -509,8 +507,8 @@ def server_context(self, name: str) -> Iterator[RequestContext]:
with self.make_server_span(context, name):
yield context

def get_runtime_metric_reporters(self) -> Dict[str, Callable[[Any], None]]:
specs: List[Tuple[Optional[str], Dict[str, Any]]] = [(None, self._context_config)]
def get_runtime_metric_reporters(self) -> dict[str, Callable[[Any], None]]:
specs: list[tuple[Optional[str], dict[str, Any]]] = [(None, self._context_config)]
result = {}
while specs:
prefix, spec = specs.pop(0)
Expand Down Expand Up @@ -550,7 +548,7 @@ def __init__(
self.context = context
self.baseplate = baseplate
self.component_name: Optional[str] = None
self.observers: List[SpanObserver] = []
self.observers: list[SpanObserver] = []

def register(self, observer: SpanObserver) -> None:
"""Register an observer to receive events from this span."""
Expand Down Expand Up @@ -655,7 +653,7 @@ def make_child(
"""Return a child Span whose parent is this Span."""
raise NotImplementedError

def with_tags(self, tags: Dict[str, Any]) -> "Span":
def with_tags(self, tags: dict[str, Any]) -> "Span":
"""Declare a set of tags to be added to a span before starting it in the context manager.

        Can be used as follows:
Expand Down
11 changes: 4 additions & 7 deletions baseplate/clients/cassandra.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,10 @@
from threading import Event
from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import Mapping
from typing import NamedTuple
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

Expand Down Expand Up @@ -70,7 +67,7 @@ def cluster_from_config(
app_config: config.RawConfig,
secrets: Optional[SecretsStore] = None,
prefix: str = "cassandra.",
execution_profiles: Optional[Dict[str, ExecutionProfile]] = None,
execution_profiles: Optional[dict[str, ExecutionProfile]] = None,
**kwargs: Any,
) -> Cluster:
"""Make a Cluster from a configuration dictionary.
Expand Down Expand Up @@ -171,7 +168,7 @@ def __init__(
prometheus_cluster_name: Optional[str] = None,
):
self.session = session
self.prepared_statements: Dict[str, PreparedStatement] = {}
self.prepared_statements: dict[str, PreparedStatement] = {}
self.prometheus_client_name = prometheus_client_name
self.prometheus_cluster_name = prometheus_cluster_name

Expand Down Expand Up @@ -318,7 +315,7 @@ def _on_execute_failed(exc: BaseException, args: CassandraCallbackArgs, event: E
event.set()


RowFactory = Callable[[List[str], List[Tuple]], Any]
RowFactory = Callable[[list[str], list[tuple]], Any]
Query = Union[str, SimpleStatement, PreparedStatement, BoundStatement]
Parameters = Union[Sequence[Any], Mapping[str, Any]]

Expand All @@ -329,7 +326,7 @@ def __init__(
context_name: str,
server_span: Span,
session: Session,
prepared_statements: Dict[str, PreparedStatement],
prepared_statements: dict[str, PreparedStatement],
prometheus_client_name: Optional[str] = None,
prometheus_cluster_name: Optional[str] = None,
):
Expand Down
17 changes: 7 additions & 10 deletions baseplate/clients/memcache/__init__.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
from time import perf_counter
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Union

from prometheus_client import Counter
Expand All @@ -21,7 +18,7 @@
from baseplate.lib.prometheus_metrics import default_latency_buckets


Serializer = Callable[[str, Any], Tuple[bytes, int]]
Serializer = Callable[[str, Any], tuple[bytes, int]]
Deserializer = Callable[[str, bytes, int], Any]


Expand Down Expand Up @@ -254,8 +251,8 @@ def set(self, key: Key, value: Any, expire: int = 0, noreply: Optional[bool] = N

@_prom_instrument
def set_many(
self, values: Dict[Key, Any], expire: int = 0, noreply: Optional[bool] = None
) -> List[str]:
self, values: dict[Key, Any], expire: int = 0, noreply: Optional[bool] = None
) -> list[str]:
with self._make_span("set_many") as span:
span.set_tag("key_count", len(values))
span.set_tag("keys", make_keys_str(values.keys()))
Expand Down Expand Up @@ -312,7 +309,7 @@ def get(self, key: Key, default: Any = None) -> Any:
return self.pooled_client.get(key, **kwargs)

@_prom_instrument
def get_many(self, keys: Sequence[Key]) -> Dict[Key, Any]:
def get_many(self, keys: Sequence[Key]) -> dict[Key, Any]:
with self._make_span("get_many") as span:
span.set_tag("key_count", len(keys))
span.set_tag("keys", make_keys_str(keys))
Expand All @@ -321,13 +318,13 @@ def get_many(self, keys: Sequence[Key]) -> Dict[Key, Any]:
@_prom_instrument
def gets(
self, key: Key, default: Optional[Any] = None, cas_default: Optional[Any] = None
) -> Tuple[Any, Any]:
) -> tuple[Any, Any]:
with self._make_span("gets") as span:
span.set_tag("key", key)
return self.pooled_client.gets(key, default=default, cas_default=cas_default)

@_prom_instrument
def gets_many(self, keys: Sequence[Key]) -> Dict[Key, Tuple[Any, Any]]:
def gets_many(self, keys: Sequence[Key]) -> dict[Key, tuple[Any, Any]]:
with self._make_span("gets_many") as span:
span.set_tag("key_count", len(keys))
span.set_tag("keys", make_keys_str(keys))
Expand Down Expand Up @@ -379,7 +376,7 @@ def touch(self, key: Key, expire: int = 0, noreply: Optional[bool] = None) -> bo
return self.pooled_client.touch(key, expire=expire, noreply=noreply)

@_prom_instrument
def stats(self, *args: str) -> Dict[str, Any]:
def stats(self, *args: str) -> dict[str, Any]:
with self._make_span("stats"):
return self.pooled_client.stats(*args)

Expand Down
3 changes: 1 addition & 2 deletions baseplate/clients/redis.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
from math import ceil
from time import perf_counter
from typing import Any
from typing import Dict
from typing import Optional

import redis
Expand Down Expand Up @@ -296,7 +295,7 @@ def __init__(
trace_name: str,
server_span: Span,
connection_pool: redis.ConnectionPool,
response_callbacks: Dict,
response_callbacks: dict,
redis_client_name: str = "",
**kwargs: Any,
):
Expand Down
14 changes: 6 additions & 8 deletions baseplate/clients/redis_cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
from datetime import timedelta
from time import perf_counter
from typing import Any
from typing import Dict
from typing import List
from typing import Optional

import rediscluster
Expand Down Expand Up @@ -155,16 +153,16 @@ def should_track_key_reads(self) -> bool:
def should_track_key_writes(self) -> bool:
return randomizer.random() < self.track_writes_sample_rate

def increment_keys_read_counter(self, key_list: List[str], ignore_errors: bool = True) -> None:
def increment_keys_read_counter(self, key_list: list[str], ignore_errors: bool = True) -> None:
self._increment_hot_key_counter(key_list, self.reads_sorted_set_name, ignore_errors)

def increment_keys_written_counter(
self, key_list: List[str], ignore_errors: bool = True
self, key_list: list[str], ignore_errors: bool = True
) -> None:
self._increment_hot_key_counter(key_list, self.writes_sorted_set_name, ignore_errors)

def _increment_hot_key_counter(
self, key_list: List[str], set_name: str, ignore_errors: bool = True
self, key_list: list[str], set_name: str, ignore_errors: bool = True
) -> None:
if len(key_list) == 0:
return
Expand All @@ -183,7 +181,7 @@ def _increment_hot_key_counter(
if not ignore_errors:
raise

def maybe_track_key_usage(self, args: List[str]) -> None:
def maybe_track_key_usage(self, args: list[str]) -> None:
"""Probabilistically track usage of the keys in this command.

        If we have enabled key usage tracing *and* this command is within the
Expand Down Expand Up @@ -216,7 +214,7 @@ def maybe_track_key_usage(self, args: List[str]) -> None:
# the desired behaviour.
class ClusterWithReadReplicasBlockingConnectionPool(rediscluster.ClusterBlockingConnectionPool):
# pylint: disable=arguments-differ
def get_node_by_slot(self, slot: int, read_command: bool = False) -> Dict[str, Any]:
def get_node_by_slot(self, slot: int, read_command: bool = False) -> dict[str, Any]:
"""Get a node from the slot.

If the command is a read command we'll try to return a random node.
Expand Down Expand Up @@ -506,7 +504,7 @@ def __init__(
trace_name: str,
server_span: Span,
connection_pool: rediscluster.ClusterConnectionPool,
response_callbacks: Dict,
response_callbacks: dict,
hot_key_tracker: Optional[HotKeyTracker],
redis_client_name: str = "",
**kwargs: Any,
Expand Down
6 changes: 2 additions & 4 deletions baseplate/clients/sqlalchemy.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,8 @@

from time import perf_counter
from typing import Any
from typing import Dict
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Union

from prometheus_client import Counter
Expand Down Expand Up @@ -136,7 +134,7 @@ def parse(
return SQLAlchemySessionContextFactory(engine, key_path)


Parameters = Optional[Union[Dict[str, Any], Sequence[Any]]]
Parameters = Optional[Union[dict[str, Any], Sequence[Any]]]


SAFE_TRACE_ID = re.compile("^[A-Za-z0-9_-]+$")
Expand Down Expand Up @@ -248,7 +246,7 @@ def on_before_execute(
parameters: Parameters,
context: Optional[ExecutionContext],
executemany: bool,
) -> Tuple[str, Parameters]:
) -> tuple[str, Parameters]:
"""Handle the engine's before_cursor_execute event."""
labels = {
"sql_client_name": self.name,
Expand Down
3 changes: 1 addition & 2 deletions baseplate/frameworks/pyramid/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import Iterator
from typing import Mapping
Expand Down Expand Up @@ -323,7 +322,7 @@ class RequestFactory:
def __init__(self, baseplate: Baseplate):
self.baseplate = baseplate

def __call__(self, environ: Dict[str, str]) -> BaseplateRequest:
def __call__(self, environ: dict[str, str]) -> BaseplateRequest:
return BaseplateRequest(environ, context_config=self.baseplate._context_config)

def blank(self, path: str) -> BaseplateRequest:
Expand Down
18 changes: 8 additions & 10 deletions baseplate/frameworks/queue_consumer/kafka.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@

from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Sequence
Expand Down Expand Up @@ -267,7 +265,7 @@ def new(
kafka_consume_batch_size: int = 1,
message_unpack_fn: KafkaMessageDeserializer = json.loads,
health_check_fn: Optional[HealthcheckCallback] = None,
kafka_config: Optional[Dict[str, Any]] = None,
kafka_config: Optional[dict[str, Any]] = None,
prometheus_client_name: str = "",
) -> Self:
"""Return a new `_BaseKafkaQueueConsumerFactory`.
Expand Down Expand Up @@ -314,7 +312,7 @@ def new(
)

@classmethod
def _consumer_config(cls) -> Dict[str, Any]:
def _consumer_config(cls) -> dict[str, Any]:
raise NotImplementedError

@classmethod
Expand All @@ -323,7 +321,7 @@ def make_kafka_consumer(
bootstrap_servers: str,
group_id: str,
topics: Sequence[str],
kafka_config: Optional[Dict[str, Any]] = None,
kafka_config: Optional[dict[str, Any]] = None,
) -> confluent_kafka.Consumer:
consumer_config = {
"bootstrap.servers": bootstrap_servers,
Expand Down Expand Up @@ -358,14 +356,14 @@ def make_kafka_consumer(

# pylint: disable=unused-argument
def log_assign(
consumer: confluent_kafka.Consumer, partitions: List[confluent_kafka.TopicPartition]
consumer: confluent_kafka.Consumer, partitions: list[confluent_kafka.TopicPartition]
) -> None:
for topic_partition in partitions:
logger.info("assigned %s/%s", topic_partition.topic, topic_partition.partition)

# pylint: disable=unused-argument
def log_revoke(
consumer: confluent_kafka.Consumer, partitions: List[confluent_kafka.TopicPartition]
consumer: confluent_kafka.Consumer, partitions: list[confluent_kafka.TopicPartition]
) -> None:
for topic_partition in partitions:
logger.info("revoked %s/%s", topic_partition.topic, topic_partition.partition)
Expand Down Expand Up @@ -441,7 +439,7 @@ class InOrderConsumerFactory(_BaseKafkaQueueConsumerFactory):
message_handler_count = 0

@classmethod
def _consumer_config(cls) -> Dict[str, Any]:
def _consumer_config(cls) -> dict[str, Any]:
return {
# The consumer sends periodic heartbeats on a separate thread to
# indicate its liveness to the broker. If no heartbeats are received by
Expand Down Expand Up @@ -543,7 +541,7 @@ class FastConsumerFactory(_BaseKafkaQueueConsumerFactory):
# pylint: disable=unused-argument
@staticmethod
def _commit_callback(
err: confluent_kafka.KafkaError, topic_partition_list: List[confluent_kafka.TopicPartition]
err: confluent_kafka.KafkaError, topic_partition_list: list[confluent_kafka.TopicPartition]
) -> None:
# called after automatic commits
for topic_partition in topic_partition_list:
Expand All @@ -565,7 +563,7 @@ def _commit_callback(
)

@classmethod
def _consumer_config(cls) -> Dict[str, Any]:
def _consumer_config(cls) -> dict[str, Any]:
return {
# The consumer sends periodic heartbeats on a separate thread to
# indicate its liveness to the broker. If no heartbeats are received by
Expand Down
Loading