chore: pyupgrade 3.10 plus
jjaakola-aiven committed Dec 10, 2024
1 parent 3d16148 commit 8aead4c
Showing 45 changed files with 179 additions and 189 deletions.
3 changes: 1 addition & 2 deletions src/karapace/anonymize_schemas/anonymize_avro.py
@@ -4,8 +4,7 @@
Copyright (c) 2023 Aiven Ltd
See LICENSE for details
"""
-from typing import Any, Union
-from typing_extensions import TypeAlias
+from typing import Any, TypeAlias, Union

import hashlib
import re
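Side note on the hunk above (mine, not part of the commit): typing.TypeAlias was added to the standard library in Python 3.10 (PEP 613), so once 3.10 is the minimum supported version the typing_extensions fallback import becomes unnecessary. A minimal sketch of the pattern, with a made-up alias name:

from typing import TypeAlias

# PEP 613 explicit alias; type checkers treat this as an alias, not a plain assignment.
NameHash: TypeAlias = str


def anonymize(name: str) -> NameHash:
    # Stand-in body for illustration only.
    return name[::-1]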
3 changes: 2 additions & 1 deletion src/karapace/avro_dataclasses/introspect.py
@@ -4,6 +4,7 @@
"""

from __future__ import annotations
+from types import UnionType

from .schema import AvroType, EnumType, FieldSchema, MapType, RecordSchema
from collections.abc import Mapping, Sequence
@@ -90,7 +91,7 @@ def _field_type(field: Field, type_: object) -> AvroType: # pylint: disable=too
origin = get_origin(type_)

# Handle union types.
-if origin is Union:
+if origin is UnionType:
return [_field_type(field, unit) for unit in get_args(type_)] # type: ignore[misc]

# Handle array types.
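The Union → UnionType switch above deserves a brief aside (my illustration, not repository code): with PEP 604 syntax, X | Y produces a types.UnionType object at runtime, and typing.get_origin reports that type rather than typing.Union, so introspection code has to compare against the new origin. A small check, assuming Python 3.10+:

from types import UnionType
from typing import Union, get_args, get_origin

pep604_union = int | None          # written with the | operator
legacy_union = Union[int, None]    # written with typing.Union / Optional

assert get_origin(pep604_union) is UnionType   # PEP 604 unions report types.UnionType
assert get_origin(legacy_union) is Union       # typing.Union spellings still report typing.Union
assert get_args(pep604_union) == get_args(legacy_union) == (int, type(None))

Comparing against UnionType is therefore the right check once the annotations themselves are rewritten to the | syntax; annotations still written with Union or Optional would not match it.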
6 changes: 3 additions & 3 deletions src/karapace/avro_dataclasses/models.py
@@ -5,12 +5,12 @@
from __future__ import annotations

from .introspect import record_schema
-from collections.abc import Iterable, Mapping
+from collections.abc import Callable, Iterable, Mapping
from dataclasses import asdict, fields, is_dataclass
from enum import Enum
from functools import lru_cache, partial
-from typing import Callable, cast, IO, TYPE_CHECKING, TypeVar, Union
-from typing_extensions import get_args, get_origin, Self
+from typing import cast, get_args, get_origin, IO, TYPE_CHECKING, TypeVar, Union
+from typing_extensions import Self

import avro
import avro.io
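The models.py hunk shows the three import moves that recur throughout this commit (my summary): Callable now comes from collections.abc, since the typing alias has been deprecated in favor of the ABC since Python 3.9; get_args and get_origin have lived in typing since 3.8; and Self still needs typing_extensions because it only reached typing in 3.11. A stand-alone sketch with illustrative names, not Karapace code:

from collections.abc import Callable
from typing import get_args, get_origin

from typing_extensions import Self  # typing.Self exists only from Python 3.11


class Builder:
    def reset(self) -> Self:  # Self resolves to the concrete subclass in subclasses
        return self


Handler = Callable[[str], int]          # the ABC is subscriptable since 3.9
assert get_origin(Handler) is Callable  # introspection works like the old typing.Callable
assert get_args(Handler) == ([str], int)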
4 changes: 2 additions & 2 deletions src/karapace/avro_dataclasses/schema.py
@@ -5,8 +5,8 @@
from __future__ import annotations

from collections.abc import Mapping
-from typing import Literal
-from typing_extensions import NotRequired, TypeAlias, TypedDict
+from typing import Literal, TypeAlias
+from typing_extensions import NotRequired, TypedDict

Primitive: TypeAlias = Literal["int", "long", "string", "null", "bytes", "boolean"]
LogicalType: TypeAlias = Literal["timestamp-millis", "uuid"]
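One detail worth flagging in this hunk (my inference, not stated in the commit): TypeAlias moves to typing, but TypedDict deliberately stays on typing_extensions, presumably because NotRequired only joined typing in Python 3.11 (PEP 655) and it pairs most safely with the typing_extensions TypedDict on a 3.10 baseline. A hypothetical sketch of that combination:

from typing import Literal, TypeAlias

from typing_extensions import NotRequired, TypedDict

Primitive: TypeAlias = Literal["int", "long", "string", "null", "bytes", "boolean"]


class FieldDict(TypedDict):
    # Hypothetical shape, not the real Karapace FieldSchema.
    name: str
    type: Primitive
    default: NotRequired[str]  # the key may be omitted entirely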
4 changes: 2 additions & 2 deletions src/karapace/backup/api.py
@@ -22,7 +22,7 @@
from .poll_timeout import PollTimeout
from .topic_configurations import ConfigSource, get_topic_configurations
from aiokafka.errors import KafkaError, TopicAlreadyExistsError
-from collections.abc import Iterator, Mapping, Sized
+from collections.abc import Callable, Iterator, Mapping, Sized
from concurrent.futures import Future
from confluent_kafka import Message, TopicPartition
from enum import Enum
@@ -42,7 +42,7 @@
from pathlib import Path
from rich.console import Console
from tenacity import retry, retry_if_exception_type, RetryCallState, stop_after_delay, wait_fixed
-from typing import Callable, Literal, NewType, TypeVar
+from typing import Literal, NewType, TypeVar

import contextlib
import datetime
5 changes: 2 additions & 3 deletions src/karapace/backup/backends/reader.py
@@ -4,12 +4,11 @@
"""
from __future__ import annotations

-from collections.abc import Generator, Iterator, Mapping, Sequence
+from collections.abc import Callable, Generator, Iterator, Mapping, Sequence
from karapace.dataclasses import default_dataclass
from karapace.typing import JsonData, JsonObject
from pathlib import Path
-from typing import Callable, ClassVar, Final, IO, Optional, TypeVar, Union
-from typing_extensions import TypeAlias
+from typing import ClassVar, Final, IO, Optional, TypeAlias, TypeVar, Union

import abc

5 changes: 2 additions & 3 deletions src/karapace/backup/backends/v3/backend.py
@@ -9,7 +9,7 @@
from .readers import read_metadata, read_records
from .schema import ChecksumAlgorithm, DataFile, Header, Metadata, Record
from .writers import write_metadata, write_record
-from collections.abc import Generator, Iterator, Mapping, Sequence
+from collections.abc import Callable, Generator, Iterator, Mapping, Sequence
from confluent_kafka import Message
from dataclasses import dataclass
from karapace.backup.backends.reader import BaseBackupReader, Instruction, ProducerSend, RestoreTopic
@@ -19,8 +19,7 @@
from karapace.utils import assert_never
from karapace.version import __version__
from pathlib import Path
-from typing import Callable, ContextManager, Final, IO, TypeVar
-from typing_extensions import TypeAlias
+from typing import ContextManager, Final, IO, TypeAlias, TypeVar

import datetime
import io
9 changes: 4 additions & 5 deletions src/karapace/backup/backends/v3/schema.py
@@ -8,7 +8,6 @@
from dataclasses import field
from karapace.avro_dataclasses.models import AvroModel
from karapace.dataclasses import default_dataclass
-from typing import Optional

import datetime
import enum
@@ -53,7 +52,7 @@ class Metadata(AvroModel):
finished_at: datetime.datetime
record_count: int = field(metadata={"type": "int"})
topic_name: str
-topic_id: Optional[uuid.UUID]
+topic_id: uuid.UUID | None
partition_count: int = field(metadata={"type": "int"})
replication_factor: int = field(metadata={"type": "int"})
topic_configurations: Mapping[str, str]
@@ -77,8 +76,8 @@ class Header(AvroModel):

@default_dataclass
class Record(AvroModel):
-key: Optional[bytes]
-value: Optional[bytes]
+key: bytes | None
+value: bytes | None
headers: tuple[Header, ...]
offset: int = field(metadata={"type": "long"})
timestamp: int = field(metadata={"type": "long"})
@@ -87,7 +86,7 @@ class Record(AvroModel):
# of records. When restoring, we accumulate parsed records until
# encountering a checkpoint, verify the running checksum against it, and
# only then produce the verified records to Kafka.
-checksum_checkpoint: Optional[bytes]
+checksum_checkpoint: bytes | None

def __post_init__(self) -> None:
assert self.offset >= 0
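A general note on the Optional[X] → X | None rewrites in this file (illustration mine): the two spellings denote the same type, and on Python 3.10+ they even compare equal at runtime with identical get_args results; only get_origin tells them apart, which is exactly why the union check in avro_dataclasses/introspect.py above had to move from Union to UnionType. A quick check:

import uuid
from typing import Optional, Union, get_args

new_style = uuid.UUID | None
old_style = Optional[uuid.UUID]

assert new_style == old_style                      # PEP 604 unions compare equal to typing.Union
assert Union[uuid.UUID, None] == new_style
assert get_args(new_style) == get_args(old_style)  # (uuid.UUID, NoneType) in both cases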
3 changes: 1 addition & 2 deletions src/karapace/backup/backends/writer.py
@@ -8,8 +8,7 @@
from confluent_kafka import Message
from karapace.backup.safe_writer import bytes_writer, str_writer
from pathlib import Path
-from typing import ContextManager, Generic, IO, Literal, TypeVar
-from typing_extensions import TypeAlias
+from typing import ContextManager, Generic, IO, Literal, TypeAlias, TypeVar

import abc
import contextlib
3 changes: 1 addition & 2 deletions src/karapace/backup/safe_writer.py
@@ -7,8 +7,7 @@
from collections.abc import Generator
from pathlib import Path
from tempfile import mkstemp, TemporaryDirectory
-from typing import Final, IO, Literal
-from typing_extensions import TypeAlias
+from typing import Final, IO, Literal, TypeAlias

import contextlib
import os
39 changes: 19 additions & 20 deletions src/karapace/client.py
@@ -5,9 +5,8 @@
See LICENSE for details
"""
from aiohttp import BasicAuth, ClientSession
-from collections.abc import Awaitable, Mapping
+from collections.abc import Awaitable, Callable, Mapping
from karapace.typing import JsonData
-from typing import Callable, Optional, Union
from urllib.parse import urljoin

import logging
@@ -19,7 +18,7 @@
LOG = logging.getLogger(__name__)


-async def _get_aiohttp_client(*, auth: Optional[BasicAuth] = None) -> ClientSession:
+async def _get_aiohttp_client(*, auth: BasicAuth | None = None) -> ClientSession:
return ClientSession(auth=auth)


@@ -28,7 +27,7 @@ def __init__(
self,
status: int,
json_result: JsonData,
-headers: Optional[Mapping] = None,
+headers: Mapping | None = None,
) -> None:
self.status_code = status
self.json_result = json_result
@@ -48,10 +47,10 @@ def ok(self) -> bool:
class Client:
def __init__(
self,
-server_uri: Optional[str] = None,
+server_uri: str | None = None,
client_factory: Callable[..., Awaitable[ClientSession]] = _get_aiohttp_client,
-server_ca: Optional[str] = None,
-session_auth: Optional[BasicAuth] = None,
+server_ca: str | None = None,
+session_auth: BasicAuth | None = None,
) -> None:
self.server_uri = server_uri or ""
self.session_auth = session_auth
@@ -61,13 +60,13 @@ def __init__(
# Instead we wait for the first query in async context and lazy-initialize aiohttp client.
self.client_factory = client_factory

-self.ssl_mode: Union[None, bool, ssl.SSLContext]
+self.ssl_mode: None | bool | ssl.SSLContext
if server_ca is None:
self.ssl_mode = False
else:
self.ssl_mode = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.ssl_mode.load_verify_locations(cafile=server_ca)
-self._client: Optional[ClientSession] = None
+self._client: ClientSession | None = None

def path_for(self, path: Path) -> str:
return urljoin(self.server_uri, path)
@@ -89,9 +88,9 @@ async def get(
self,
path: Path,
json: JsonData = None,
-headers: Optional[Headers] = None,
-auth: Optional[BasicAuth] = None,
-params: Optional[Mapping[str, str]] = None,
+headers: Headers | None = None,
+auth: BasicAuth | None = None,
+params: Mapping[str, str] | None = None,
json_response: bool = True,
) -> Result:
path = self.path_for(path)
@@ -113,8 +112,8 @@ async def get(
async def delete(
self,
path: Path,
-headers: Optional[Headers] = None,
-auth: Optional[BasicAuth] = None,
+headers: Headers | None = None,
+auth: BasicAuth | None = None,
) -> Result:
path = self.path_for(path)
if not headers:
@@ -133,8 +132,8 @@ async def post(
self,
path: Path,
json: JsonData,
-headers: Optional[Headers] = None,
-auth: Optional[BasicAuth] = None,
+headers: Headers | None = None,
+auth: BasicAuth | None = None,
) -> Result:
path = self.path_for(path)
if not headers:
@@ -155,8 +154,8 @@ async def put(
self,
path: Path,
json: JsonData,
-headers: Optional[Headers] = None,
-auth: Optional[BasicAuth] = None,
+headers: Headers | None = None,
+auth: BasicAuth | None = None,
) -> Result:
path = self.path_for(path)
if not headers:
@@ -177,8 +176,8 @@ async def put_with_data(
self,
path: Path,
data: JsonData,
-headers: Optional[Headers],
-auth: Optional[BasicAuth] = None,
+headers: Headers | None,
+auth: BasicAuth | None = None,
) -> Result:
path = self.path_for(path)
client = await self.get_client()
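One last general remark on the client.py changes (mine, not the author's): unless a module opts into from __future__ import annotations, an annotation such as auth: BasicAuth | None = None is evaluated when the def statement executes, so spelling it with | in signatures is only safe because the project now requires Python 3.10+, where every class supports the | operator for building unions. A minimal sketch with a stand-in class instead of aiohttp's BasicAuth:

class BasicAuth:
    # Stand-in for aiohttp.BasicAuth, for illustration only.
    def __init__(self, login: str, password: str) -> None:
        self.login = login
        self.password = password


def describe_session(auth: BasicAuth | None = None) -> str:
    # "BasicAuth | None" evaluates to a types.UnionType at definition time on 3.10+.
    return "anonymous session" if auth is None else f"session for {auth.login}"


print(describe_session())                      # anonymous session
print(describe_session(BasicAuth("u", "pw")))  # session for u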
3 changes: 2 additions & 1 deletion src/karapace/compatibility/jsonschema/types.py
@@ -2,9 +2,10 @@
Copyright (c) 2023 Aiven Ltd
See LICENSE for details
"""
+from collections.abc import Callable
from dataclasses import dataclass
from enum import Enum, unique
-from typing import Callable, Generic, TypeVar
+from typing import Generic, TypeVar

T = TypeVar("T")

12 changes: 6 additions & 6 deletions src/karapace/compatibility/jsonschema/utils.py
@@ -5,7 +5,7 @@
from copy import copy
from jsonschema import Draft7Validator
from karapace.compatibility.jsonschema.types import BooleanSchema, Instance, Keyword, Subschema
-from typing import Any, Optional, TypeVar, Union
+from typing import Any, TypeVar, Union

import re

@@ -53,7 +53,7 @@ def normalize_schema_rec(validator: Draft7Validator, original_schema: Any) -> An
return normalized


-def maybe_get_subschemas_and_type(schema: Any) -> Optional[tuple[list[Any], Subschema]]:
+def maybe_get_subschemas_and_type(schema: Any) -> tuple[list[Any], Subschema] | None:
"""If schema contains `anyOf`, `allOf`, or `oneOf`, return it.
This will also normalized schemas with a list of types to a `anyOf`, e..g:
@@ -218,7 +218,7 @@ def is_tuple_without_additional_items(schema: Any) -> bool:
return is_tuple(schema) and is_false_schema(additional_items)


-def gt(left: Optional[int], right: Optional[int]) -> bool:
+def gt(left: int | None, right: int | None) -> bool:
"""Predicate greater-than that checks for nullables.
When `left` is writer and `right` is reader, this can be used to check for
@@ -256,11 +256,11 @@ def gt(left: Optional[int], right: Optional[int]) -> bool:
return bool(left is not None and right is not None and left > right)


-def lt(left: Optional[int], right: Optional[int]) -> bool:
+def lt(left: int | None, right: int | None) -> bool:
return gt(right, left) # pylint: disable=arguments-out-of-order


-def ne(writer: Optional[T], reader: Optional[T]) -> bool:
+def ne(writer: T | None, reader: T | None) -> bool:
"""Predicate not-equals that checks for nullables.
Predicate used to check for incompatibility in constraints that accept
@@ -288,7 +288,7 @@ def ne(writer: Optional[T], reader: Optional[T]) -> bool:
return bool(reader is not None and writer is not None and reader != writer)


-def introduced_constraint(reader: Optional[T], writer: Optional[T]) -> bool:
+def introduced_constraint(reader: T | None, writer: T | None) -> bool:
"""True if `writer` did *not* have the constraint but `reader` introduced it.

A constraint limits the value domain, because of that objects that were
@@ -9,10 +9,10 @@
from __future__ import annotations

from aiohttp.web import middleware, Request, Response
-from collections.abc import Awaitable
+from collections.abc import Awaitable, Callable
from karapace.rapu import RestApp
from prometheus_client import CollectorRegistry, Counter, Gauge, generate_latest, Histogram
-from typing import Callable, Final
+from typing import Final

import logging
import time
4 changes: 2 additions & 2 deletions src/karapace/kafka/common.py
@@ -14,10 +14,10 @@
KafkaUnavailableError,
NoBrokersAvailable,
)
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
from concurrent.futures import Future
from confluent_kafka.error import KafkaError, KafkaException
-from typing import Any, Callable, Literal, NoReturn, Protocol, TypedDict, TypeVar
+from typing import Any, Literal, NoReturn, Protocol, TypedDict, TypeVar
from typing_extensions import Unpack

import logging
4 changes: 2 additions & 2 deletions src/karapace/kafka/consumer.py
@@ -6,12 +6,12 @@
from __future__ import annotations

from aiokafka.errors import IllegalStateError, KafkaTimeoutError
-from collections.abc import Iterable
+from collections.abc import Callable, Iterable
from confluent_kafka import Consumer, Message, TopicPartition
from confluent_kafka.admin import PartitionMetadata
from confluent_kafka.error import KafkaException
from karapace.kafka.common import _KafkaConfigMixin, KafkaClientParams, raise_from_kafkaexception
-from typing import Any, Callable, TypeVar
+from typing import Any, TypeVar
from typing_extensions import Unpack

import asyncio
3 changes: 2 additions & 1 deletion src/karapace/kafka_rest_apis/__init__.py
@@ -16,6 +16,7 @@
)
from binascii import Error as B64DecodeError
from collections import namedtuple
+from collections.abc import Callable
from confluent_kafka.error import KafkaException
from contextlib import AsyncExitStack
from http import HTTPStatus
@@ -44,7 +45,7 @@
)
from karapace.typing import NameStrategy, SchemaId, Subject, SubjectType
from karapace.utils import convert_to_int, json_encode
-from typing import Callable, TypedDict
+from typing import TypedDict

import asyncio
import base64
(The remaining changed files in this commit were not loaded in this view.)
