
Commit
chore: pyupgrade formatting to py39
jjaakola-aiven committed Oct 8, 2024
1 parent 06a93c7 commit 646fb93
Showing 83 changed files with 217 additions and 194 deletions.
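The pattern across these files is the standard pyupgrade rewrite set for a Python 3.9 baseline: PEP 585 builtin generics (`dict`, `list`, `tuple`, `type`) replace their `typing` aliases, and abstract types such as `Mapping` or `Generator` are imported from `collections.abc` instead of `typing`. A minimal before/after sketch (hypothetical module, not one of the files below):

```python
# Before, Python 3.8 style:
#
#     from typing import Dict, List
#
#     def index(items: List[str]) -> Dict[str, int]:
#         return {item: i for i, item in enumerate(items)}

# After `pyupgrade --py39-plus`: builtin generics (PEP 585), no typing import needed.
def index(items: list[str]) -> dict[str, int]:
    return {item: i for i, item in enumerate(items)}
```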
3 changes: 1 addition & 2 deletions performance-test/schema-registry-schema-post.py
@@ -5,7 +5,6 @@
 from dataclasses import dataclass, field
 from locust import FastHttpUser, task
 from locust.contrib.fasthttp import ResponseContextManager
-from typing import Dict

 import json
 import random
@@ -17,7 +16,7 @@
 @dataclass
 class TestData:
     count: int = 0
-    schemas: Dict[uuid.UUID, SchemaId] = field(default_factory=dict)
+    schemas: dict[uuid.UUID, SchemaId] = field(default_factory=dict)


 SUBJECTS = ["test-subject-1", "test-subject-2"]
10 changes: 5 additions & 5 deletions src/karapace/anonymize_schemas/anonymize_avro.py
@@ -4,7 +4,7 @@
 Copyright (c) 2023 Aiven Ltd
 See LICENSE for details
 """
-from typing import Any, Dict, List, Union
+from typing import Any, Union
 from typing_extensions import TypeAlias

 import hashlib
@@ -95,7 +95,7 @@ def anonymize_element(m: re.Match) -> str:
     return NAME_ANONYMIZABLE_PATTERN.sub(anonymize_element, name)


-Schema: TypeAlias = Union[str, Dict[str, Any], List[Any]]
+Schema: TypeAlias = Union[str, dict[str, Any], list[Any]]


 def anonymize(input_schema: Schema) -> Schema:
@@ -105,10 +105,10 @@ def anonymize(input_schema: Schema) -> Schema:
         if input_schema in ALL_TYPES:
             return input_schema
         return anonymize_name(input_schema)
-    elif isinstance(input_schema, List):
+    elif isinstance(input_schema, list):
         return [anonymize(value) for value in input_schema]
-    elif isinstance(input_schema, Dict):
-        output_schema: Dict[str, Any] = {}
+    elif isinstance(input_schema, dict):
+        output_schema: dict[str, Any] = {}
         for key, value in input_schema.items():
             if key in KEYWORDS:
                 output_schema[key] = anonymize(value)
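The `isinstance` rewrites in `anonymize` are behavior-preserving: the bare `typing.List` and `typing.Dict` aliases resolve to the builtins at runtime, so checking against `list` and `dict` directly is equivalent and drops the deprecated aliases. A standalone sketch of the equivalence (illustration only, not repository code):

```python
from typing import List  # deprecated alias, shown only for comparison

value = [1, 2, 3]

# The alias's runtime origin is the builtin class itself...
assert List.__origin__ is list
# ...so both checks pass; pyupgrade simply drops the alias.
assert isinstance(value, List)
assert isinstance(value, list)

# Parameterized forms are still not valid isinstance targets:
# isinstance(value, list[int])  # raises TypeError
```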
4 changes: 2 additions & 2 deletions src/karapace/avro_dataclasses/introspect.py
@@ -6,11 +6,11 @@
 from __future__ import annotations

 from .schema import AvroType, EnumType, FieldSchema, MapType, RecordSchema
-from collections.abc import Mapping
+from collections.abc import Mapping, Sequence
 from dataclasses import Field, fields, is_dataclass, MISSING
 from enum import Enum
 from functools import lru_cache
-from typing import Final, get_args, get_origin, Sequence, TYPE_CHECKING, TypeVar, Union
+from typing import Final, get_args, get_origin, TYPE_CHECKING, TypeVar, Union

 import datetime
 import uuid
3 changes: 2 additions & 1 deletion src/karapace/avro_dataclasses/models.py
@@ -5,10 +5,11 @@
 from __future__ import annotations

 from .introspect import record_schema
+from collections.abc import Iterable, Mapping
 from dataclasses import asdict, fields, is_dataclass
 from enum import Enum
 from functools import lru_cache, partial
-from typing import Callable, cast, IO, Iterable, Mapping, TYPE_CHECKING, TypeVar, Union
+from typing import Callable, cast, IO, TYPE_CHECKING, TypeVar, Union
 from typing_extensions import get_args, get_origin, Self

 import avro
4 changes: 2 additions & 2 deletions src/karapace/backup/api.py
@@ -22,7 +22,7 @@
 from .poll_timeout import PollTimeout
 from .topic_configurations import ConfigSource, get_topic_configurations
 from aiokafka.errors import KafkaError, TopicAlreadyExistsError
-from collections.abc import Sized
+from collections.abc import Iterator, Mapping, Sized
 from concurrent.futures import Future
 from confluent_kafka import Message, TopicPartition
 from enum import Enum
@@ -42,7 +42,7 @@
 from pathlib import Path
 from rich.console import Console
 from tenacity import retry, retry_if_exception_type, RetryCallState, stop_after_delay, wait_fixed
-from typing import Callable, Iterator, Literal, Mapping, NewType, TypeVar
+from typing import Callable, Literal, NewType, TypeVar

 import contextlib
 import datetime
3 changes: 2 additions & 1 deletion src/karapace/backup/backends/reader.py
@@ -4,10 +4,11 @@
 """
 from __future__ import annotations

+from collections.abc import Generator, Iterator, Mapping, Sequence
 from karapace.dataclasses import default_dataclass
 from karapace.typing import JsonData, JsonObject
 from pathlib import Path
-from typing import Callable, ClassVar, Final, Generator, IO, Iterator, Mapping, Optional, Sequence, TypeVar, Union
+from typing import Callable, ClassVar, Final, IO, Optional, TypeVar, Union
 from typing_extensions import TypeAlias

 import abc
5 changes: 3 additions & 2 deletions src/karapace/backup/backends/v1.py
@@ -4,16 +4,17 @@
 """
 from __future__ import annotations

+from collections.abc import Generator
 from karapace.backup.backends.reader import BaseItemsBackupReader
 from karapace.utils import json_decode
-from typing import Generator, IO, List
+from typing import IO


 class SchemaBackupV1Reader(BaseItemsBackupReader):
     @staticmethod
     def items_from_file(fp: IO[str]) -> Generator[list[str], None, None]:
         raw_msg = fp.read()
-        values = json_decode(raw_msg, List[List[str]])
+        values = json_decode(raw_msg, list[list[str]])
         if not values:
             return
         yield from values
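Passing `list[list[str]]` to `json_decode` works because PEP 585 generics are real objects (`types.GenericAlias`) at runtime, introspectable with `typing.get_origin`/`get_args` just like the old `List[List[str]]`. A rough sketch of such a type-checked decoder (hypothetical helper, not Karapace's actual `json_decode`):

```python
from typing import Any, get_origin

import json


def json_decode(raw: str, expected: Any) -> Any:
    """Decode JSON and check the top-level container type (sketch only)."""
    value = json.loads(raw)
    # get_origin(list[list[str]]) is list; bare classes have no origin.
    origin = get_origin(expected) or expected
    if not isinstance(value, origin):
        raise ValueError(f"expected {expected}, got {type(value).__name__}")
    return value


# json_decode('[["a", "b"]]', list[list[str]])  -> [["a", "b"]]
# json_decode('{}', list[list[str]])            -> raises ValueError
```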
7 changes: 4 additions & 3 deletions src/karapace/backup/backends/v2.py
@@ -4,12 +4,13 @@
 """
 from __future__ import annotations

+from collections.abc import Generator, Sequence
 from karapace.anonymize_schemas import anonymize_avro
 from karapace.backup.backends.reader import BaseItemsBackupReader
 from karapace.backup.backends.writer import BaseKVBackupWriter, StdOut
 from karapace.utils import json_decode, json_encode
 from pathlib import Path
-from typing import Any, ClassVar, Dict, Final, Generator, IO, Sequence
+from typing import Any, ClassVar, Final, IO

 import base64
 import contextlib
@@ -61,8 +62,8 @@ def serialize_record(
        # Check that the message has key `schema` and type is Avro schema.
        # The Avro schemas may have `schemaType` key, if not present the schema is Avro.

-       key = json_decode(key_bytes, Dict[str, str])
-       value = json_decode(value_bytes, Dict[str, str])
+       key = json_decode(key_bytes, dict[str, str])
+       value = json_decode(value_bytes, dict[str, str])

        if value and "schema" in value and value.get("schemaType", "AVRO") == "AVRO":
            original_schema: Any = json_decode(value["schema"])
3 changes: 2 additions & 1 deletion src/karapace/backup/backends/v3/backend.py
@@ -9,6 +9,7 @@
 from .readers import read_metadata, read_records
 from .schema import ChecksumAlgorithm, DataFile, Header, Metadata, Record
 from .writers import write_metadata, write_record
+from collections.abc import Generator, Iterator, Mapping, Sequence
 from confluent_kafka import Message
 from dataclasses import dataclass
 from karapace.backup.backends.reader import BaseBackupReader, Instruction, ProducerSend, RestoreTopic
@@ -18,7 +19,7 @@
 from karapace.utils import assert_never
 from karapace.version import __version__
 from pathlib import Path
-from typing import Callable, ContextManager, Final, Generator, IO, Iterator, Mapping, Sequence, TypeVar
+from typing import Callable, ContextManager, Final, IO, TypeVar
 from typing_extensions import TypeAlias

 import datetime
3 changes: 2 additions & 1 deletion src/karapace/backup/backends/v3/readers.py
@@ -8,8 +8,9 @@
 from .constants import V3_MARKER
 from .errors import InvalidChecksum, InvalidHeader, TooFewRecords, TooManyRecords, UnexpectedEndOfData
 from .schema import Metadata, Record
+from collections.abc import Generator
 from karapace.avro_dataclasses.models import AvroModel
-from typing import Generator, IO, TypeVar
+from typing import IO, TypeVar

 import io
 import struct
7 changes: 4 additions & 3 deletions src/karapace/backup/backends/v3/schema.py
@@ -4,10 +4,11 @@
 Copyright (c) 2023 Aiven Ltd
 See LICENSE for details
 """
+from collections.abc import Mapping
 from dataclasses import field
 from karapace.avro_dataclasses.models import AvroModel
 from karapace.dataclasses import default_dataclass
-from typing import Mapping, Optional, Tuple
+from typing import Optional

 import datetime
 import enum
@@ -56,7 +57,7 @@ class Metadata(AvroModel):
     partition_count: int = field(metadata={"type": "int"})
     replication_factor: int = field(metadata={"type": "int"})
     topic_configurations: Mapping[str, str]
-    data_files: Tuple[DataFile, ...]
+    data_files: tuple[DataFile, ...]
     checksum_algorithm: ChecksumAlgorithm = ChecksumAlgorithm.unknown

     def __post_init__(self) -> None:
@@ -78,7 +79,7 @@ class Header(AvroModel):
 class Record(AvroModel):
     key: Optional[bytes]
     value: Optional[bytes]
-    headers: Tuple[Header, ...]
+    headers: tuple[Header, ...]
     offset: int = field(metadata={"type": "long"})
     timestamp: int = field(metadata={"type": "long"})
     # In order to reduce the impact of checksums on total file sizes, especially
5 changes: 3 additions & 2 deletions src/karapace/backup/backends/v3/schema_tool.py
@@ -6,10 +6,11 @@
 """
 from . import schema
 from avro.compatibility import ReaderWriterCompatibilityChecker, SchemaCompatibilityType
+from collections.abc import Generator
 from karapace.avro_dataclasses.introspect import record_schema
 from karapace.avro_dataclasses.models import AvroModel
 from karapace.schema_models import parse_avro_schema_definition
-from typing import Final, Generator, Tuple, Type
+from typing import Final

 import argparse
 import json
@@ -19,7 +20,7 @@
 import sys


-def types() -> Generator[Tuple[str, Type[AvroModel]], None, None]:
+def types() -> Generator[tuple[str, type[AvroModel]], None, None]:
    for name, value in schema.__dict__.items():
        try:
            if issubclass(value, AvroModel) and value != AvroModel:
3 changes: 2 additions & 1 deletion src/karapace/backup/backends/writer.py
@@ -4,10 +4,11 @@
 """
 from __future__ import annotations

+from collections.abc import Iterator, Mapping, Sequence
 from confluent_kafka import Message
 from karapace.backup.safe_writer import bytes_writer, str_writer
 from pathlib import Path
-from typing import ContextManager, Generic, IO, Iterator, Literal, Mapping, Sequence, TypeVar
+from typing import ContextManager, Generic, IO, Literal, TypeVar
 from typing_extensions import TypeAlias

 import abc
2 changes: 1 addition & 1 deletion src/karapace/backup/cli.py
@@ -10,9 +10,9 @@
 from .errors import BackupDataRestorationError, StaleConsumerError
 from .poll_timeout import PollTimeout
 from aiokafka.errors import BrokerResponseError
+from collections.abc import Iterator
 from karapace.backup.api import VerifyLevel
 from karapace.config import Config, read_config
-from typing import Iterator

 import argparse
 import contextlib
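Moves like this one, from `typing.Iterator` to `collections.abc.Iterator`, are safe on a 3.9 baseline because PEP 585 also made the `collections.abc` classes subscriptable in annotations; the `typing` container aliases were deprecated at the same time. For instance (standalone sketch, not repository code):

```python
from collections.abc import Iterator


def chunked(values: list[int], size: int) -> Iterator[list[int]]:
    """Yield consecutive chunks of at most `size` items."""
    for start in range(0, len(values), size):
        yield values[start : start + size]


# collections.abc.Iterator is the same ABC the typing alias pointed at,
# so runtime checks keep working:
assert isinstance(chunked([1, 2, 3], 2), Iterator)
```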
3 changes: 2 additions & 1 deletion src/karapace/backup/safe_writer.py
@@ -4,9 +4,10 @@
 """
 from __future__ import annotations

+from collections.abc import Generator
 from pathlib import Path
 from tempfile import mkstemp, TemporaryDirectory
-from typing import Final, Generator, IO, Literal
+from typing import Final, IO, Literal
 from typing_extensions import TypeAlias

 import contextlib
3 changes: 2 additions & 1 deletion src/karapace/backup/topic_configurations.py
@@ -4,9 +4,10 @@
 """
 from __future__ import annotations

+from collections.abc import Container
 from confluent_kafka.admin import ConfigSource
 from karapace.kafka.admin import KafkaAdminClient
-from typing import Container, Final
+from typing import Final

 ALL_CONFIG_SOURCES: Final = ConfigSource

3 changes: 2 additions & 1 deletion src/karapace/client.py
@@ -5,8 +5,9 @@
 See LICENSE for details
 """
 from aiohttp import BasicAuth, ClientSession
+from collections.abc import Awaitable, Mapping
 from karapace.typing import JsonData
-from typing import Awaitable, Callable, Mapping, Optional, Union
+from typing import Callable, Optional, Union
 from urllib.parse import urljoin

 import logging
6 changes: 3 additions & 3 deletions src/karapace/compatibility/jsonschema/utils.py
@@ -5,12 +5,12 @@
 from copy import copy
 from jsonschema import Draft7Validator
 from karapace.compatibility.jsonschema.types import BooleanSchema, Instance, Keyword, Subschema
-from typing import Any, List, Optional, Tuple, Type, TypeVar, Union
+from typing import Any, Optional, TypeVar, Union

 import re

 T = TypeVar("T")
-JSONSCHEMA_TYPES = Union[Instance, Subschema, Keyword, Type[BooleanSchema]]
+JSONSCHEMA_TYPES = Union[Instance, Subschema, Keyword, type[BooleanSchema]]


 def normalize_schema(validator: Draft7Validator) -> Any:
@@ -53,7 +53,7 @@ def normalize_schema_rec(validator: Draft7Validator, original_schema: Any) -> Any:
     return normalized


-def maybe_get_subschemas_and_type(schema: Any) -> Optional[Tuple[List[Any], Subschema]]:
+def maybe_get_subschemas_and_type(schema: Any) -> Optional[tuple[list[Any], Subschema]]:
    """If schema contains `anyOf`, `allOf`, or `oneOf`, return it.

    This will also normalize schemas with a list of types to an `anyOf`, e.g.:
3 changes: 2 additions & 1 deletion src/karapace/config.py
@@ -6,11 +6,12 @@
 """
 from __future__ import annotations

+from collections.abc import Mapping
 from karapace.constants import DEFAULT_AIOHTTP_CLIENT_MAX_SIZE, DEFAULT_PRODUCER_MAX_REQUEST, DEFAULT_SCHEMA_TOPIC
 from karapace.typing import ElectionStrategy, NameStrategy
 from karapace.utils import json_decode, json_encode, JSONDecodeError
 from pathlib import Path
-from typing import IO, Mapping
+from typing import IO
 from typing_extensions import NotRequired, TypedDict

 import logging
3 changes: 2 additions & 1 deletion src/karapace/coordinator/schema_coordinator.py
@@ -25,11 +25,12 @@
     SyncGroupRequest_v3,
 )
 from aiokafka.util import create_future, create_task
+from collections.abc import Coroutine, Sequence
 from karapace.dataclasses import default_dataclass
 from karapace.typing import JsonData
 from karapace.utils import json_decode, json_encode
 from karapace.version import __version__
-from typing import Any, Coroutine, Final, Sequence
+from typing import Any, Final
 from typing_extensions import TypedDict

 import aiokafka.errors as Errors
2 changes: 1 addition & 1 deletion src/karapace/in_memory_database.py
@@ -7,12 +7,12 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
+from collections.abc import Iterable, Sequence
 from dataclasses import dataclass, field
 from karapace.schema_models import SchemaVersion, TypedSchema, Versioner
 from karapace.schema_references import Reference, Referents
 from karapace.typing import SchemaId, Subject, Version
 from threading import Lock, RLock
-from typing import Iterable, Sequence

 import logging

3 changes: 2 additions & 1 deletion src/karapace/instrumentation/prometheus.py
@@ -9,9 +9,10 @@
 from __future__ import annotations

 from aiohttp.web import middleware, Request, Response
+from collections.abc import Awaitable
 from karapace.rapu import RestApp
 from prometheus_client import CollectorRegistry, Counter, Gauge, generate_latest, Histogram
-from typing import Awaitable, Callable, Final
+from typing import Callable, Final

 import logging
 import time
3 changes: 1 addition & 2 deletions src/karapace/kafka/admin.py
@@ -5,7 +5,7 @@

 from __future__ import annotations

-from collections.abc import Iterable
+from collections.abc import Container, Iterable
 from concurrent.futures import Future
 from confluent_kafka import TopicPartition
 from confluent_kafka.admin import (
@@ -27,7 +27,6 @@
     single_futmap_result,
     UnknownTopicOrPartitionError,
 )
-from typing import Container


 class KafkaAdminClient(_KafkaConfigMixin, AdminClient):

0 comments on commit 646fb93
