merge with main
libretto committed Oct 25, 2024
2 parents 47e903f + 60b5db6 commit cba3c0d
Showing 112 changed files with 1,056 additions and 560 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
python-version: [ '3.9', '3.10', '3.11', '3.12' ]
env:
PYTEST_ADDOPTS: >-
--log-dir=/tmp/ci-logs
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -34,7 +34,7 @@ repos:
rev: v3.4.0
hooks:
- id: pyupgrade
args: [ --py38-plus ]
args: [ --py39-plus ]

- repo: https://github.com/pycqa/autoflake
rev: v2.1.1
2 changes: 1 addition & 1 deletion GNUmakefile
@@ -3,7 +3,7 @@ SHELL := /usr/bin/env bash
VENV_DIR ?= $(CURDIR)/venv
PIP ?= pip3 --disable-pip-version-check --no-input --require-virtualenv
PYTHON ?= python3
PYTHON_VERSION ?= 3.8
PYTHON_VERSION ?= 3.9

define PIN_VERSIONS_COMMAND
pip install pip-tools && \
13 changes: 13 additions & 0 deletions README.rst
@@ -149,6 +149,10 @@ Test the compatibility of a schema with the latest schema under subject "test-ke
http://localhost:8081/compatibility/subjects/test-key/versions/latest
{"is_compatible":true}

NOTE: if the subject's compatibility mode is transitive (BACKWARD_TRANSITIVE, FORWARD_TRANSITIVE or FULL_TRANSITIVE), then
compatibility is checked not only against the latest schema but against all previous schemas as well, just as it would be
when registering the new schema through the `subjects/<subject-key>/versions` endpoint.
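
For example, the same check against a subject in a transitive mode could look roughly like this (an illustrative sketch: the subject name and payloads are placeholders, and the per-subject ``/config/<subject-key>`` endpoint is assumed to follow the standard Schema Registry API)::

  $ curl -X PUT -H "Content-Type: application/vnd.schemaregistry.v1+json" \
      --data '{"compatibility": "BACKWARD_TRANSITIVE"}' \
      http://localhost:8081/config/test-key
  {"compatibility":"BACKWARD_TRANSITIVE"}
  $ curl -X POST -H "Content-Type: application/vnd.schemaregistry.v1+json" \
      --data '{"schema": "{\"type\": \"string\"}"}' \
      http://localhost:8081/compatibility/subjects/test-key/versions/latest
  {"is_compatible":true}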

Get current global backwards compatibility setting value::

$ curl -X GET http://localhost:8081/config
@@ -478,6 +482,15 @@ Keys to take special care are the ones needed to configure Kafka and advertised_
* - ``use_protobuf_formatter``
- ``false``
- Whether the protobuf formatter should be applied to protobuf schemas in order to normalize them. The formatter is applied on top of, and independently of, regular normalization, and schemas are persisted in the formatted state.
* - ``log_handler``
- ``stdout``
- Select the log handler. The default is standard output; ``systemd`` is available as an alternative handler.
* - ``log_level``
- ``DEBUG``
- Logging level. The default is ``DEBUG``.
* - ``log_format``
- ``%(name)-20s\t%(threadName)s\t%(levelname)-8s\t%(message)s``
- Log format string.
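
These options map directly onto keys in the service configuration file; a minimal sketch of the logging-related part of ``karapace.config.json`` might look like this (illustrative only, all other required settings omitted)::

  {
      "log_handler": "stdout",
      "log_level": "DEBUG",
      "log_format": "%(name)-20s\t%(threadName)s\t%(levelname)-8s\t%(message)s"
  }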


Authentication and authorization of Karapace Schema Registry REST API
1 change: 1 addition & 0 deletions karapace.config.json
@@ -9,6 +9,7 @@
"group_id": "schema-registry",
"host": "127.0.0.1",
"log_level": "DEBUG",
"log_handler": "stdout",
"port": 8081,
"server_tls_certfile": null,
"server_tls_keyfile": null,
5 changes: 4 additions & 1 deletion mypy.ini
@@ -1,6 +1,6 @@
[mypy]
mypy_path = $MYPY_CONFIG_FILE_DIR/stubs
python_version = 3.8
python_version = 3.9
packages = karapace
show_error_codes = True
pretty = True
@@ -85,3 +85,6 @@ ignore_missing_imports = True

[mypy-networkx.*]
ignore_missing_imports = True

[mypy-systemd.*]
ignore_missing_imports = True
3 changes: 1 addition & 2 deletions performance-test/schema-registry-schema-post.py
@@ -5,7 +5,6 @@
from dataclasses import dataclass, field
from locust import FastHttpUser, task
from locust.contrib.fasthttp import ResponseContextManager
from typing import Dict

import json
import random
@@ -17,7 +16,7 @@
@dataclass
class TestData:
count: int = 0
schemas: Dict[uuid.UUID, SchemaId] = field(default_factory=dict)
schemas: dict[uuid.UUID, SchemaId] = field(default_factory=dict)


SUBJECTS = ["test-subject-1", "test-subject-2"]
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "karapace"
requires-python = ">= 3.8"
requires-python = ">= 3.9"
dynamic = ["version"]
readme = "README.rst"
license = {file = "LICENSE"}
@@ -49,7 +49,6 @@ classifiers=[
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
@@ -70,6 +69,7 @@ Issues = "https://github.com/Aiven-Open/karapace/issues"

[project.optional-dependencies]
sentry-sdk = ["sentry-sdk>=1.6.0"]
systemd-logging = ["systemd-python==235"]
ujson = ["ujson"]
dev = [
# Developer QoL
@@ -107,5 +107,5 @@ include-package-data = true
version_file = "src/karapace/version.py"

[tool.black]
target-version = ["py38"]
target-version = ["py39"]
line-length = 125
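
The new ``systemd-logging`` extra declared above could be installed from a source checkout roughly like this (a hedged sketch; the extra only pulls in ``systemd-python``, and the exact command depends on your environment)::

  $ pip install -e ".[systemd-logging]"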
13 changes: 2 additions & 11 deletions requirements/requirements-dev.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.8
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
# make pin-requirements
@@ -93,10 +93,6 @@ idna==3.8
# yarl
importlib-metadata==8.4.0
# via flask
importlib-resources==6.4.4
# via
# jsonschema
# jsonschema-specifications
iniconfig==2.0.0
# via pytest
isodate==0.6.1
@@ -135,8 +131,6 @@ packaging==24.1
# pytest
pdbpp==0.10.3
# via karapace (/karapace/pyproject.toml)
pkgutil-resolve-name==1.3.10
# via jsonschema
pluggy==1.5.0
# via pytest
prometheus-client==0.20.0
@@ -215,7 +209,6 @@ typing-extensions==4.12.2
# via
# anyio
# karapace (/karapace/pyproject.toml)
# rich
ujson==5.10.0
# via karapace (/karapace/pyproject.toml)
urllib3==2.2.2
@@ -238,9 +231,7 @@ yarl==1.12.1
# aiohttp
# karapace (/karapace/pyproject.toml)
zipp==3.20.1
# via
# importlib-metadata
# importlib-resources
# via importlib-metadata
zope-event==5.0
# via gevent
zope-interface==7.0.2
11 changes: 1 addition & 10 deletions requirements/requirements-typing.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.8
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
# make pin-requirements
@@ -45,10 +45,6 @@ idna==3.10
# via
# anyio
# yarl
importlib-resources==6.4.5
# via
# jsonschema
# jsonschema-specifications
isodate==0.6.1
# via karapace (/karapace/pyproject.toml)
jsonschema==4.23.0
@@ -73,8 +69,6 @@ networkx==3.1
# via karapace (/karapace/pyproject.toml)
packaging==24.1
# via aiokafka
pkgutil-resolve-name==1.3.10
# via jsonschema
prometheus-client==0.20.0
# via karapace (/karapace/pyproject.toml)
protobuf==3.20.3
@@ -122,7 +116,6 @@ typing-extensions==4.12.2
# karapace (/karapace/pyproject.toml)
# multidict
# mypy
# rich
ujson==5.10.0
# via karapace (/karapace/pyproject.toml)
urllib3==2.2.2
@@ -135,7 +128,5 @@ yarl==1.12.1
# via
# aiohttp
# karapace (/karapace/pyproject.toml)
zipp==3.20.2
# via importlib-resources
zstandard==0.23.0
# via karapace (/karapace/pyproject.toml)
11 changes: 1 addition & 10 deletions requirements/requirements.txt
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with Python 3.8
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
# make pin-requirements
@@ -43,10 +43,6 @@ idna==3.8
# via
# anyio
# yarl
importlib-resources==6.4.4
# via
# jsonschema
# jsonschema-specifications
isodate==0.6.1
# via karapace (/karapace/pyproject.toml)
jsonschema==4.23.0
@@ -67,8 +63,6 @@ networkx==3.1
# via karapace (/karapace/pyproject.toml)
packaging==24.1
# via aiokafka
pkgutil-resolve-name==1.3.10
# via jsonschema
prometheus-client==0.20.0
# via karapace (/karapace/pyproject.toml)
protobuf==3.20.3
@@ -103,7 +97,6 @@ typing-extensions==4.12.2
# via
# anyio
# karapace (/karapace/pyproject.toml)
# rich
ujson==5.10.0
# via karapace (/karapace/pyproject.toml)
watchfiles==0.23.0
@@ -114,7 +107,5 @@ yarl==1.12.1
# via
# aiohttp
# karapace (/karapace/pyproject.toml)
zipp==3.20.1
# via importlib-resources
zstandard==0.23.0
# via karapace (/karapace/pyproject.toml)
2 changes: 1 addition & 1 deletion runtime.txt
@@ -1 +1 @@
python-3.8.16
python-3.9.20
10 changes: 5 additions & 5 deletions src/karapace/anonymize_schemas/anonymize_avro.py
@@ -4,7 +4,7 @@
Copyright (c) 2023 Aiven Ltd
See LICENSE for details
"""
from typing import Any, Dict, List, Union
from typing import Any, Union
from typing_extensions import TypeAlias

import hashlib
@@ -95,7 +95,7 @@ def anonymize_element(m: re.Match) -> str:
return NAME_ANONYMIZABLE_PATTERN.sub(anonymize_element, name)


Schema: TypeAlias = Union[str, Dict[str, Any], List[Any]]
Schema: TypeAlias = Union[str, dict[str, Any], list[Any]]


def anonymize(input_schema: Schema) -> Schema:
@@ -105,10 +105,10 @@ def anonymize(input_schema: Schema) -> Schema:
if input_schema in ALL_TYPES:
return input_schema
return anonymize_name(input_schema)
elif isinstance(input_schema, List):
elif isinstance(input_schema, list):
return [anonymize(value) for value in input_schema]
elif isinstance(input_schema, Dict):
output_schema: Dict[str, Any] = {}
elif isinstance(input_schema, dict):
output_schema: dict[str, Any] = {}
for key, value in input_schema.items():
if key in KEYWORDS:
output_schema[key] = anonymize(value)
19 changes: 11 additions & 8 deletions src/karapace/avro_dataclasses/introspect.py
@@ -6,15 +6,11 @@
from __future__ import annotations

from .schema import AvroType, EnumType, FieldSchema, MapType, RecordSchema
from collections.abc import Mapping
from collections.abc import Mapping, Sequence
from dataclasses import Field, fields, is_dataclass, MISSING
from enum import Enum
from functools import lru_cache
from typing import Final, Sequence, TYPE_CHECKING, TypeVar, Union

# Note: It's important get_args and get_origin are imported from typing_extensions
# until support for Python 3.8 is dropped.
from typing_extensions import get_args, get_origin
from typing import Final, get_args, get_origin, TYPE_CHECKING, TypeVar, Union

import datetime
import uuid
@@ -46,10 +42,17 @@ def _field_type_array(field: Field, origin: type, type_: object) -> AvroType:
else:
(inner_type,) = get_args(type_)

items: AvroType
if is_dataclass(inner_type):
assert isinstance(inner_type, type)
items = record_schema(inner_type)
else:
items = _field_type(field, inner_type)

return {
"name": f"one_of_{field.name}",
"type": "array",
"items": (record_schema(inner_type) if is_dataclass(inner_type) else _field_type(field, inner_type)),
"items": items,
}


@@ -132,7 +135,7 @@ def _field_type(field: Field, type_: object) -> AvroType: # pylint: disable=too
T = TypeVar("T", str, int, bool, Enum, None)


def transform_default(type_: type[T], default: T) -> str | int | bool | None:
def transform_default(type_: type[T] | str, default: T) -> str | int | bool | None:
if isinstance(default, Enum):
assert isinstance(type_, type)
assert issubclass(type_, Enum)
3 changes: 2 additions & 1 deletion src/karapace/avro_dataclasses/models.py
@@ -5,10 +5,11 @@
from __future__ import annotations

from .introspect import record_schema
from collections.abc import Iterable, Mapping
from dataclasses import asdict, fields, is_dataclass
from enum import Enum
from functools import lru_cache, partial
from typing import Callable, cast, IO, Iterable, Mapping, TYPE_CHECKING, TypeVar, Union
from typing import Callable, cast, IO, TYPE_CHECKING, TypeVar, Union
from typing_extensions import get_args, get_origin, Self

import avro
4 changes: 2 additions & 2 deletions src/karapace/backup/api.py
@@ -22,7 +22,7 @@
from .poll_timeout import PollTimeout
from .topic_configurations import ConfigSource, get_topic_configurations
from aiokafka.errors import KafkaError, TopicAlreadyExistsError
from collections.abc import Sized
from collections.abc import Iterator, Mapping, Sized
from concurrent.futures import Future
from confluent_kafka import Message, TopicPartition
from enum import Enum
@@ -42,7 +42,7 @@
from pathlib import Path
from rich.console import Console
from tenacity import retry, retry_if_exception_type, RetryCallState, stop_after_delay, wait_fixed
from typing import Callable, Iterator, Literal, Mapping, NewType, TypeVar
from typing import Callable, Literal, NewType, TypeVar

import contextlib
import datetime
3 changes: 2 additions & 1 deletion src/karapace/backup/backends/reader.py
@@ -4,10 +4,11 @@
"""
from __future__ import annotations

from collections.abc import Generator, Iterator, Mapping, Sequence
from karapace.dataclasses import default_dataclass
from karapace.typing import JsonData, JsonObject
from pathlib import Path
from typing import Callable, ClassVar, Final, Generator, IO, Iterator, Mapping, Optional, Sequence, TypeVar, Union
from typing import Callable, ClassVar, Final, IO, Optional, TypeVar, Union
from typing_extensions import TypeAlias

import abc