diff --git a/.bandit.yml b/.bandit.yml
index 4262043b..a1141ddb 100644
--- a/.bandit.yml
+++ b/.bandit.yml
@@ -2,3 +2,6 @@ exclude_dirs: ['tests']
skips: [
'B403', # Imports of "pickle"
]
+
+assert_used:
+ skips: ['docs/**/*.py']
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 0942a66b..887539c2 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -18,3 +18,5 @@ values =
first_value = 1
[bumpversion:file:src/easynetwork/__init__.py]
+
+[bumpversion:file:docs/source/conf.py]
diff --git a/.flake8 b/.flake8
index 570d4541..fef4c1c9 100644
--- a/.flake8
+++ b/.flake8
@@ -23,7 +23,10 @@ per-file-ignores =
# F401: 'module' imported but unused
# F403: 'from module import *' used; unable to detect undefined names
# F405: 'name' may be undefined, or defined from star imports: 'module'
+ # F841: local variable 'name' is assigned to but never used
# DALL001: There is no __all__ defined
src/easynetwork/__init__.py:F401,F403,F405
src/easynetwork/*/__init__.py:F401,F403,F405
tests/*.py:DALL001
+ docs/*.py:DALL001
+ docs/source/_include/*.py:F841,DALL001
diff --git a/.github/actions/setup-tox/action.yml b/.github/actions/setup-tox/action.yml
index b7f94690..4fef7b40 100644
--- a/.github/actions/setup-tox/action.yml
+++ b/.github/actions/setup-tox/action.yml
@@ -14,7 +14,7 @@ runs:
shell: bash
- uses: pdm-project/setup-pdm@v3
with:
- version: '2.8.2'
+ version: '2.9.2'
python-version: ${{ inputs.python-version }}
cache: true
cache-dependency-path: './pdm.lock'
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 75a3eab1..4b6db6d8 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -17,6 +17,7 @@ on:
- '.github/**'
- '!.github/actions/setup-tox/**'
- '!.github/workflows/build.yml'
+ - 'docs/**'
push:
branches:
- main
@@ -32,6 +33,7 @@ on:
- '.github/**'
- '!.github/actions/setup-tox/**'
- '!.github/workflows/build.yml'
+ - 'docs/**'
workflow_dispatch:
inputs:
SOURCE_DATE_EPOCH:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e4eb64aa..f690e537 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,7 +9,7 @@ ci:
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v3.10.1
+ rev: v3.11.0
hooks:
- id: pyupgrade
args: ['--py311-plus']
@@ -31,13 +31,22 @@ repos:
types_or: [python, pyi]
require_serial: true
pass_filenames: false
+ - id: mypy
+ name: mypy (docs)
+ files: ^((src|docs/source)/)
+ exclude: ^(docs/source/conf.py)$
+ entry: tox run -q -e mypy-docs
+ language: system
+ types_or: [python, pyi]
+ require_serial: true
+ pass_filenames: false
- repo: https://github.com/PyCQA/isort
rev: '5.12.0'
hooks:
- id: isort
args: ['--filter-files', '--settings-file', 'pyproject.toml']
- repo: https://github.com/psf/black
- rev: '23.7.0'
+ rev: '23.9.1'
hooks:
- id: black
args: ['--config', 'pyproject.toml']
@@ -64,9 +73,17 @@ repos:
types: [] # Overwrite with empty in order to fallback to types_or
types_or: [python, pyi]
- repo: https://github.com/pdm-project/pdm
- rev: '2.8.2'
+ rev: '2.9.2'
hooks:
- id: pdm-lock-check
+ - repo: https://github.com/pre-commit/pygrep-hooks
+ rev: 'v1.10.0'
+ hooks:
+ - id: python-check-blanket-noqa
+ - id: python-no-log-warn
+ - id: rst-backticks
+ - id: rst-directive-colons
+ - id: rst-inline-touching-normal
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 'v4.4.0'
hooks:
diff --git a/.vscode/settings.example.json b/.vscode/settings.example.json
index 7275d667..8b30e645 100644
--- a/.vscode/settings.example.json
+++ b/.vscode/settings.example.json
@@ -19,6 +19,12 @@
130
]
},
+ "[restructuredtext]": {
+ "editor.tabSize": 3,
+ "editor.rulers": [
+ 150
+ ]
+ },
"black-formatter.args": [
"--config",
"pyproject.toml",
diff --git a/README.md b/README.md
index 92d516e6..9a8a0bc5 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,175 @@
# EasyNetwork
-The easiest way to use sockets in Python
+
+The easiest way to use sockets in Python!
+
+[![PyPI](https://img.shields.io/pypi/v/easynetwork)](https://pypi.org/project/easynetwork/)
+[![PyPI - License](https://img.shields.io/pypi/l/easynetwork)](https://github.com/francis-clairicia/EasyNetwork/blob/main/LICENSE)
+![PyPI - Python Version](https://img.shields.io/pypi/pyversions/easynetwork)
+
+[![Test](https://github.com/francis-clairicia/EasyNetwork/actions/workflows/test.yml/badge.svg)](https://github.com/francis-clairicia/EasyNetwork/actions/workflows/test.yml)
+[![Codecov](https://img.shields.io/codecov/c/github/francis-clairicia/EasyNetwork)](https://codecov.io/gh/francis-clairicia/EasyNetwork)
+[![CodeFactor Grade](https://img.shields.io/codefactor/grade/github/francis-clairicia/EasyNetwork)](https://www.codefactor.io/repository/github/francis-clairicia/easynetwork)
+
+[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit)](https://github.com/pre-commit/pre-commit)
+[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/francis-clairicia/EasyNetwork/main.svg)](https://results.pre-commit.ci/latest/github/francis-clairicia/EasyNetwork/main)
+
+[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/)
+[![security: bandit](https://img.shields.io/badge/security-bandit-yellow.svg)](https://github.com/PyCQA/bandit)
+
+[![Hatch project](https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg)](https://github.com/pypa/hatch)
+[![pdm-managed](https://img.shields.io/badge/pdm-managed-blueviolet)](https://pdm.fming.dev)
+
+## Installation
+### From PyPI repository
+```sh
+pip install --user easynetwork
+```
+
+### From source
+```sh
+git clone https://github.com/francis-clairicia/EasyNetwork.git
+cd EasyNetwork
+pip install --user .
+```
+
+## Overview
+EasyNetwork completely encapsulates the socket handling, providing a higher-level interface
+that lets an application handle its logic with Python objects,
+without worrying about how to process, send, or receive data over the network.
+
+The communication protocol can be whatever you want, be it JSON, pickle, ASCII, struct, base64-encoded,
+compressed, encrypted, or any other format, including ones that are not part of the standard library.
+You choose the data format and the library takes care of the rest.
+
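+A minimal sketch of the idea (type parameters and names follow the usage examples below; only the serializer determines the wire format):
+
+```py
+from typing import Any
+
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+# Swap JSONSerializer for any other serializer with the same interface:
+# the rest of the client/server code stays unchanged.
+json_protocol: StreamProtocol[Any, Any] = StreamProtocol(JSONSerializer())
+```
+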
+Works with TCP and UDP.
+
+### Documentation
+Coming soon.
+
+## Usage
+### TCP Echo server with JSON data
+```py
+import logging
+from collections.abc import AsyncGenerator
+from typing import Any, TypeAlias
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler
+from easynetwork.api_sync.server import StandaloneTCPNetworkServer
+from easynetwork.exceptions import StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+# These TypeAliases are there to help you understand
+# where requests and responses are used in the code
+RequestType: TypeAlias = Any
+ResponseType: TypeAlias = Any
+
+
+class JSONProtocol(StreamProtocol[ResponseType, RequestType]):
+ def __init__(self) -> None:
+ super().__init__(JSONSerializer())
+
+
+class EchoRequestHandler(AsyncStreamRequestHandler[RequestType, ResponseType]):
+ def __init__(self) -> None:
+ self.logger: logging.Logger = logging.getLogger(self.__class__.__name__)
+
+ async def handle(
+ self,
+ client: AsyncStreamClient[ResponseType],
+ ) -> AsyncGenerator[None, RequestType]:
+ try:
+ request: RequestType = yield # A JSON request has been sent by this client
+ except StreamProtocolParseError:
+ # Invalid JSON data sent
+ # This is an example of how you can respond to an invalid request
+ await client.send_packet({"error": "Invalid JSON", "code": "parse_error"})
+ return
+
+ self.logger.info(f"{client.address} sent {request!r}")
+
+ # As a good echo handler, send the request back to the client
+ response: ResponseType = request
+ await client.send_packet(response)
+
+ # Leaving the generator will NOT close the connection,
+ # a new generator will be created afterwards.
+ # You may manually close the connection if you want to:
+ # await client.aclose()
+
+
+def main() -> None:
+ host = None # Bind on all interfaces
+ port = 9000
+
+ logging.basicConfig(level=logging.INFO, format="[ %(levelname)s ] [ %(name)s ] %(message)s")
+ with StandaloneTCPNetworkServer(host, port, JSONProtocol(), EchoRequestHandler()) as server:
+ try:
+ server.serve_forever()
+ except KeyboardInterrupt:
+ pass
+
+
+if __name__ == "__main__":
+ main()
+```
+
+### TCP Echo client with JSON data
+```py
+from typing import Any
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+class JSONProtocol(StreamProtocol[Any, Any]):
+ def __init__(self) -> None:
+ super().__init__(JSONSerializer())
+
+
+def main() -> None:
+ with TCPNetworkClient(("localhost", 9000), JSONProtocol()) as client:
+ client.send_packet({"data": {"my_body": ["as json"]}})
+ response = client.recv_packet() # response should be the sent dictionary
+ print(response) # prints {'data': {'my_body': ['as json']}}
+
+
+if __name__ == "__main__":
+ main()
+```
+
+
+Asynchronous version (with `async def`):
+
+```py
+import asyncio
+from typing import Any
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+class JSONProtocol(StreamProtocol[Any, Any]):
+ def __init__(self) -> None:
+ super().__init__(JSONSerializer())
+
+
+async def main() -> None:
+ async with AsyncTCPNetworkClient(("localhost", 9000), JSONProtocol()) as client:
+ await client.send_packet({"data": {"my_body": ["as json"]}})
+ response = await client.recv_packet() # response should be the sent dictionary
+ print(response) # prints {'data': {'my_body': ['as json']}}
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
+```
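+
+### UDP endpoint with JSON data
+A minimal sketch, mirroring the datagram examples under `docs/source/_include` (addresses are placeholders):
+
+```py
+from typing import Any
+
+from easynetwork.api_sync.client import UDPNetworkEndpoint
+from easynetwork.protocol import DatagramProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+class JSONDatagramProtocol(DatagramProtocol[Any, Any]):
+    def __init__(self) -> None:
+        super().__init__(JSONSerializer())
+
+
+def main() -> None:
+    with UDPNetworkEndpoint(JSONDatagramProtocol()) as endpoint:
+        # Each datagram carries exactly one JSON object.
+        endpoint.send_packet_to({"data": 42}, ("localhost", 9000))
+        packet, sender_address = endpoint.recv_packet_from()
+        print(f"Received {packet!r} from {sender_address}")
+
+
+if __name__ == "__main__":
+    main()
+```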
+
+
+
+## License
+This project is licensed under the terms of the [Apache Software License 2.0](https://github.com/francis-clairicia/EasyNetwork/blob/main/LICENSE).
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 00000000..cd89541d
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1,2 @@
+# Intersphinx inventory dump.
+*-inv.txt
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 00000000..d0c3cbf1
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 00000000..dc1312ab
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/docs/source/_extensions/sphinx_easynetwork.py b/docs/source/_extensions/sphinx_easynetwork.py
new file mode 100644
index 00000000..18b390e2
--- /dev/null
+++ b/docs/source/_extensions/sphinx_easynetwork.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+
+from easynetwork.api_sync.server import AbstractNetworkServer
+from easynetwork.api_sync.server._base import BaseStandaloneNetworkServerImpl
+
+
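+# Replace a private base class with its public counterpart in the bases list rendered by autodoc.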
+def _replace_base_in_place(klass: type, bases: list[type], base_to_replace: type, base_to_set_instead: type) -> None:
+ if issubclass(klass, base_to_replace):
+ for index, base in enumerate(bases):
+ if base is base_to_replace:
+ bases[index] = base_to_set_instead
+
+
+def autodoc_process_bases(app: Sphinx, name: str, obj: type, options: dict[str, Any], bases: list[type]) -> None:
+ _replace_base_in_place(obj, bases, BaseStandaloneNetworkServerImpl, AbstractNetworkServer)
+
+
+def setup(app: Sphinx) -> dict[str, Any]:
+ app.setup_extension("sphinx.ext.autodoc")
+ app.connect("autodoc-process-bases", autodoc_process_bases)
+
+ return {
+ "version": "0.1",
+ "parallel_read_safe": True,
+ "parallel_write_safe": True,
+ }
diff --git a/docs/source/_include/examples/howto/protocols/__init__.py b/docs/source/_include/examples/howto/protocols/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/protocols/basics/__init__.py b/docs/source/_include/examples/howto/protocols/basics/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_instance.py b/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_instance.py
new file mode 100644
index 00000000..1589214b
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_instance.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import DatagramProtocol
+from easynetwork.serializers import JSONSerializer
+
+SentPacket: TypeAlias = Any
+ReceivedPacket: TypeAlias = Any
+
+json_protocol: DatagramProtocol[SentPacket, ReceivedPacket] = DatagramProtocol(JSONSerializer())
diff --git a/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_subclass.py b/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_subclass.py
new file mode 100644
index 00000000..08b6a63d
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/basics/datagram_protocol_subclass.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import DatagramProtocol
+from easynetwork.serializers import JSONSerializer
+
+SentPacket: TypeAlias = Any
+ReceivedPacket: TypeAlias = Any
+
+
+class JSONDatagramProtocol(DatagramProtocol[SentPacket, ReceivedPacket]):
+ def __init__(self) -> None:
+ serializer = JSONSerializer()
+ super().__init__(serializer)
+
+
+json_protocol = JSONDatagramProtocol()
diff --git a/docs/source/_include/examples/howto/protocols/basics/stream_protocol_instance.py b/docs/source/_include/examples/howto/protocols/basics/stream_protocol_instance.py
new file mode 100644
index 00000000..1a4e39d3
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/basics/stream_protocol_instance.py
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+SentPacket: TypeAlias = Any
+ReceivedPacket: TypeAlias = Any
+
+json_protocol: StreamProtocol[SentPacket, ReceivedPacket] = StreamProtocol(JSONSerializer())
diff --git a/docs/source/_include/examples/howto/protocols/basics/stream_protocol_subclass.py b/docs/source/_include/examples/howto/protocols/basics/stream_protocol_subclass.py
new file mode 100644
index 00000000..3005fa43
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/basics/stream_protocol_subclass.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+SentPacket: TypeAlias = Any
+ReceivedPacket: TypeAlias = Any
+
+
+class JSONStreamProtocol(StreamProtocol[SentPacket, ReceivedPacket]):
+ def __init__(self) -> None:
+ serializer = JSONSerializer()
+ super().__init__(serializer)
+
+
+json_protocol = JSONStreamProtocol()
diff --git a/docs/source/_include/examples/howto/protocols/composite_converter/__init__.py b/docs/source/_include/examples/howto/protocols/composite_converter/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/protocols/composite_converter/packet_converter_subclass.py b/docs/source/_include/examples/howto/protocols/composite_converter/packet_converter_subclass.py
new file mode 100644
index 00000000..a32c2da4
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/composite_converter/packet_converter_subclass.py
@@ -0,0 +1,45 @@
+# mypy: disable-error-code=empty-body
+
+from __future__ import annotations
+
+from typing import Any
+
+from easynetwork.converter import AbstractPacketConverterComposite
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+class Request:
+ ...
+
+
+class Response:
+ ...
+
+
+class ClientConverter(AbstractPacketConverterComposite[Request, Response, dict[str, Any]]):
+ def convert_to_dto_packet(self, request: Request, /) -> dict[str, Any]:
+ ...
+
+ def create_from_dto_packet(self, response_dict: dict[str, Any], /) -> Response:
+ ...
+
+
+class ServerConverter(AbstractPacketConverterComposite[Response, Request, dict[str, Any]]):
+ def convert_to_dto_packet(self, response: Response, /) -> dict[str, Any]:
+ ...
+
+ def create_from_dto_packet(self, request_dict: dict[str, Any], /) -> Request:
+ ...
+
+
+serializer = JSONSerializer()
+
+client_protocol: StreamProtocol[Request, Response] = StreamProtocol(
+ serializer=serializer,
+ converter=ClientConverter(),
+)
+server_protocol: StreamProtocol[Response, Request] = StreamProtocol(
+ serializer=serializer,
+ converter=ServerConverter(),
+)
diff --git a/docs/source/_include/examples/howto/protocols/composite_converter/stapled_packet_converter.py b/docs/source/_include/examples/howto/protocols/composite_converter/stapled_packet_converter.py
new file mode 100644
index 00000000..337791bf
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/composite_converter/stapled_packet_converter.py
@@ -0,0 +1,53 @@
+# mypy: disable-error-code=empty-body
+
+from __future__ import annotations
+
+from typing import Any
+
+from easynetwork.converter import AbstractPacketConverter, StapledPacketConverter
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+class Request:
+ ...
+
+
+class Response:
+ ...
+
+
+class RequestConverter(AbstractPacketConverter[Request, dict[str, Any]]):
+ def convert_to_dto_packet(self, request: Request, /) -> dict[str, Any]:
+ ...
+
+ def create_from_dto_packet(self, request_dict: dict[str, Any], /) -> Request:
+ ...
+
+
+class ResponseConverter(AbstractPacketConverter[Response, dict[str, Any]]):
+ def convert_to_dto_packet(self, response: Response, /) -> dict[str, Any]:
+ ...
+
+ def create_from_dto_packet(self, response_dict: dict[str, Any], /) -> Response:
+ ...
+
+
+serializer = JSONSerializer()
+request_converter = RequestConverter()
+response_converter = ResponseConverter()
+
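+# A StapledPacketConverter pairs one converter for sent packets with another for received packets.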
+client_protocol: StreamProtocol[Request, Response] = StreamProtocol(
+ serializer=serializer,
+ converter=StapledPacketConverter(
+ sent_packet_converter=request_converter,
+ received_packet_converter=response_converter,
+ ),
+)
+server_protocol: StreamProtocol[Response, Request] = StreamProtocol(
+ serializer=serializer,
+ converter=StapledPacketConverter(
+ sent_packet_converter=response_converter,
+ received_packet_converter=request_converter,
+ ),
+)
diff --git a/docs/source/_include/examples/howto/protocols/simple_converter/__init__.py b/docs/source/_include/examples/howto/protocols/simple_converter/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/protocols/simple_converter/example1.py b/docs/source/_include/examples/howto/protocols/simple_converter/example1.py
new file mode 100644
index 00000000..090574c3
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/simple_converter/example1.py
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+import dataclasses
+import uuid
+from typing import Any, TypeGuard
+
+from easynetwork.converter import AbstractPacketConverter
+from easynetwork.exceptions import PacketConversionError
+
+
+@dataclasses.dataclass
+class Person:
+ id: uuid.UUID
+ name: str
+ age: int
+ friends: list[uuid.UUID] = dataclasses.field(default_factory=list)
+
+
+class PersonToJSONConverter(AbstractPacketConverter[Person, dict[str, Any]]):
+ def convert_to_dto_packet(self, person: Person, /) -> dict[str, Any]:
+ return {
+ "id": str(person.id),
+ "name": person.name,
+ "age": person.age,
+ "friends": [str(friend_uuid) for friend_uuid in person.friends],
+ }
+
+ def create_from_dto_packet(self, packet: dict[str, Any], /) -> Person:
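+ # Validate the shape of the incoming dict with structural pattern matching.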
+ match packet:
+ case {
+ "id": str(person_id),
+ "name": str(name),
+ "age": int(age),
+ "friends": list(friends),
+ } if self._is_valid_list_of_friends(friends):
+ try:
+ person = Person(
+ id=uuid.UUID(person_id),
+ name=name,
+ age=age,
+ friends=[uuid.UUID(friend_id) for friend_id in friends],
+ )
+ except ValueError: # Invalid UUIDs
+ raise PacketConversionError("Invalid UUID fields") from None
+
+ case _:
+ raise PacketConversionError("Invalid packet format")
+
+ return person
+
+ @staticmethod
+ def _is_valid_list_of_friends(friends_list: list[Any]) -> TypeGuard[list[str]]:
+ return all(isinstance(friend_id, str) for friend_id in friends_list)
diff --git a/docs/source/_include/examples/howto/protocols/simple_converter/example2.py b/docs/source/_include/examples/howto/protocols/simple_converter/example2.py
new file mode 100644
index 00000000..ab9a448c
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/simple_converter/example2.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+import uuid
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.exceptions import DeserializeError, PacketConversionError, StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+from .example1 import Person, PersonToJSONConverter
+
+
+class PersonProtocol(StreamProtocol[Person, Person]):
+ def __init__(self) -> None:
+ serializer = JSONSerializer()
+ converter = PersonToJSONConverter()
+
+ super().__init__(serializer, converter=converter)
+
+
+def main() -> None:
+ protocol = PersonProtocol()
+
+ with TCPNetworkClient(("remote_address", 12345), protocol) as endpoint:
+ john_doe = Person(
+ id=uuid.uuid4(),
+ name="John Doe",
+ age=36,
+ friends=[uuid.uuid4() for _ in range(5)],
+ )
+
+ # Person object directly sent
+ endpoint.send_packet(john_doe)
+
+ try:
+ # The received object should be a Person instance.
+ received_person = endpoint.recv_packet()
+ except StreamProtocolParseError as exc:
+ match exc.error:
+ case DeserializeError():
+ print("It is not even a JSON object.")
+ case PacketConversionError():
+ print("It is not a valid Person in JSON object.")
+ else:
+ assert isinstance(received_person, Person)
+ print(f"Received person: {received_person!r}")
diff --git a/docs/source/_include/examples/howto/protocols/usage/__init__.py b/docs/source/_include/examples/howto/protocols/usage/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/protocols/usage/datagram_protocol.py b/docs/source/_include/examples/howto/protocols/usage/datagram_protocol.py
new file mode 100644
index 00000000..c5c24431
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/usage/datagram_protocol.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import UDPNetworkEndpoint
+from easynetwork.exceptions import DatagramProtocolParseError
+
+from ..basics.datagram_protocol_subclass import JSONDatagramProtocol
+
+
+def main() -> None:
+ protocol = JSONDatagramProtocol()
+
+ with UDPNetworkEndpoint(protocol) as endpoint:
+ endpoint.send_packet_to({"data": 42}, ("remote_address", 12345))
+
+ ...
+
+ try:
+ received_packet, sender_address = endpoint.recv_packet_from()
+ except DatagramProtocolParseError:
+ print("The received data is invalid.")
+ else:
+ print(f"Received {received_packet!r} from {sender_address}.")
diff --git a/docs/source/_include/examples/howto/protocols/usage/stream_protocol.py b/docs/source/_include/examples/howto/protocols/usage/stream_protocol.py
new file mode 100644
index 00000000..0346fc7f
--- /dev/null
+++ b/docs/source/_include/examples/howto/protocols/usage/stream_protocol.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.exceptions import StreamProtocolParseError
+
+from ..basics.stream_protocol_subclass import JSONStreamProtocol
+
+
+def main() -> None:
+ protocol = JSONStreamProtocol()
+
+ with TCPNetworkClient(("remote_address", 12345), protocol) as endpoint:
+ endpoint.send_packet({"data": 42})
+
+ ...
+
+ try:
+ received_packet = endpoint.recv_packet()
+ except StreamProtocolParseError:
+ print("The received data is invalid.")
+ else:
+ print(f"Received {received_packet!r} from {endpoint.get_remote_address()}.")
diff --git a/docs/source/_include/examples/howto/serializers/incremental_serializer/__init__.py b/docs/source/_include/examples/howto/serializers/incremental_serializer/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/serializers/incremental_serializer/example1.py b/docs/source/_include/examples/howto/serializers/incremental_serializer/example1.py
new file mode 100644
index 00000000..fa552a2c
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/incremental_serializer/example1.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+import json
+from typing import Any
+
+from easynetwork.exceptions import DeserializeError
+from easynetwork.serializers.base_stream import AutoSeparatedPacketSerializer
+
+
+class MyJSONSerializer(AutoSeparatedPacketSerializer[Any]):
+ def __init__(self, *, ensure_ascii: bool = True) -> None:
+ super().__init__(separator=b"\r\n")
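+ # The base class appends this separator when sending and splits the stream on it when receiving.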
+
+ self._ensure_ascii: bool = ensure_ascii
+
+ self._encoding: str
+ if self._ensure_ascii:
+ self._encoding = "ascii"
+ else:
+ self._encoding = "utf-8"
+
+ def serialize(self, packet: Any) -> bytes:
+ document = json.dumps(packet, ensure_ascii=self._ensure_ascii)
+ return document.encode(self._encoding)
+
+ def deserialize(self, data: bytes) -> Any:
+ try:
+ document = data.decode(self._encoding)
+ return json.loads(document)
+ except (UnicodeError, json.JSONDecodeError) as exc:
+ raise DeserializeError("JSON decode error") from exc
diff --git a/docs/source/_include/examples/howto/serializers/incremental_serializer/example2.py b/docs/source/_include/examples/howto/serializers/incremental_serializer/example2.py
new file mode 100644
index 00000000..f3ae718d
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/incremental_serializer/example2.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+import json
+from collections.abc import Generator
+from typing import Any
+
+from easynetwork.exceptions import DeserializeError, IncrementalDeserializeError
+from easynetwork.serializers.abc import AbstractIncrementalPacketSerializer
+
+
+class MyJSONSerializer(AbstractIncrementalPacketSerializer[Any]):
+ def __init__(self, *, ensure_ascii: bool = True) -> None:
+ self._ensure_ascii: bool = ensure_ascii
+
+ self._encoding: str
+ if self._ensure_ascii:
+ self._encoding = "ascii"
+ else:
+ self._encoding = "utf-8"
+
+ def _dump(self, packet: Any) -> bytes:
+ document = json.dumps(packet, ensure_ascii=self._ensure_ascii)
+ return document.encode(self._encoding)
+
+ def _load(self, data: bytes) -> Any:
+ document = data.decode(self._encoding)
+ return json.loads(document)
+
+ def serialize(self, packet: Any) -> bytes:
+ return self._dump(packet)
+
+ def deserialize(self, data: bytes) -> Any:
+ try:
+ return self._load(data)
+ except (UnicodeError, json.JSONDecodeError) as exc:
+ raise DeserializeError("JSON decode error") from exc
+
+ def incremental_serialize(self, packet: Any) -> Generator[bytes, None, None]:
+ yield self._dump(packet) + b"\r\n"
+
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[Any, bytes]]:
+ data = yield
+ newline = b"\r\n"
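+ # Buffer incoming chunks until the separator shows up.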
+ while (index := data.find(newline)) < 0:
+ data += yield
+
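+ # Everything after the separator belongs to the next packet.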
+ remainder = data[index + len(newline) :]
+ data = data[:index]
+
+ try:
+ document = self._load(data)
+ except (UnicodeError, json.JSONDecodeError) as exc:
+ raise IncrementalDeserializeError("JSON decode error", remainder) from exc
+
+ return document, remainder
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/__init__.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/example1.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example1.py
new file mode 100644
index 00000000..efa79da5
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example1.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+import json
+from typing import Any
+
+from easynetwork.serializers.abc import AbstractPacketSerializer
+
+
+class MyJSONSerializer(AbstractPacketSerializer[Any]):
+ def serialize(self, packet: Any) -> bytes:
+ document = json.dumps(packet)
+ return document.encode()
+
+ def deserialize(self, data: bytes) -> Any:
+ document = data.decode()
+ return json.loads(document)
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/example2.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example2.py
new file mode 100644
index 00000000..1f435235
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example2.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+import json
+from typing import Any
+
+from easynetwork.exceptions import DeserializeError
+from easynetwork.serializers.abc import AbstractPacketSerializer
+
+
+class MyJSONSerializer(AbstractPacketSerializer[Any]):
+ def serialize(self, packet: Any) -> bytes:
+ document = json.dumps(packet)
+ return document.encode()
+
+ def deserialize(self, data: bytes) -> Any:
+ try:
+ document = data.decode()
+ return json.loads(document)
+ except (UnicodeError, json.JSONDecodeError) as exc:
+ raise DeserializeError("JSON decode error") from exc
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/example3.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example3.py
new file mode 100644
index 00000000..f783faf7
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example3.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import json
+from typing import Any
+
+from easynetwork.exceptions import DeserializeError
+from easynetwork.serializers.abc import AbstractPacketSerializer
+
+
+class MyJSONSerializer(AbstractPacketSerializer[Any]):
+ def __init__(self, *, ensure_ascii: bool = True) -> None:
+ self._ensure_ascii: bool = ensure_ascii
+
+ self._encoding: str
+ if self._ensure_ascii:
+ self._encoding = "ascii"
+ else:
+ self._encoding = "utf-8"
+
+ def serialize(self, packet: Any) -> bytes:
+ document = json.dumps(packet, ensure_ascii=self._ensure_ascii)
+ return document.encode(self._encoding)
+
+ def deserialize(self, data: bytes) -> Any:
+ try:
+ document = data.decode(self._encoding)
+ return json.loads(document)
+ except (UnicodeError, json.JSONDecodeError) as exc:
+ raise DeserializeError("JSON decode error") from exc
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_datagram.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_datagram.py
new file mode 100644
index 00000000..7c49e8d3
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_datagram.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import UDPNetworkEndpoint
+from easynetwork.protocol import DatagramProtocol
+
+from .example3 import MyJSONSerializer
+
+
+def main() -> None:
+ serializer = MyJSONSerializer()
+ protocol = DatagramProtocol(serializer)
+
+ with UDPNetworkEndpoint(protocol) as endpoint:
+ endpoint.send_packet_to({"data": 42}, ("remote_address", 12345))
+
+ ...
diff --git a/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_stream.py b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_stream.py
new file mode 100644
index 00000000..420e49ad
--- /dev/null
+++ b/docs/source/_include/examples/howto/serializers/one_shot_serializer/example4_stream.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers.wrapper import Base64EncoderSerializer
+
+from .example3 import MyJSONSerializer
+
+
+def main() -> None:
+ # Use of Base64EncoderSerializer as an incremental serializer wrapper
+ serializer = Base64EncoderSerializer(MyJSONSerializer())
+ protocol = StreamProtocol(serializer)
+
+ with TCPNetworkClient(("remote_address", 12345), protocol) as endpoint:
+ endpoint.send_packet({"data": 42})
+
+ ...
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/__init__.py b/docs/source/_include/examples/howto/tcp_clients/basics/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_async/__init__.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example1.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example1.py
new file mode 100644
index 00000000..c62c08a0
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example1.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import asyncio
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+async def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ address = ("localhost", 9000)
+
+ async with AsyncTCPNetworkClient(address, protocol) as client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example2.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example2.py
new file mode 100644
index 00000000..c75a874a
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/connection_example2.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import asyncio
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+async def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ address = ("localhost", 9000)
+
+ try:
+ async with asyncio.timeout(30):
+ client = AsyncTCPNetworkClient(address, protocol)
+ await client.wait_connected()
+ except TimeoutError:
+ print(f"Could not connect to {address} after 30 seconds")
+ return
+
+ async with client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_async/socket_example1.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/socket_example1.py
new file mode 100644
index 00000000..fdc7e409
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_async/socket_example1.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+import asyncio
+import socket
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+async def obtain_a_connected_socket() -> socket.socket:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+ ...
+
+ return sock
+
+
+async def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ sock = await obtain_a_connected_socket()
+
+ async with AsyncTCPNetworkClient(sock, protocol) as client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/__init__.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example1.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example1.py
new file mode 100644
index 00000000..430b49e5
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example1.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ address = ("localhost", 9000)
+
+ with TCPNetworkClient(address, protocol) as client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example2.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example2.py
new file mode 100644
index 00000000..79d88608
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/connection_example2.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ address = ("localhost", 9000)
+
+ try:
+ client = TCPNetworkClient(address, protocol, connect_timeout=30)
+ except TimeoutError:
+ print(f"Could not connect to {address} after 30 seconds")
+ return
+
+ with client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/socket_example1.py b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/socket_example1.py
new file mode 100644
index 00000000..7c96fcba
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/basics/api_sync/socket_example1.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import socket
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+def obtain_a_connected_socket() -> socket.socket:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+ ...
+
+ return sock
+
+
+def main() -> None:
+ protocol = StreamProtocol(JSONSerializer())
+ sock = obtain_a_connected_socket()
+
+ with TCPNetworkClient(sock, protocol) as client:
+ print(f"Remote address: {client.get_remote_address()}")
+
+ ...
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/howto/tcp_clients/concurrency/__init__.py b/docs/source/_include/examples/howto/tcp_clients/concurrency/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/tcp_clients/concurrency/api_async.py b/docs/source/_include/examples/howto/tcp_clients/concurrency/api_async.py
new file mode 100644
index 00000000..68840ad6
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/concurrency/api_async.py
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+import asyncio
+import contextlib
+import traceback
+from typing import Any, TypeAlias
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.exceptions import StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+RequestType: TypeAlias = dict[str, Any]
+ResponseType: TypeAlias = dict[str, Any]
+
+
+async def consumer(response: ResponseType) -> None:
+ # Do some stuff...
+
+ print(response)
+
+
+async def receiver_worker(
+ client: AsyncTCPNetworkClient[RequestType, ResponseType],
+ task_group: asyncio.TaskGroup,
+) -> None:
+ while True:
+ try:
+ # Pass timeout=None to get an infinite iterator.
+ # It will be terminated when the client.aclose() method has been called.
+ async for response in client.iter_received_packets(timeout=None):
+ _ = task_group.create_task(consumer(response))
+ except StreamProtocolParseError:
+ print("Parsing error")
+ traceback.print_exc()
+ continue
+
+
+async def do_main_stuff(
+ client: AsyncTCPNetworkClient[RequestType, ResponseType],
+) -> None:
+ while True:
+ # Do some stuff...
+ request = {"data": 42}
+
+ await client.send_packet(request)
+
+
+async def main() -> None:
+ remote_address = ("localhost", 9000)
+ protocol = StreamProtocol(JSONSerializer())
+
+ async with contextlib.AsyncExitStack() as exit_stack:
+ # task group setup
+ task_group = await exit_stack.enter_async_context(asyncio.TaskGroup())
+
+ # connect to remote
+ client = AsyncTCPNetworkClient(remote_address, protocol)
+ await exit_stack.enter_async_context(client)
+
+ # receiver_worker task setup
+ _ = task_group.create_task(receiver_worker(client, task_group))
+
+ # Setup done, let's go
+ await do_main_stuff(client)
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/howto/tcp_clients/concurrency/api_sync.py b/docs/source/_include/examples/howto/tcp_clients/concurrency/api_sync.py
new file mode 100644
index 00000000..141c06da
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/concurrency/api_sync.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+import contextlib
+import threading
+import traceback
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, TypeAlias
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.exceptions import StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+RequestType: TypeAlias = dict[str, Any]
+ResponseType: TypeAlias = dict[str, Any]
+
+
+def consumer(response: ResponseType) -> None:
+ # Do some stuff...
+
+ print(response)
+
+
+def receiver_worker(
+ client: TCPNetworkClient[RequestType, ResponseType],
+ executor: ThreadPoolExecutor,
+) -> None:
+ while True:
+ try:
+ # Pass timeout=None to get an infinite iterator.
+ # It will be terminated when the client.close() method has been called.
+ for response in client.iter_received_packets(timeout=None):
+ executor.submit(consumer, response)
+ except StreamProtocolParseError:
+ print("Parsing error")
+ traceback.print_exc()
+ continue
+
+
+def do_main_stuff(client: TCPNetworkClient[RequestType, ResponseType]) -> None:
+ while True:
+ # Do some stuff...
+ request = {"data": 42}
+
+ client.send_packet(request)
+
+
+def main() -> None:
+ remote_address = ("localhost", 9000)
+ protocol = StreamProtocol(JSONSerializer())
+
+ with contextlib.ExitStack() as exit_stack:
+ # thread pool executor setup
+ executor = exit_stack.enter_context(ThreadPoolExecutor())
+
+ # connect to remote
+ client = TCPNetworkClient(remote_address, protocol)
+
+ # receiver_worker thread setup
+ receiver_worker_thread = threading.Thread(
+ target=receiver_worker,
+ args=(client, executor),
+ )
+ receiver_worker_thread.start()
+ exit_stack.callback(receiver_worker_thread.join)
+
+ # register the client for closing, so it is closed
+ # before the thread is joined
+ exit_stack.enter_context(client)
+
+ # Setup done, let's go
+ do_main_stuff(client)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/howto/tcp_clients/concurrency/ssl_shared_lock.py b/docs/source/_include/examples/howto/tcp_clients/concurrency/ssl_shared_lock.py
new file mode 100644
index 00000000..1c353be4
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/concurrency/ssl_shared_lock.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+
+def ssl_shared_lock_for_sync_client() -> None:
+ remote_address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ client = TCPNetworkClient(
+ remote_address,
+ protocol,
+ ssl=True,
+ ssl_shared_lock=False,
+ )
+
+
+async def ssl_shared_lock_for_async_client() -> None:
+ remote_address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ client = AsyncTCPNetworkClient(
+ remote_address,
+ protocol,
+ ssl=True,
+ ssl_shared_lock=False,
+ )
diff --git a/docs/source/_include/examples/howto/tcp_clients/usage/__init__.py b/docs/source/_include/examples/howto/tcp_clients/usage/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/source/_include/examples/howto/tcp_clients/usage/api_async.py b/docs/source/_include/examples/howto/tcp_clients/usage/api_async.py
new file mode 100644
index 00000000..743bc70f
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/usage/api_async.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import asyncio
+import socket
+from typing import Any
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+from easynetwork.exceptions import StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+###############
+# Basic usage #
+###############
+
+
+async def send_packet_example1(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ await client.send_packet({"data": 42})
+
+
+async def recv_packet_example1(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ packet = await client.recv_packet()
+ print(f"Received packet: {packet!r}")
+
+
+async def recv_packet_example2(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ try:
+ async with asyncio.timeout(30):
+ packet = await client.recv_packet()
+ except TimeoutError:
+ print("Timed out")
+ else:
+ print(f"Received packet: {packet!r}")
+
+
+async def recv_packet_example3(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ try:
+ async with asyncio.timeout(30):
+ packet = await client.recv_packet()
+ except StreamProtocolParseError:
+ print("Received something, but was not valid")
+ except TimeoutError:
+ print("Timed out")
+ else:
+ print(f"Received packet: {packet!r}")
+
+
+async def recv_packet_example4(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ all_packets = [p async for p in client.iter_received_packets()]
+
+
+async def recv_packet_example5(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ all_packets = [p async for p in client.iter_received_packets(timeout=1)]
+
+
+##################
+# Advanced usage #
+##################
+
+
+async def send_eof_example(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ await client.send_eof()
+
+
+async def socket_proxy_example(client: AsyncTCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ client.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, False)
+
+
+async def max_recv_size_example() -> None:
+ address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ async with AsyncTCPNetworkClient(address, protocol, max_recv_size=1024) as client:
+ # Only do socket.recv(1024) calls
+ packet = await client.recv_packet()
+
+
+async def ssl_default_context_example() -> None:
+ address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ async with AsyncTCPNetworkClient(address, protocol, ssl=True) as client:
+ await client.send_packet({"data": 42})
+
+ packet = await client.recv_packet()
diff --git a/docs/source/_include/examples/howto/tcp_clients/usage/api_sync.py b/docs/source/_include/examples/howto/tcp_clients/usage/api_sync.py
new file mode 100644
index 00000000..eb5d4255
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_clients/usage/api_sync.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+import socket
+from typing import Any
+
+from easynetwork.api_sync.client import TCPNetworkClient
+from easynetwork.exceptions import StreamProtocolParseError
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+###############
+# Basic usage #
+###############
+
+
+def send_packet_example1(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ client.send_packet({"data": 42})
+
+
+def recv_packet_example1(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ packet = client.recv_packet()
+ print(f"Received packet: {packet!r}")
+
+
+def recv_packet_example2(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ try:
+ packet = client.recv_packet(timeout=30)
+ except TimeoutError:
+ print("Timed out")
+ else:
+ print(f"Received packet: {packet!r}")
+
+
+def recv_packet_example3(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ try:
+ packet = client.recv_packet(timeout=30)
+ except StreamProtocolParseError:
+ print("Received something, but was not valid")
+ except TimeoutError:
+ print("Timed out")
+ else:
+ print(f"Received packet: {packet!r}")
+
+
+def recv_packet_example4(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ all_packets = [p for p in client.iter_received_packets()]
+
+
+def recv_packet_example5(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ all_packets = [p for p in client.iter_received_packets(timeout=1)]
+
+
+##################
+# Advanced usage #
+##################
+
+
+def send_eof_example(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ client.send_eof()
+
+
+def socket_proxy_example(client: TCPNetworkClient[Any, Any]) -> None:
+ # [start]
+ client.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, False)
+
+
+def max_recv_size_example() -> None:
+ address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ with TCPNetworkClient(address, protocol, max_recv_size=1024) as client:
+ # Only do socket.recv(1024) calls
+ packet = client.recv_packet()
+
+
+def ssl_default_context_example() -> None:
+ address = ("remote_address", 12345)
+ protocol = StreamProtocol(JSONSerializer())
+
+ # [start]
+ with TCPNetworkClient(address, protocol, ssl=True) as client:
+ client.send_packet({"data": 42})
+
+ packet = client.recv_packet()
diff --git a/docs/source/_include/examples/howto/tcp_servers/async_server.py b/docs/source/_include/examples/howto/tcp_servers/async_server.py
new file mode 100644
index 00000000..db75540f
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_servers/async_server.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import asyncio
+from collections.abc import AsyncGenerator
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler, AsyncTCPNetworkServer
+from easynetwork.protocol import StreamProtocol
+
+
+class Request:
+ ...
+
+
+class Response:
+ ...
+
+
+class MyRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ request: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+
+# NOTE: The sent packet is "Response" and the received packet is "Request"
+class ServerProtocol(StreamProtocol[Response, Request]):
+ def __init__(self) -> None:
+ ...
+
+
+async def main() -> None:
+ host, port = "localhost", 9000
+ protocol = ServerProtocol()
+ handler = MyRequestHandler()
+
+ # Create the server, binding to localhost on port 9000
+ async with AsyncTCPNetworkServer(host, port, protocol, handler) as server:
+ # Activate the server; this will keep running until you
+ # interrupt the program with Ctrl-C
+ await server.serve_forever()
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/howto/tcp_servers/request_handler_explanation.py b/docs/source/_include/examples/howto/tcp_servers/request_handler_explanation.py
new file mode 100644
index 00000000..36d6b63c
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_servers/request_handler_explanation.py
@@ -0,0 +1,257 @@
+from __future__ import annotations
+
+import asyncio
+import contextlib
+from collections.abc import AsyncGenerator
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler, AsyncTCPNetworkServer
+from easynetwork.exceptions import StreamProtocolParseError
+
+
+class Request:
+ ...
+
+
+class Response:
+ ...
+
+
+class BadRequest(Response):
+ ...
+
+
+class InternalError(Response):
+ ...
+
+
+class TimedOut(Response):
+ ...
+
+
+class MinimumRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ ### Before 'yield'
+ # Initializes the generator.
+ # This is the setup part before receiving a request.
+ # There is not much to do here.
+ ##################
+
+ request: Request = yield
+
+ ### After 'yield'
+ # Once the server has sent you the client's request,
+ # you can do whatever you want with it and send responses back
+ # to the client if necessary.
+ await client.send_packet(Response())
+ #################
+
+ ### On a 'return'
+ # When handle() returns, this means that the handling of ONE request
+ # has finished. There is no connection close or whatever.
+ # The server will immediately create a new generator.
+ #################
+ return
+
+
+class ConnectionCloseRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ request: Request = yield
+
+ await client.send_packet(Response())
+
+ # At this point, the transport is closed and the server
+ # will not create a new generator.
+ await client.aclose()
+
+
+class ConnectionCloseWithContextRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ async with contextlib.aclosing(client):
+ request: Request = yield
+
+ await client.send_packet(Response())
+
+
+class ConnectionCloseBeforeYieldRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ if not self.should_handle(client):
+ return
+
+ request: Request = yield
+
+ def should_handle(self, client: AsyncStreamClient[Response]) -> bool:
+ return True
+
+
+class ErrorHandlingInRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ try:
+ # *All* exceptions are thrown through the "yield" statement
+ # (including BaseException). But you should only catch Exception subclasses.
+ request: Request = yield
+ except StreamProtocolParseError:
+ await client.send_packet(BadRequest())
+ except OSError:
+ # It is possible that something went wrong with the underlying
+ # transport (the socket) at the OS level.
+ # You should check if the client is still usable.
+ try:
+ await client.send_packet(InternalError())
+ except OSError:
+ await client.aclose()
+ raise
+ except Exception:
+ await client.send_packet(InternalError())
+ else:
+ await client.send_packet(Response())
+
+
+class MultipleYieldInRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ request: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+ if self.need_something_else(request, client):
+ additional_data: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+ def need_something_else(self, request: Request, client: AsyncStreamClient[Response]) -> bool:
+ return True
+
+
+class ClientLoopInRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ # Close the client when leaving the loop
+ async with contextlib.aclosing(client):
+ # Ask the user to log in
+ initial_user_info: Request = yield
+
+ ...
+
+ # Successfully logged in
+ await client.send_packet(Response())
+
+ # Start handling requests
+ while not client.is_closing():
+ request: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+
+class TimeoutRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ try:
+ async with asyncio.timeout(30):
+ # The client has 30 seconds to send the request to the server.
+ request: Request = yield
+ except TimeoutError:
+ await client.send_packet(TimedOut())
+ else:
+ await client.send_packet(Response())
+
+
+class ClientConnectionHooksRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def on_connection(self, client: AsyncStreamClient[Response]) -> None:
+ print(f"{client.address} is connected")
+
+ # Notify the client that the service is ready.
+ await client.send_packet(Response())
+
+ async def on_disconnection(self, client: AsyncStreamClient[Response]) -> None:
+ # Perform service shutdown clean-up
+ ...
+
+ print(f"{client.address} is disconnected")
+
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ request: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+
+class ClientConnectionAsyncGenRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def on_connection(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ # Ask the user to log in
+ initial_user_info: Request = yield
+
+ ...
+
+ # Successfully logged in
+ await client.send_packet(Response())
+
+ async def on_disconnection(self, client: AsyncStreamClient[Response]) -> None:
+ # Perform log-out clean-up
+ ...
+
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ request: Request = yield
+
+ ...
+
+ await client.send_packet(Response())
+
+
+class ServiceInitializationHookRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ async def service_init(
+ self,
+ exit_stack: contextlib.AsyncExitStack,
+ server: AsyncTCPNetworkServer[Request, Response],
+ ) -> None:
+ exit_stack.callback(self._service_quit)
+
+ self.background_tasks = await exit_stack.enter_async_context(asyncio.TaskGroup())
+
+ _ = self.background_tasks.create_task(self._service_actions())
+
+ async def _service_actions(self) -> None:
+ while True:
+ await asyncio.sleep(1)
+
+ # Do some stuff each second in background
+ ...
+
+ def _service_quit(self) -> None:
+ print("Service stopped")
diff --git a/docs/source/_include/examples/howto/tcp_servers/simple_request_handler.py b/docs/source/_include/examples/howto/tcp_servers/simple_request_handler.py
new file mode 100644
index 00000000..aca0cdaa
--- /dev/null
+++ b/docs/source/_include/examples/howto/tcp_servers/simple_request_handler.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler
+
+
+class Request:
+ """Object representing the client request."""
+
+ ...
+
+
+class Response:
+ """Object representing the response to send to the client."""
+
+ ...
+
+
+class MyRequestHandler(AsyncStreamRequestHandler[Request, Response]):
+ """
+ The request handler class for our server.
+
+ It is instantiated once per server, and must
+ override the handle() method to implement communication with the
+ client.
+ """
+
+ async def handle(
+ self,
+ client: AsyncStreamClient[Response],
+ ) -> AsyncGenerator[None, Request]:
+ # "client" is the write stream of the connection to the remote host.
+ # The read stream is handled by the server and the incoming
+ # request is sent through the "yield" statement.
+ request: Request = yield
+
+ # Do some stuff
+ ...
+
+ response = Response()
+
+ await client.send_packet(response)
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_client.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_client.py
new file mode 100644
index 00000000..401334f5
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_client.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import asyncio
+import sys
+
+from easynetwork.api_async.client import AsyncTCPNetworkClient
+
+from json_protocol import JSONProtocol
+
+
+async def main() -> None:
+ host = "localhost"
+ port = 9000
+
+ # Connect to server
+ async with AsyncTCPNetworkClient((host, port), JSONProtocol()) as client:
+ # Send data
+ request = {"command-line arguments": sys.argv[1:]}
+ await client.send_packet(request)
+
+ # Receive data from the server and shut down
+ response = await client.recv_packet()
+
+ print(f"Sent: {request}")
+ print(f"Received: {response}")
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_server.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_server.py
new file mode 100644
index 00000000..ffc659d4
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/async_server.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+import asyncio
+
+from easynetwork.api_async.server import AsyncTCPNetworkServer
+
+from echo_request_handler import EchoRequestHandler
+from json_protocol import JSONProtocol
+
+
+async def main() -> None:
+ host = None
+ port = 9000
+ protocol = JSONProtocol()
+ handler = EchoRequestHandler()
+
+ async with AsyncTCPNetworkServer(host, port, protocol, handler) as server:
+ try:
+ await server.serve_forever()
+ except asyncio.CancelledError:
+ pass
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/client.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/client.py
new file mode 100644
index 00000000..06ecef94
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/client.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+import sys
+
+from easynetwork.api_sync.client import TCPNetworkClient
+
+from json_protocol import JSONProtocol
+
+
+def main() -> None:
+ host = "localhost"
+ port = 9000
+
+ # Connect to server
+ with TCPNetworkClient((host, port), JSONProtocol()) as client:
+ # Send data
+ request = {"command-line arguments": sys.argv[1:]}
+ client.send_packet(request)
+
+ # Receive data from the server and shut down
+ response = client.recv_packet()
+
+ print(f"Sent: {request}")
+ print(f"Received: {response}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/echo_request_handler.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/echo_request_handler.py
new file mode 100644
index 00000000..f6d1e7e1
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/echo_request_handler.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+from collections.abc import AsyncGenerator
+from typing import Any, TypeAlias
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler
+from easynetwork.exceptions import StreamProtocolParseError
+
+# These type aliases are here to help you understand
+# where requests and responses are used
+RequestType: TypeAlias = Any
+ResponseType: TypeAlias = Any
+
+
+class EchoRequestHandler(AsyncStreamRequestHandler[RequestType, ResponseType]):
+ async def handle(
+ self,
+ client: AsyncStreamClient[ResponseType],
+ ) -> AsyncGenerator[None, RequestType]:
+ try:
+ request: RequestType = yield # A JSON request has been sent by this client
+ except StreamProtocolParseError:
+ # Invalid JSON data sent
+ # This is an example of how you can answer to an invalid request
+ await client.send_packet({"error": "Invalid JSON", "code": "parse_error"})
+ return
+
+ print(f"{client.address.host} sent {request}")
+
+ # As a good echo handler, the request is sent back to the client
+ response: ResponseType = request
+ await client.send_packet(response)
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/json_protocol.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/json_protocol.py
new file mode 100644
index 00000000..36d5a859
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/json_protocol.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import JSONSerializer
+
+# Type aliases are used to avoid seeing two "Any" types without real meaning
+# In our case, any serializable object will be sent/received
+SentDataType: TypeAlias = Any
+ReceivedDataType: TypeAlias = Any
+
+
+class JSONProtocol(StreamProtocol[SentDataType, ReceivedDataType]):
+ def __init__(self) -> None:
+ super().__init__(JSONSerializer())
diff --git a/docs/source/_include/examples/tutorials/echo_client_server_tcp/server.py b/docs/source/_include/examples/tutorials/echo_client_server_tcp/server.py
new file mode 100644
index 00000000..cd2965fb
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/echo_client_server_tcp/server.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from easynetwork.api_sync.server import StandaloneTCPNetworkServer
+
+from echo_request_handler import EchoRequestHandler
+from json_protocol import JSONProtocol
+
+
+def main() -> None:
+ host = None
+ port = 9000
+ protocol = JSONProtocol()
+ handler = EchoRequestHandler()
+
+ with StandaloneTCPNetworkServer(host, port, protocol, handler) as server:
+ try:
+ server.serve_forever()
+ except KeyboardInterrupt:
+ pass
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/tutorials/ftp_server/async_server.py b/docs/source/_include/examples/tutorials/ftp_server/async_server.py
new file mode 100644
index 00000000..1f6c7091
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/async_server.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+
+from easynetwork.api_async.server import AsyncTCPNetworkServer
+
+from ftp_reply import FTPReply
+from ftp_request import FTPRequest
+from ftp_server_protocol import FTPServerProtocol
+from ftp_server_request_handler import FTPRequestHandler
+
+
+class AsyncFTPServer(AsyncTCPNetworkServer[FTPRequest, FTPReply]):
+ def __init__(
+ self,
+ host: str | Sequence[str] | None = None,
+ port: int = 21000,
+ ) -> None:
+ super().__init__(host, port, FTPServerProtocol(), FTPRequestHandler())
+
+
+if __name__ == "__main__":
+ import asyncio
+ import logging
+
+ async def main() -> None:
+ logging.basicConfig(
+ level=logging.INFO,
+ format="[ %(levelname)s ] [ %(name)s ] %(message)s",
+ )
+ async with AsyncFTPServer() as server:
+ try:
+ await server.serve_forever()
+ except asyncio.CancelledError:
+ pass
+
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_command.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_command.py
new file mode 100644
index 00000000..bdceb020
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_command.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from enum import StrEnum, auto
+
+
+class FTPCommand(StrEnum):
+ """This is an enumeration of all the commands defined in RFC 959.
+
+ If the client sends a command that is not one of these,
+ the server will reply with a 500 error code.
+
+ If the server has not implemented one of these commands,
+ it will reply with a 502 error code.
+ """
+
+ @staticmethod
+ def _generate_next_value_(
+ name: str,
+ start: int,
+ count: int,
+ last_values: list[str],
+ ) -> str:
+ assert name.isupper()
+ return name
+
+ ABOR = auto()
+ ACCT = auto()
+ ALLO = auto()
+ APPE = auto()
+ CDUP = auto()
+ CWD = auto()
+ DELE = auto()
+ HELP = auto()
+ LIST = auto()
+ MKD = auto()
+ MODE = auto()
+ NOOP = auto()
+ PASS = auto()
+ PASV = auto()
+ PORT = auto()
+ PWD = auto()
+ QUIT = auto()
+ REIN = auto()
+ REST = auto()
+ RETR = auto()
+ RMD = auto()
+ RNFR = auto()
+ RNTO = auto()
+ SITE = auto()
+ SMNT = auto()
+ STAT = auto()
+ STOR = auto()
+ STOU = auto()
+ STRU = auto()
+ SYST = auto()
+ TYPE = auto()
+ USER = auto()
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_converters.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_converters.py
new file mode 100644
index 00000000..46c22f08
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_converters.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+from easynetwork.converter import AbstractPacketConverter
+from easynetwork.exceptions import PacketConversionError
+
+from ftp_command import FTPCommand
+from ftp_reply import FTPReply
+from ftp_request import FTPRequest
+
+
+class FTPRequestConverter(AbstractPacketConverter[FTPRequest, str]):
+ """Converter to switch between FTPRequest objects and strings."""
+
+ def convert_to_dto_packet(self, obj: FTPRequest) -> str:
+ """Creates the string representation of the FTPRequest object
+
+ Not implemented.
+ """
+
+ raise NotImplementedError("Not needed in server side")
+
+ def create_from_dto_packet(self, packet: str) -> FTPRequest:
+ """Builds an FTPRequest object from a raw string
+
+ >>> c = FTPRequestConverter()
+ >>> c.create_from_dto_packet("NOOP")
+ FTPRequest(command=<FTPCommand.NOOP: 'NOOP'>, args=())
+ >>> c.create_from_dto_packet("qUiT")
+ FTPRequest(command=<FTPCommand.QUIT: 'QUIT'>, args=())
+ >>> c.create_from_dto_packet("STOR /path/file.txt")
+ FTPRequest(command=<FTPCommand.STOR: 'STOR'>, args=('/path/file.txt',))
+ >>> c.create_from_dto_packet("invalid command")
+ Traceback (most recent call last):
+ ...
+ easynetwork.exceptions.PacketConversionError: Command unrecognized: 'INVALID'
+
+ Parameters:
+ packet: The string representation of the request
+
+ Returns:
+ the FTP request
+ """
+ command, *args = packet.split(" ")
+ command = command.upper()
+ try:
+ command = FTPCommand(command)
+ except ValueError as exc:
+ raise PacketConversionError(f"Command unrecognized: {command!r}") from exc
+ return FTPRequest(command, tuple(args))
+
+
+class FTPReplyConverter(AbstractPacketConverter[FTPReply, str]):
+ """Converter to switch between FTPReply objects and strings."""
+
+ def convert_to_dto_packet(self, obj: FTPReply) -> str:
+ """Creates the string representation of the FTPReply object
+
+ >>> c = FTPReplyConverter()
+ >>> c.convert_to_dto_packet(FTPReply(200, "Command okay."))
+ '200 Command okay.'
+ >>> c.convert_to_dto_packet(FTPReply(10, "Does not exist but why not."))
+ '010 Does not exist but why not.'
+
+ Parameters:
+ obj: The FTPReply object
+
+ Returns:
+ the string representation of the reply
+ """
+
+ code: int = obj.code
+ message: str = obj.message
+
+ assert 0 <= code < 1000, f"Invalid reply code {code:d}"
+
+ # Multi-line replies exist, but we won't deal with them in this tutorial.
+ assert "\n" not in message, "message contains newline character"
+ separator = " "
+
+ return f"{code:03d}{separator}{message}"
+
+ def create_from_dto_packet(self, packet: str) -> FTPReply:
+ """Builds an FTPReply object from a raw string
+
+ Not implemented.
+ """
+ raise NotImplementedError("Not needed in server side")
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_reply.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_reply.py
new file mode 100644
index 00000000..3b02ddc0
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_reply.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass(frozen=True)
+class FTPReply:
+ """Dataclass defining an FTP reply."""
+
+ code: int
+ """The reply code."""
+
+ message: str
+ """A line of text following the reply code."""
+
+ @staticmethod
+ def ok() -> FTPReply:
+ return FTPReply(200, "Command okay.")
+
+ @staticmethod
+ def service_ready_for_new_user() -> FTPReply:
+ return FTPReply(220, "Service ready for new user.")
+
+ @staticmethod
+ def connection_close(*, unexpected: bool = False) -> FTPReply:
+ if unexpected:
+ return FTPReply(421, "Service not available, closing control connection.")
+ return FTPReply(221, "Service closing control connection.")
+
+ @staticmethod
+ def syntax_error() -> FTPReply:
+ return FTPReply(500, "Syntax error, command unrecognized.")
+
+ @staticmethod
+ def not_implemented_error() -> FTPReply:
+ return FTPReply(502, "Command not implemented.")
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_request.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_request.py
new file mode 100644
index 00000000..e118e648
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_request.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any
+
+from ftp_command import FTPCommand
+
+
+@dataclass(frozen=True, match_args=True)
+class FTPRequest:
+ """Dataclass defining an FTP request"""
+
+ command: FTPCommand
+ """Command name."""
+
+ args: tuple[Any, ...]
+ """Command arguments sequence. May be empty."""
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_server_protocol.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_server_protocol.py
new file mode 100644
index 00000000..3d7623cc
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_server_protocol.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from easynetwork.converter import StapledPacketConverter
+from easynetwork.protocol import StreamProtocol
+from easynetwork.serializers import StringLineSerializer
+
+from ftp_converters import FTPReplyConverter, FTPRequestConverter
+from ftp_reply import FTPReply
+from ftp_request import FTPRequest
+
+
+class FTPServerProtocol(StreamProtocol[FTPReply, FTPRequest]):
+ def __init__(self) -> None:
+ request_converter = FTPRequestConverter()
+ response_converter = FTPReplyConverter()
+
+ super().__init__(
+ serializer=StringLineSerializer(newline="CRLF", encoding="ascii"),
+ converter=StapledPacketConverter(
+ sent_packet_converter=response_converter,
+ received_packet_converter=request_converter,
+ ),
+ )
diff --git a/docs/source/_include/examples/tutorials/ftp_server/ftp_server_request_handler.py b/docs/source/_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
new file mode 100644
index 00000000..504e630f
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import contextlib
+import logging
+from collections.abc import AsyncGenerator
+from typing import Any
+
+from easynetwork.api_async.server import AsyncStreamClient, AsyncStreamRequestHandler
+from easynetwork.exceptions import StreamProtocolParseError
+
+from ftp_command import FTPCommand
+from ftp_reply import FTPReply
+from ftp_request import FTPRequest
+
+
+class FTPRequestHandler(AsyncStreamRequestHandler[FTPRequest, FTPReply]):
+ async def service_init(
+ self,
+ exit_stack: contextlib.AsyncExitStack,
+ server: Any,
+ ) -> None:
+ self.logger = logging.getLogger(self.__class__.__name__)
+
+ async def on_connection(self, client: AsyncStreamClient[FTPReply]) -> None:
+ await client.send_packet(FTPReply.service_ready_for_new_user())
+
+ async def on_disconnection(self, client: AsyncStreamClient[FTPReply]) -> None:
+ with contextlib.suppress(ConnectionError):
+ if not client.is_closing():
+ await client.send_packet(FTPReply.connection_close(unexpected=True))
+
+ async def handle(
+ self,
+ client: AsyncStreamClient[FTPReply],
+ ) -> AsyncGenerator[None, FTPRequest]:
+ try:
+ request: FTPRequest = yield
+ except StreamProtocolParseError as exc:
+ self.logger.warning(
+ "%s: %s: %s",
+ client.address,
+ type(exc.error).__name__,
+ exc.error,
+ )
+ await client.send_packet(FTPReply.syntax_error())
+ return
+
+ self.logger.info("Sent by client %s: %s", client.address, request)
+ match request:
+ case FTPRequest(FTPCommand.NOOP):
+ await client.send_packet(FTPReply.ok())
+
+ case FTPRequest(FTPCommand.QUIT):
+ async with contextlib.aclosing(client):
+ await client.send_packet(FTPReply.connection_close())
+
+ case _:
+ await client.send_packet(FTPReply.not_implemented_error())
diff --git a/docs/source/_include/examples/tutorials/ftp_server/server.py b/docs/source/_include/examples/tutorials/ftp_server/server.py
new file mode 100644
index 00000000..4ae7b7eb
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/ftp_server/server.py
@@ -0,0 +1,36 @@
+from __future__ import annotations
+
+from collections.abc import Sequence
+
+from easynetwork.api_sync.server import StandaloneTCPNetworkServer
+
+from ftp_reply import FTPReply
+from ftp_request import FTPRequest
+from ftp_server_protocol import FTPServerProtocol
+from ftp_server_request_handler import FTPRequestHandler
+
+
+class FTPServer(StandaloneTCPNetworkServer[FTPRequest, FTPReply]):
+ def __init__(
+ self,
+ host: str | Sequence[str] | None = None,
+ port: int = 21000,
+ ) -> None:
+ super().__init__(host, port, FTPServerProtocol(), FTPRequestHandler())
+
+
+if __name__ == "__main__":
+ import logging
+
+ def main() -> None:
+ logging.basicConfig(
+ level=logging.INFO,
+ format="[ %(levelname)s ] [ %(name)s ] %(message)s",
+ )
+ with FTPServer() as server:
+ try:
+ server.serve_forever()
+ except KeyboardInterrupt:
+ pass
+
+ main()
diff --git a/docs/source/_include/examples/tutorials/udp_endpoint/async_endpoint.py b/docs/source/_include/examples/tutorials/udp_endpoint/async_endpoint.py
new file mode 100644
index 00000000..39ee6e5a
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/udp_endpoint/async_endpoint.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+import asyncio
+import sys
+from typing import Any
+
+from easynetwork.api_async.client.udp import AsyncUDPNetworkEndpoint
+
+from json_protocol import JSONProtocol
+
+
+async def sender(
+ endpoint: AsyncUDPNetworkEndpoint[Any, Any],
+ address: tuple[str, int],
+ to_send: list[str],
+) -> None:
+ # Send data to the specified address
+ sent_data = {"command-line arguments": to_send}
+ await endpoint.send_packet_to(sent_data, address)
+
+ # Receive data and shut down
+ received_data, sender_address = await endpoint.recv_packet_from()
+
+ print(f"Sent to {address[:2]} : {sent_data}")
+ print(f"Received from {sender_address} : {received_data}")
+
+
+async def receiver(endpoint: AsyncUDPNetworkEndpoint[Any, Any]) -> None:
+ # JSON data has been sent by "sender_address"
+ received_data, sender_address = await endpoint.recv_packet_from()
+
+ print(f"From {sender_address}: {received_data}")
+
+ # Send back to the sender
+ await endpoint.send_packet_to(received_data, sender_address)
+
+
+async def main() -> None:
+ async with AsyncUDPNetworkEndpoint(JSONProtocol()) as endpoint:
+ match sys.argv[1:]:
+ case ["sender", address_string, *to_send]:
+ host, port_string = address_string.split(",")
+ port = int(port_string)
+
+ await sender(endpoint, (host, port), to_send)
+
+ case ["receiver"]:
+ print(f"Receiver available on {endpoint.get_local_address()}")
+
+ await receiver(endpoint)
+
+ case _:
+ raise ValueError("Invalid arguments")
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/docs/source/_include/examples/tutorials/udp_endpoint/endpoint.py b/docs/source/_include/examples/tutorials/udp_endpoint/endpoint.py
new file mode 100644
index 00000000..21436ad1
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/udp_endpoint/endpoint.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import sys
+from typing import Any
+
+from easynetwork.api_sync.client.udp import UDPNetworkEndpoint
+
+from json_protocol import JSONProtocol
+
+
+def sender(
+ endpoint: UDPNetworkEndpoint[Any, Any],
+ address: tuple[str, int],
+ to_send: list[str],
+) -> None:
+ # Send data to the specified address
+ sent_data = {"command-line arguments": to_send}
+ endpoint.send_packet_to(sent_data, address)
+
+ # Receive data and shut down
+ received_data, sender_address = endpoint.recv_packet_from()
+
+ print(f"Sent to {address[:2]} : {sent_data}")
+ print(f"Received from {sender_address} : {received_data}")
+
+
+def receiver(endpoint: UDPNetworkEndpoint[Any, Any]) -> None:
+ # JSON data has been sent by "sender_address"
+ received_data, sender_address = endpoint.recv_packet_from()
+
+ print(f"From {sender_address}: {received_data}")
+
+ # Send back to the sender
+ endpoint.send_packet_to(received_data, sender_address)
+
+
+def main() -> None:
+ with UDPNetworkEndpoint(JSONProtocol()) as endpoint:
+ match sys.argv[1:]:
+ case ["sender", address_string, *to_send]:
+ host, port_string = address_string.split(",")
+ port = int(port_string)
+
+ sender(endpoint, (host, port), to_send)
+
+ case ["receiver"]:
+ print(f"Receiver available on {endpoint.get_local_address()}")
+
+ receiver(endpoint)
+
+ case _:
+ raise ValueError("Invalid arguments")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/docs/source/_include/examples/tutorials/udp_endpoint/json_protocol.py b/docs/source/_include/examples/tutorials/udp_endpoint/json_protocol.py
new file mode 100644
index 00000000..5a1f115e
--- /dev/null
+++ b/docs/source/_include/examples/tutorials/udp_endpoint/json_protocol.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from typing import Any, TypeAlias
+
+from easynetwork.protocol import DatagramProtocol
+from easynetwork.serializers import JSONSerializer
+
+# Type aliases are used to avoid seeing two "Any" types without real meaning
+# In our case, any serializable object will be sent/received
+SentDataType: TypeAlias = Any
+ReceivedDataType: TypeAlias = Any
+
+
+class JSONProtocol(DatagramProtocol[SentDataType, ReceivedDataType]):
+ def __init__(self) -> None:
+ super().__init__(JSONSerializer())
diff --git a/docs/source/_include/link-labels.rst b/docs/source/_include/link-labels.rst
new file mode 100644
index 00000000..58f0d99d
--- /dev/null
+++ b/docs/source/_include/link-labels.rst
@@ -0,0 +1,11 @@
+.. Links
+
+.. _UDP: https://en.wikipedia.org/wiki/User_Datagram_Protocol
+
+.. _TCP: https://en.wikipedia.org/wiki/Transmission_Control_Protocol
+
+.. _FTP: https://en.wikipedia.org/wiki/File_Transfer_Protocol
+
+.. _File Transfer Protocol: `FTP`_
+
+.. _Telnet Protocol: https://en.wikipedia.org/wiki/Telnet
diff --git a/docs/source/_include/see-also-optional-dependencies.rst b/docs/source/_include/see-also-optional-dependencies.rst
new file mode 100644
index 00000000..d2ea862f
--- /dev/null
+++ b/docs/source/_include/see-also-optional-dependencies.rst
@@ -0,0 +1,4 @@
+.. seealso::
+
+ :ref:`optional-dependencies`
+ Explains how to install required dependencies.
diff --git a/docs/source/_include/sync-async-variants.rst b/docs/source/_include/sync-async-variants.rst
new file mode 100644
index 00000000..49b99de1
--- /dev/null
+++ b/docs/source/_include/sync-async-variants.rst
@@ -0,0 +1,10 @@
+.. note::
+
+ This page uses two different API variants:
+
+ * Synchronous API with classic ``def`` functions, usable in any context.
+
+ * Asynchronous API with ``async def`` functions, using an asynchronous framework to perform I/O operations.
+
+ All asynchronous API examples assume that you are using :mod:`asyncio`,
+ but you can use a different library thanks to the :doc:`asynchronous backend engine API </api/async/backend>`.
diff --git a/docs/source/api/async/backend.rst b/docs/source/api/async/backend.rst
new file mode 100644
index 00000000..6ce7807d
--- /dev/null
+++ b/docs/source/api/async/backend.rst
@@ -0,0 +1,234 @@
+*******************************
+Asynchronous Backend Engine API
+*******************************
+
+.. contents:: Table of Contents
+ :local:
+ :depth: 1
+
+------
+
+Introduction
+============
+
+.. todo::
+
+ Explain this big thing.
+
+
+Backend Interface
+=================
+
+.. automodule:: easynetwork.api_async.backend.abc
+ :no-docstring:
+
+.. autoclass:: AsyncBackend
+
+.. contents:: :class:`AsyncBackend`'s methods
+ :local:
+
+Runners
+-------
+
+.. automethod:: AsyncBackend.bootstrap
+
+.. automethod:: AsyncBackend.new_runner
+
+.. autoclass:: Runner
+ :members:
+ :special-members: __enter__, __exit__
+
+Coroutines And Tasks
+--------------------
+
+Sleeping
+^^^^^^^^
+
+.. automethod:: AsyncBackend.coro_yield
+
+.. automethod:: AsyncBackend.sleep
+
+.. automethod:: AsyncBackend.sleep_forever
+
+.. automethod:: AsyncBackend.sleep_until
+
+.. automethod:: AsyncBackend.current_time
+
+Task Cancellation
+^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.get_cancelled_exc_class
+
+Shielding From Task Cancellation
+""""""""""""""""""""""""""""""""
+
+.. automethod:: AsyncBackend.cancel_shielded_coro_yield
+
+.. automethod:: AsyncBackend.ignore_cancellation
+
+Creating Concurrent Tasks
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.create_task_group
+
+.. autoclass:: TaskGroup
+ :members:
+ :special-members: __aenter__, __aexit__
+
+.. autoclass:: Task
+ :members:
+
+Spawning System Tasks
+"""""""""""""""""""""
+
+.. automethod:: AsyncBackend.spawn_task
+
+.. autoclass:: SystemTask
+ :members:
+
+Timeouts
+^^^^^^^^
+
+.. automethod:: AsyncBackend.move_on_after
+
+.. automethod:: AsyncBackend.move_on_at
+
+.. automethod:: AsyncBackend.timeout
+
+.. automethod:: AsyncBackend.timeout_at
+
+.. autoclass:: TimeoutHandle
+ :members:
+
+Networking
+----------
+
+Opening Network Connections
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.create_tcp_connection
+
+.. automethod:: AsyncBackend.create_ssl_over_tcp_connection
+
+.. automethod:: AsyncBackend.wrap_tcp_client_socket
+
+.. automethod:: AsyncBackend.wrap_ssl_over_tcp_client_socket
+
+.. automethod:: AsyncBackend.create_udp_endpoint
+
+.. automethod:: AsyncBackend.wrap_udp_socket
+
+Creating Network Servers
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.create_tcp_listeners
+
+.. automethod:: AsyncBackend.create_ssl_over_tcp_listeners
+
+Socket Adapter Classes
+^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: AsyncBaseSocketAdapter
+ :members:
+ :special-members: __aenter__, __aexit__
+
+.. autoclass:: AsyncStreamSocketAdapter
+ :members:
+
+.. autoclass:: AsyncHalfCloseableStreamSocketAdapter
+ :members:
+
+.. autoclass:: AsyncDatagramSocketAdapter
+ :members:
+
+.. autoclass:: AsyncListenerSocketAdapter
+ :members:
+
+.. autoclass:: AcceptedSocket
+ :members:
+
+
+Synchronization Primitives
+--------------------------
+
+Locks
+^^^^^
+
+.. automethod:: AsyncBackend.create_lock
+
+.. autoprotocol:: ILock
+
+Events
+^^^^^^
+
+.. automethod:: AsyncBackend.create_event
+
+.. autoprotocol:: IEvent
+
+Condition Variables
+^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.create_condition_var
+
+.. autoprotocol:: ICondition
+
+Concurrency And Multithreading
+------------------------------
+
+Running Blocking Code
+^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.run_in_thread
+
+Scheduling From Other Threads
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.create_threads_portal
+
+.. autoclass:: ThreadsPortal
+ :members:
+
+``concurrent.futures`` Integration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automethod:: AsyncBackend.wait_future
+
+.. seealso::
+
+ :class:`.AsyncExecutor` class.
+
+
+Backend Factory
+===============
+
+.. automodule:: easynetwork.api_async.backend.factory
+ :no-docstring:
+
+.. todo::
+
+ Document backend factory usage.
+
+.. autoclass:: AsyncBackendFactory
+ :members:
+ :exclude-members: GROUP_NAME
+
+
+Tasks Utilities
+===============
+
+.. automodule:: easynetwork.api_async.backend.tasks
+ :no-docstring:
+
+.. autoclass:: SingleTaskRunner
+ :members:
+
+
+Concurrency And Multithreading (``concurrent.futures`` Integration)
+===================================================================
+
+.. automodule:: easynetwork.api_async.backend.futures
+ :no-docstring:
+
+.. autoclass:: AsyncExecutor
+ :members:
+ :special-members: __aenter__, __aexit__
diff --git a/docs/source/api/async/client.rst b/docs/source/api/async/client.rst
new file mode 100644
index 00000000..981fccb4
--- /dev/null
+++ b/docs/source/api/async/client.rst
@@ -0,0 +1,39 @@
+***********
+Clients API
+***********
+
+.. automodule:: easynetwork.api_async.client
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Abstract Base Class
+===================
+
+.. autoclass:: AbstractAsyncNetworkClient
+ :members:
+ :special-members: __aenter__, __aexit__
+
+
+TCP Implementation
+==================
+
+.. autoclass:: AsyncTCPNetworkClient
+ :members:
+
+UDP Implementation
+==================
+
+.. autoclass:: AsyncUDPNetworkClient
+ :members:
+ :exclude-members: iter_received_packets
+
+
+Generic UDP Endpoint
+--------------------
+
+.. autoclass:: AsyncUDPNetworkEndpoint
+ :members:
+ :special-members: __aenter__, __aexit__
diff --git a/docs/source/api/async/index.rst b/docs/source/api/async/index.rst
new file mode 100644
index 00000000..c69103c8
--- /dev/null
+++ b/docs/source/api/async/index.rst
@@ -0,0 +1,14 @@
+********************************
+Asynchronous API (``async def``)
+********************************
+
+.. automodule:: easynetwork.api_async
+
+-----
+
+.. toctree::
+ :maxdepth: 2
+
+ client
+ server
+ backend
diff --git a/docs/source/api/async/server.rst b/docs/source/api/async/server.rst
new file mode 100644
index 00000000..88066903
--- /dev/null
+++ b/docs/source/api/async/server.rst
@@ -0,0 +1,55 @@
+***********
+Servers API
+***********
+
+.. automodule:: easynetwork.api_async.server
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Abstract Base Class
+===================
+
+.. autoclass:: AbstractAsyncNetworkServer
+ :members:
+ :special-members: __aenter__, __aexit__
+
+.. autoprotocol:: easynetwork.api_async.server.abc::SupportsEventSet
+
+
+TCP Implementation
+==================
+
+.. autoclass:: AsyncTCPNetworkServer
+ :members:
+
+UDP Implementation
+==================
+
+.. autoclass:: AsyncUDPNetworkServer
+ :members:
+
+
+Request Handler Interface
+=========================
+
+.. autoclass:: AsyncStreamRequestHandler
+ :members:
+
+.. autoclass:: AsyncDatagramRequestHandler
+ :members:
+
+Client API
+----------
+
+.. autoclass:: AsyncBaseClientInterface
+ :members:
+
+.. autoclass:: AsyncStreamClient
+ :members:
+
+.. autoclass:: AsyncDatagramClient
+ :members:
+ :special-members: __eq__, __hash__
diff --git a/docs/source/api/converter.rst b/docs/source/api/converter.rst
new file mode 100644
index 00000000..a95b5429
--- /dev/null
+++ b/docs/source/api/converter.rst
@@ -0,0 +1,6 @@
+**********
+Converters
+**********
+
+.. automodule:: easynetwork.converter
+ :members:
diff --git a/docs/source/api/exceptions.rst b/docs/source/api/exceptions.rst
new file mode 100644
index 00000000..395ed60a
--- /dev/null
+++ b/docs/source/api/exceptions.rst
@@ -0,0 +1,6 @@
+**********
+Exceptions
+**********
+
+.. automodule:: easynetwork.exceptions
+ :members:
diff --git a/docs/source/api/index.rst b/docs/source/api/index.rst
new file mode 100644
index 00000000..24b99eb7
--- /dev/null
+++ b/docs/source/api/index.rst
@@ -0,0 +1,15 @@
+*************
+API Reference
+*************
+
+.. toctree::
+ :maxdepth: 1
+
+ async/index
+ sync/index
+ serializers/index
+ converter
+ protocol
+ exceptions
+ tools/index
+ typevars
diff --git a/docs/source/api/protocol.rst b/docs/source/api/protocol.rst
new file mode 100644
index 00000000..2b9c5465
--- /dev/null
+++ b/docs/source/api/protocol.rst
@@ -0,0 +1,6 @@
+*********
+Protocols
+*********
+
+.. automodule:: easynetwork.protocol
+ :members:
diff --git a/docs/source/api/serializers/abc.rst b/docs/source/api/serializers/abc.rst
new file mode 100644
index 00000000..ea8f5d47
--- /dev/null
+++ b/docs/source/api/serializers/abc.rst
@@ -0,0 +1,40 @@
+*********************
+Abstract Base Classes
+*********************
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Top-Level Base Classes
+======================
+
+.. automodule:: easynetwork.serializers.abc
+ :no-docstring:
+
+.. autoclass:: AbstractPacketSerializer
+ :members:
+
+.. autoclass:: AbstractIncrementalPacketSerializer
+ :members:
+
+
+------
+
+Stream Base Classes
+===================
+
+.. automodule:: easynetwork.serializers.base_stream
+ :no-docstring:
+
+Here are abstract classes that implement common stream protocol patterns.
+
+.. autoclass:: AutoSeparatedPacketSerializer
+ :members:
+
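+A minimal sketch of a line-based serializer built on this class (the ``separator`` keyword argument and
+the two generic parameters are assumptions based on the usage shown elsewhere in this documentation):
+
+.. code-block:: python
+
+   from easynetwork.serializers.base_stream import AutoSeparatedPacketSerializer
+
+   class UTF8LineSerializer(AutoSeparatedPacketSerializer[str, str]):
+       def __init__(self) -> None:
+           # Each packet is a single line terminated by "\n"
+           super().__init__(separator=b"\n")
+
+       def serialize(self, packet: str) -> bytes:
+           return packet.encode("utf-8")
+
+       def deserialize(self, data: bytes) -> str:
+           return data.decode("utf-8")
+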
+.. autoclass:: FixedSizePacketSerializer
+ :members:
+
+.. autoclass:: FileBasedPacketSerializer
+ :members:
diff --git a/docs/source/api/serializers/cbor.rst b/docs/source/api/serializers/cbor.rst
new file mode 100644
index 00000000..9318a2e3
--- /dev/null
+++ b/docs/source/api/serializers/cbor.rst
@@ -0,0 +1,21 @@
+***************
+CBOR Serializer
+***************
+
+.. currentmodule:: easynetwork.serializers
+
+`CBOR <https://cbor.io/>`_ is an alternative representation of the ``JSON`` data model.
+
+.. include:: ../../_include/see-also-optional-dependencies.rst
+
+.. autoclass:: CBORSerializer
+ :members:
+
+Configuration
+=============
+
+.. autoclass:: CBOREncoderConfig
+ :members:
+
+.. autoclass:: CBORDecoderConfig
+ :members:
diff --git a/docs/source/api/serializers/index.rst b/docs/source/api/serializers/index.rst
new file mode 100644
index 00000000..7d623319
--- /dev/null
+++ b/docs/source/api/serializers/index.rst
@@ -0,0 +1,32 @@
+***********
+Serializers
+***********
+
+.. automodule:: easynetwork.serializers
+
+-----
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Built-In Serializers
+
+ abc
+ json
+ pickle
+ line
+ struct
+ cbor
+ msgpack
+
+-----
+
+.. automodule:: easynetwork.serializers.wrapper
+ :no-docstring:
+
+.. toctree::
+ :caption: Serializer Wrappers
+ :maxdepth: 2
+
+ wrappers/base64
+ wrappers/compressor
+ wrappers/encryptor
diff --git a/docs/source/api/serializers/json.rst b/docs/source/api/serializers/json.rst
new file mode 100644
index 00000000..fe9fd8a8
--- /dev/null
+++ b/docs/source/api/serializers/json.rst
@@ -0,0 +1,17 @@
+***************
+JSON Serializer
+***************
+
+.. currentmodule:: easynetwork.serializers
+
+.. autoclass:: JSONSerializer
+ :members:
+
+Configuration
+=============
+
+.. autoclass:: JSONEncoderConfig
+ :members:
+
+.. autoclass:: JSONDecoderConfig
+ :members:
diff --git a/docs/source/api/serializers/line.rst b/docs/source/api/serializers/line.rst
new file mode 100644
index 00000000..946cf82a
--- /dev/null
+++ b/docs/source/api/serializers/line.rst
@@ -0,0 +1,8 @@
+*****************
+String Serializer
+*****************
+
+.. currentmodule:: easynetwork.serializers
+
+.. autoclass:: StringLineSerializer
+ :members:
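+
+For example, the FTP server tutorial in this documentation instantiates it for a CRLF-terminated, ASCII-encoded protocol:
+
+.. code-block:: python
+
+   from easynetwork.serializers import StringLineSerializer
+
+   # One packet per line, terminated by "\r\n"
+   serializer = StringLineSerializer(newline="CRLF", encoding="ascii")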
diff --git a/docs/source/api/serializers/msgpack.rst b/docs/source/api/serializers/msgpack.rst
new file mode 100644
index 00000000..10f9ee14
--- /dev/null
+++ b/docs/source/api/serializers/msgpack.rst
@@ -0,0 +1,21 @@
+**********************
+MessagePack Serializer
+**********************
+
+.. currentmodule:: easynetwork.serializers
+
+`MessagePack <https://msgpack.org/>`_ is an alternative representation of the ``JSON`` data model.
+
+.. include:: ../../_include/see-also-optional-dependencies.rst
+
+.. autoclass:: MessagePackSerializer
+ :members:
+
+Configuration
+=============
+
+.. autoclass:: MessagePackerConfig
+ :members:
+
+.. autoclass:: MessageUnpackerConfig
+ :members:
diff --git a/docs/source/api/serializers/pickle.rst b/docs/source/api/serializers/pickle.rst
new file mode 100644
index 00000000..5df14b79
--- /dev/null
+++ b/docs/source/api/serializers/pickle.rst
@@ -0,0 +1,25 @@
+*****************
+Pickle Serializer
+*****************
+
+.. currentmodule:: easynetwork.serializers
+
+.. warning::
+
+ Read the security considerations before using the :mod:`pickle` module.
+
+.. todo::
+
+ Add examples of how to use PickleSerializer with EncryptorSerializer or Base64EncoderSerializer with checksum.
+
+.. autoclass:: PickleSerializer
+ :members:
+
+Configuration
+=============
+
+.. autoclass:: PicklerConfig
+ :members:
+
+.. autoclass:: UnpicklerConfig
+ :members:
diff --git a/docs/source/api/serializers/struct.rst b/docs/source/api/serializers/struct.rst
new file mode 100644
index 00000000..0ab45281
--- /dev/null
+++ b/docs/source/api/serializers/struct.rst
@@ -0,0 +1,18 @@
+********************
+Structure Serializer
+********************
+
+.. currentmodule:: easynetwork.serializers
+
+Serializers that use the :mod:`struct` module.
+
+There is a base class :class:`AbstractStructSerializer` to easily manipulate structured data.
+
+.. autoclass:: AbstractStructSerializer
+ :members:
+
+.. autotypevar:: easynetwork.serializers.struct::_NamedTupleVar
+ :noindex:
+
+.. autoclass:: NamedTupleStructSerializer
+ :members:
diff --git a/docs/source/api/serializers/wrappers/base64.rst b/docs/source/api/serializers/wrappers/base64.rst
new file mode 100644
index 00000000..a1a1a0b7
--- /dev/null
+++ b/docs/source/api/serializers/wrappers/base64.rst
@@ -0,0 +1,11 @@
+*************************
+Base64 Encoder Serializer
+*************************
+
+.. currentmodule:: easynetwork.serializers.wrapper
+
+.. autoclass:: Base64EncoderSerializer
+ :members:
+ :exclude-members: generate_key
+
+.. automethod:: Base64EncoderSerializer.generate_key
diff --git a/docs/source/api/serializers/wrappers/compressor.rst b/docs/source/api/serializers/wrappers/compressor.rst
new file mode 100644
index 00000000..734e2792
--- /dev/null
+++ b/docs/source/api/serializers/wrappers/compressor.rst
@@ -0,0 +1,24 @@
+**********************
+Compressor Serializers
+**********************
+
+.. currentmodule:: easynetwork.serializers.wrapper
+
+.. autoclass:: BZ2CompressorSerializer
+ :members:
+
+.. autoclass:: ZlibCompressorSerializer
+ :members:
+
+
+Abstract Compressor Interface
+=============================
+
+.. currentmodule:: easynetwork.serializers.wrapper.compressor
+
+.. autoclass:: AbstractCompressorSerializer
+ :members:
+
+.. autoprotocol:: CompressorInterface
+
+.. autoprotocol:: DecompressorInterface
diff --git a/docs/source/api/serializers/wrappers/encryptor.rst b/docs/source/api/serializers/wrappers/encryptor.rst
new file mode 100644
index 00000000..1641f313
--- /dev/null
+++ b/docs/source/api/serializers/wrappers/encryptor.rst
@@ -0,0 +1,13 @@
+********************
+Encryptor Serializer
+********************
+
+.. currentmodule:: easynetwork.serializers.wrapper
+
+.. include:: ../../../_include/see-also-optional-dependencies.rst
+
+.. autoclass:: EncryptorSerializer
+ :members:
+ :exclude-members: generate_key
+
+.. automethod:: EncryptorSerializer.generate_key
diff --git a/docs/source/api/sync/client.rst b/docs/source/api/sync/client.rst
new file mode 100644
index 00000000..cdf5c1a5
--- /dev/null
+++ b/docs/source/api/sync/client.rst
@@ -0,0 +1,38 @@
+***********
+Clients API
+***********
+
+.. automodule:: easynetwork.api_sync.client
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Abstract Base Class
+===================
+
+.. autoclass:: AbstractNetworkClient
+ :members:
+ :special-members: __enter__, __exit__
+
+TCP Implementation
+==================
+
+.. autoclass:: TCPNetworkClient
+ :members:
+
+UDP Implementation
+==================
+
+.. autoclass:: UDPNetworkClient
+ :members:
+ :exclude-members: iter_received_packets
+
+
+Generic UDP Endpoint
+--------------------
+
+.. autoclass:: UDPNetworkEndpoint
+ :members:
+ :special-members: __enter__, __exit__
diff --git a/docs/source/api/sync/index.rst b/docs/source/api/sync/index.rst
new file mode 100644
index 00000000..b8ed043c
--- /dev/null
+++ b/docs/source/api/sync/index.rst
@@ -0,0 +1,13 @@
+***************
+Synchronous API
+***************
+
+.. automodule:: easynetwork.api_sync
+
+-----
+
+.. toctree::
+ :maxdepth: 2
+
+ client
+ server
diff --git a/docs/source/api/sync/server.rst b/docs/source/api/sync/server.rst
new file mode 100644
index 00000000..f2ed3ab7
--- /dev/null
+++ b/docs/source/api/sync/server.rst
@@ -0,0 +1,31 @@
+***********
+Servers API
+***********
+
+.. automodule:: easynetwork.api_sync.server
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Abstract Base Class
+===================
+
+.. autoclass:: AbstractNetworkServer
+ :members:
+ :special-members: __enter__, __exit__
+
+TCP Implementation
+==================
+
+.. autoclass:: StandaloneTCPNetworkServer
+ :inherited-members:
+ :members:
+
+UDP Implementation
+==================
+
+.. autoclass:: StandaloneUDPNetworkServer
+ :inherited-members:
+ :members:
diff --git a/docs/source/api/tools/index.rst b/docs/source/api/tools/index.rst
new file mode 100644
index 00000000..9c12fdad
--- /dev/null
+++ b/docs/source/api/tools/index.rst
@@ -0,0 +1,12 @@
+**************
+Internal Tools
+**************
+
+.. automodule:: easynetwork.tools
+
+-----
+
+.. toctree::
+ :maxdepth: 1
+
+ socket
diff --git a/docs/source/api/tools/socket.rst b/docs/source/api/tools/socket.rst
new file mode 100644
index 00000000..1951591d
--- /dev/null
+++ b/docs/source/api/tools/socket.rst
@@ -0,0 +1,50 @@
+**************
+Socket Helpers
+**************
+
+.. automodule:: easynetwork.tools.socket
+
+Data Structures And Constants
+=============================
+
+.. autoenum:: AddressFamily
+ :members:
+
+.. autonamedtuple:: IPv4SocketAddress
+ :members:
+
+.. autonamedtuple:: IPv6SocketAddress
+ :members:
+
+.. autodata:: SocketAddress
+ :annotation: :TypeAlias = IPv4SocketAddress | IPv6SocketAddress
+
+
+Classes
+=======
+
+.. autoprotocol:: SupportsSocketOptions
+
+.. autoprotocol:: ISocket
+
+.. autoclass:: SocketProxy
+ :members:
+
+
+Functions
+=========
+
+.. autofunction:: new_socket_address
+
+.. autofunction:: set_tcp_nodelay
+
+.. autofunction:: set_tcp_keepalive
+
+Socket Lingering
+----------------
+
+.. autofunction:: get_socket_linger_struct
+
+.. autofunction:: enable_socket_linger
+
+.. autofunction:: disable_socket_linger
diff --git a/docs/source/api/typevars.rst b/docs/source/api/typevars.rst
new file mode 100644
index 00000000..f51e3a38
--- /dev/null
+++ b/docs/source/api/typevars.rst
@@ -0,0 +1,8 @@
+*******************************
+For type hints — Type Variables
+*******************************
+
+.. automodule:: easynetwork._typevars
+ :members:
+ :private-members:
+ :noindex:
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 00000000..1cfe82af
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,139 @@
+# type: ignore
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+from __future__ import annotations
+
+import os.path
+import sys
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+project = "EasyNetwork"
+copyright = "2023, Francis Clairicia-Rose-Claire-Josephine"
+author = "FrankySnow9"
+release = "1.0.0rc4"
+version = release
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+sys.path.append(os.path.abspath("./_extensions"))
+
+extensions = [
+ # Built-in
+ "sphinx.ext.autodoc",
+ "sphinx.ext.duration",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ # Dependencies
+ "enum_tools.autoenum",
+ "sphinx_rtd_theme",
+ "sphinx_tabs.tabs",
+ "sphinx_toolbox.github",
+ "sphinx_toolbox.sidebar_links",
+ "sphinx_toolbox.more_autodoc.genericalias",
+ "sphinx_toolbox.more_autodoc.autonamedtuple",
+ "sphinx_toolbox.more_autodoc.autoprotocol",
+ "sphinx_toolbox.more_autodoc.typevars",
+ "sphinx_toolbox.more_autodoc.no_docstring",
+ # Custom
+ "sphinx_easynetwork",
+]
+
+highlight_language = "python3"
+
+manpages_url = "https://manpages.debian.org/{path}"
+
+templates_path = []
+exclude_patterns = ["_include", "_extensions"]
+
+
+# -- sphinx.ext.autodoc configuration -------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
+
+autoclass_content = "both"
+autodoc_member_order = "bysource"
+autodoc_default_options = {
+ "undoc-members": None,
+ "member-order": "bysource",
+ "no-value": None,
+ "show-inheritance": None,
+}
+autodoc_preserve_defaults = True
+autodoc_typehints = "description"
+autodoc_typehints_description_target = "documented_params"
+autodoc_type_aliases = {
+ "_typing_bz2.BZ2Compressor": "bz2.BZ2Compressor",
+ "_typing_bz2.BZ2Decompressor": "bz2.BZ2Decompressor",
+ "_typing_zlib._Compress": "zlib.Compress",
+ "_typing_zlib._Decompress": "zlib.Decompress",
+ "_typing_pickle.Pickler": "pickle.Pickler",
+ "_typing_pickle.Unpickler": "pickle.Unpickler",
+ "_typing_struct.Struct": "struct.Struct",
+ "_typing_ssl.SSLContext": "ssl.SSLContext",
+ "_socket._RetAddress": "typing.Any",
+ "_socket.socket": "socket.socket",
+ "contextvars.Context": "contextvars.Context",
+}
+autodoc_inherit_docstrings = False
+
+# -- sphinx.ext.intersphinx configuration ------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
+
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "cbor2": ("https://cbor2.readthedocs.io/en/stable", None),
+ "cryptography": ("https://cryptography.io/en/stable", None),
+ "msgpack": ("https://msgpack-python.readthedocs.io/en/stable", None),
+}
+
+
+# -- sphinx.ext.napoleon configuration -------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html
+
+napoleon_numpy_docstring = False
+napoleon_preprocess_types = True
+napoleon_use_param = True
+napoleon_use_keyword = True
+napoleon_custom_sections = [
+ ("Common Parameters", "params_style"),
+ ("Socket Parameters", "params_style"),
+ ("Connection Parameters", "params_style"),
+ ("Backend Parameters", "params_style"),
+]
+
+
+# -- sphinx.ext.todo configuration -------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/extensions/todo.html
+
+todo_include_todos = True
+todo_emit_warnings = False
+
+
+# -- sphinx-tabs configuration -----------------------------------------------
+# https://sphinx-tabs.readthedocs.io/en/latest/
+
+sphinx_tabs_disable_tab_closing = True
+
+# -- sphinx-toolbox.github configuration -------------------------------------
+# https://sphinx-toolbox.readthedocs.io/en/stable/extensions/github.html
+
+github_username = "francis-clairicia"
+github_repository = "EasyNetwork"
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = "sphinx_rtd_theme"
+html_static_path = []
+
+# -- sphinx-rtd-theme configuration -------------------------------------
+# https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html
+
+html_theme_options = {
+ "navigation_depth": -1, # Unlimited
+}
diff --git a/docs/source/glossary.rst b/docs/source/glossary.rst
new file mode 100644
index 00000000..1439098f
--- /dev/null
+++ b/docs/source/glossary.rst
@@ -0,0 +1,90 @@
+********
+Glossary
+********
+
+.. glossary::
+ :sorted:
+
+ communication protocol
+ A set of formal rules describing how to transmit or exchange data, especially across a network.
+
+ In EasyNetwork, it is up to the developer to define his communication protocol using a :term:`protocol object`.
+
+ composite converter
+ A :term:`converter` that handles different object types for input and output.
+
+ .. seealso:: :class:`.StapledPacketConverter` class.
+
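+      Example (mirroring the FTP tutorial's protocol, whose converters are defined in this documentation):
+
+      .. code-block:: python
+
+         from easynetwork.converter import StapledPacketConverter
+
+         converter = StapledPacketConverter(
+             sent_packet_converter=FTPReplyConverter(),
+             received_packet_converter=FTPRequestConverter(),
+         )
+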
+ converter
+ An interface responsible for bridging the gap between the Python objects manipulated by the application/software
+ and the :term:`data transfer objects <data transfer object>` handled by the :term:`serializer`.
+
+ It must also ensure that deserialized objects are valid and usable by the application/software without post-processing.
+
+ data transfer object
+ An object that carries data between processes in order to reduce the number of method calls. It is a flat data structure
+ that contains no business logic.
+
+ In EasyNetwork, the DTOs are manipulated by the :term:`serializer` and transformed into business objects by a :term:`converter`.
+
+ DTO
+ See :term:`data transfer object`.
+
+ incremental serializer
+ See :term:`serializer`.
+
+ one-shot serializer
+ See :term:`serializer`.
+
+ packet
+ A unit of data routed between an origin and a destination on a network.
+
+ protocol object
+ An object representing a :term:`communication protocol`. It consists of a :term:`serializer` and, optionally, a :term:`converter`.
+
+ serializer
+ The lowest layer for passing from a Python object (a :term:`DTO`) to raw data (:class:`bytes`) and vice versa.
+
+ It must have no knowledge of the object's validity with regard to the application/software logic,
+ nor the meaning of data with regard to the :term:`communication protocol`.
+ This ensures a generic format that can be reused in any project.
+
+ For example, the :class:`.JSONSerializer` only knows how to translate dictionaries, lists, strings, numbers and special constants,
+ and how to reinterpret them.
+
+ A serializer imposes its own limits on the objects it can translate and on the validity of the object itself
+ (for example, as a JSON object, a dictionary must only have character strings as keys).
+
+ Ideally, a serializer should only handle :ref:`primitive types <bltin-types>` and :ref:`constants <built-in-consts>`.
+
+ There are 2 types of serializers:
+
+ * one-shot serializers
+
+ One-shot serializers provide the full representation of a Python object in :class:`bytes` **in a single function call**,
+ and need this same full representation to recreate the object **at once**.
+
+ * incremental serializers
+
+ Incremental serializers, on the other hand, provide the full representation of the Python object in :class:`bytes` **part by part**,
+ sometimes including additional metadata used during deserialization.
+
+ During deserialization, they have the ability **to know when the** :term:`packet` **is complete** (and wait if incomplete)
+ and which bytes are not part of the initial :term:`packet`.
+
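+      Example with a one-shot serializer (the byte output assumes :class:`.JSONSerializer`'s default compact representation):
+
+      >>> from easynetwork.serializers import JSONSerializer
+      >>> s = JSONSerializer()
+      >>> s.serialize({"data": 42})
+      b'{"data":42}'
+      >>> s.deserialize(b'{"data":42}')
+      {'data': 42}
+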
+ serializer wrapper
+ A :term:`serializer` that transforms data coming from another :term:`serializer`.
+
+ Example:
+
+ >>> from easynetwork.serializers import JSONSerializer
+ >>> from easynetwork.serializers.wrapper import Base64EncoderSerializer
+ >>> s = Base64EncoderSerializer(JSONSerializer())
+ >>> data = s.serialize({"data": 42})
+ >>> data
+ b'eyJkYXRhIjo0Mn0='
+ >>> s.deserialize(data)
+ {'data': 42}
+
+ Most of the time, a serializer wrapper is an :term:`incremental serializer` in order to allow a :term:`one-shot serializer`
+ to be used in a stream context.
diff --git a/docs/source/howto/index.rst b/docs/source/howto/index.rst
new file mode 100644
index 00000000..517a421a
--- /dev/null
+++ b/docs/source/howto/index.rst
@@ -0,0 +1,11 @@
+************
+How-to Guide
+************
+
+.. toctree::
+ :maxdepth: 1
+
+ protocols
+ serializers
+ tcp_clients
+ tcp_servers
diff --git a/docs/source/howto/protocols.rst b/docs/source/howto/protocols.rst
new file mode 100644
index 00000000..1e9fc555
--- /dev/null
+++ b/docs/source/howto/protocols.rst
@@ -0,0 +1,194 @@
+********************************
+How-to — Communication Protocols
+********************************
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+The Basics
+==========
+
+To define your :term:`communication protocol`, you must instantiate one of the following :term:`protocol objects <protocol object>`:
+
+* :class:`.DatagramProtocol`: suitable for datagram oriented communication (e.g. `UDP`_).
+
+* :class:`.StreamProtocol`: suitable for stream oriented communication (e.g. `TCP`_).
+
+They all have one thing in common: they wrap a :term:`serializer` and a :term:`converter`.
+
+You can either directly create an instance:
+
+.. tabs::
+
+ .. group-tab:: DatagramProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/basics/datagram_protocol_instance.py
+ :linenos:
+
+ .. group-tab:: StreamProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/basics/stream_protocol_instance.py
+ :linenos:
+
+or create a subclass:
+
+.. tabs::
+
+ .. group-tab:: DatagramProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/basics/datagram_protocol_subclass.py
+ :linenos:
+
+ .. group-tab:: StreamProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/basics/stream_protocol_subclass.py
+ :linenos:
+
+.. _why-write-a-protocol-subclass:
+
+.. tip::
+
+ The latter is recommended. The main advantage of this model is to declaratively define the :term:`communication protocol`
+ (the name of the class being that of the protocol, the types of objects sent and received, etc.).
+
+ Another advantage is that the :term:`serializer` (and :term:`converter`, if any) can be configured in a single place in the project.
+
+
+Usage
+=====
+
+The :term:`protocol objects <protocol object>` are required by endpoint and server implementations to handle the data sent and received:
+
+.. tabs::
+
+ .. group-tab:: DatagramProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/usage/datagram_protocol.py
+ :pyobject: main
+ :end-at: ...
+ :linenos:
+
+ .. group-tab:: StreamProtocol
+
+ .. literalinclude:: ../_include/examples/howto/protocols/usage/stream_protocol.py
+ :pyobject: main
+ :end-at: ...
+ :linenos:
+
+.. warning::
+
+ A :term:`protocol object` is intended to be shared by multiple endpoints. Do not store sensitive data in these objects.
+ You might see some magic.
+
+
+Parsing Error
+-------------
+
+The :term:`protocol objects <protocol object>` raise a :exc:`.BaseProtocolParseError` subclass when the received data is invalid:
+
+.. tabs::
+
+ .. group-tab:: DatagramProtocol
+
+ The raised exception is :exc:`.DatagramProtocolParseError`.
+
+ .. literalinclude:: ../_include/examples/howto/protocols/usage/datagram_protocol.py
+ :pyobject: main
+ :start-at: try:
+ :dedent:
+ :linenos:
+
+ .. group-tab:: StreamProtocol
+
+ The raised exception is :exc:`.StreamProtocolParseError`.
+
+ .. literalinclude:: ../_include/examples/howto/protocols/usage/stream_protocol.py
+ :pyobject: main
+ :start-at: try:
+ :dedent:
+ :linenos:
+
+.. tip::
+
+ The underlying :exc:`.DeserializeError` instance is available with the :attr:`~.BaseProtocolParseError.error` attribute.
+
+
+The Converters
+==============
+
+TL;DR: Why should you always have a converter in your protocol object?
+----------------------------------------------------------------------
+
+Unless the :term:`serializer` is already making the tea and coffee for you, in 99% of cases the data received can be anything,
+as long as it's in the right format. On the other hand, the application has to comply with the format for sending data to the remote endpoint.
+
+However, you just want to be able to manipulate your business objects without having to worry about such problems.
+
+This is what a :term:`converter` can do for you. It creates a :term:`DTO` suitable for the underlying :term:`serializer` and validates the received
+:term:`DTO` to recreate the business object.
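+
+Schematically, the round trip looks like this (method names are the ones introduced just below):
+
+.. code-block:: text
+
+   sending:   business object --convert_to_dto_packet()--> DTO --serializer--> bytes
+   receiving: bytes --serializer--> DTO --create_from_dto_packet()--> business object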
+
+Writing A Converter
+-------------------
+
+To write a :term:`converter`, you must create a subclass of :class:`~.AbstractPacketConverter` and override
+its :meth:`~.AbstractPacketConverter.convert_to_dto_packet` and :meth:`~.AbstractPacketConverter.create_from_dto_packet` methods.
+
+For example:
+
+.. literalinclude:: ../_include/examples/howto/protocols/simple_converter/example1.py
+ :linenos:
+
+.. warning::
+
+ The :meth:`~.AbstractPacketConverter.create_from_dto_packet` function must raise a :exc:`.PacketConversionError` to indicate that
+ a parsing error was "expected" so that the received data is considered invalid.
+
+ Otherwise, any other error is considered a crash.
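+
+As a sketch, the expected pattern looks something like this (the ``Person`` fields and the exception
+message are illustrative, and :exc:`.PacketConversionError` is assumed to accept a simple message):
+
+.. code-block:: python
+
+   def create_from_dto_packet(self, packet: dict[str, Any]) -> Person:
+       try:
+           return Person(name=packet["name"], age=packet["age"])
+       except (KeyError, TypeError) as exc:
+           # An "expected" parsing error: the received DTO is invalid
+           raise PacketConversionError("invalid Person DTO") from exc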
+
+This :term:`converter` can now be used in our :term:`protocol object`:
+
+.. literalinclude:: ../_include/examples/howto/protocols/simple_converter/example2.py
+ :pyobject: PersonProtocol
+ :linenos:
+
+.. note::
+
+ Now this protocol is annotated to send and receive a ``Person`` object.
+
+In the application, you can now safely handle an object with real meaning:
+
+.. literalinclude:: ../_include/examples/howto/protocols/simple_converter/example2.py
+ :pyobject: main
+ :linenos:
+
+
+Writing A Composite Converter
+-----------------------------
+
+Most of the time, the packets sent and received are different (the request/response system). To deal with this, a :term:`protocol object`
+accepts a :term:`composite converter`.
+
+There are two ways to write a :term:`composite converter`, described below.
+
+.. note::
+
+ Do what you think is best; there is no recommended method.
+
+Option 1: Using ``StapledPacketConverter``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/protocols/composite_converter/stapled_packet_converter.py
+ :start-at: from __future__ import
+ :linenos:
+
+Option 2: By Subclassing ``AbstractPacketConverterComposite``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/protocols/composite_converter/packet_converter_subclass.py
+ :start-at: from __future__ import
+ :linenos:
+
+
+.. include:: ../_include/link-labels.rst
diff --git a/docs/source/howto/serializers.rst b/docs/source/howto/serializers.rst
new file mode 100644
index 00000000..1aced131
--- /dev/null
+++ b/docs/source/howto/serializers.rst
@@ -0,0 +1,271 @@
+********************
+How-to — Serializers
+********************
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+The Basics
+==========
+
+Where Are They Used?
+--------------------
+
+A :term:`serializer` is used by a :term:`protocol object`:
+
+* :class:`.DatagramProtocol`: accepts any :term:`serializer` object.
+
+* :class:`.StreamProtocol`: accepts :term:`incremental serializer` objects only.
+
+Which Ones Are Already Available?
+---------------------------------
+
+Several serializers are provided in the :mod:`easynetwork.serializers` module. Do not hesitate to use them.
+
+If nothing fits your needs, you can implement your own serializer.
+
+
+Writing A One-Shot Serializer
+=============================
+
+:term:`One-shot serializers <one-shot serializer>` are the easiest piece of code to write. They can be used directly on
+:class:`.DatagramProtocol` instances, and a :term:`serializer wrapper` lets you use them with the :class:`.StreamProtocol` class.
+
+.. seealso::
+
+ :mod:`easynetwork.serializers.wrapper` module
+ The full list of available wrappers for serializers.
+
+To write a :term:`one-shot serializer`, you must create a subclass of :class:`~.AbstractPacketSerializer` and override
+its :meth:`~.AbstractPacketSerializer.serialize` and :meth:`~.AbstractPacketSerializer.deserialize` methods.
+
+A naive implementation of :class:`.JSONSerializer` should look something like this:
+
+.. literalinclude:: ../_include/examples/howto/serializers/one_shot_serializer/example1.py
+ :linenos:
+
+
+Parsing Error
+-------------
+
+The :meth:`~.AbstractPacketSerializer.deserialize` function must raise a :exc:`.DeserializeError` to indicate that a parsing error
+was "expected" so that the received data is considered invalid.
+
+.. literalinclude:: ../_include/examples/howto/serializers/one_shot_serializer/example2.py
+ :linenos:
+ :emphasize-lines: 6,16,19-20
+
+.. warning::
+
+ Otherwise, any other error is considered a serializer crash.
+
+
+The Use Of ``self``
+-------------------
+
+A :term:`serializer` is intended to be shared by multiple :term:`protocols <protocol object>` (and :term:`protocols <protocol object>` are intended
+to be shared by multiple endpoints).
+
+Therefore, the object should only store additional configuration used for serialization/deserialization.
+
+For example:
+
+.. literalinclude:: ../_include/examples/howto/serializers/one_shot_serializer/example3.py
+ :linenos:
+
+.. warning::
+
+ Do not store per-serialization data. You might see some magic.
+
+.. danger::
+
+ Seriously, don't do that.
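+
+To be explicit, here is a sketch of the anti-pattern to avoid (the attribute is hypothetical):
+
+.. code-block:: python
+
+   class BadSerializer(AbstractPacketSerializer):
+       def deserialize(self, data: bytes) -> Any:
+           # DON'T: per-deserialization state on an object shared by
+           # several protocols (and therefore several endpoints)
+           self._last_packet = json.loads(data)
+           return self._last_packet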
+
+
+Using A One-Shot Serializer
+===========================
+
+You must pass the :term:`serializer` to the appropriate :term:`protocol object` that is expected by the endpoint class:
+
+.. tabs::
+
+ .. group-tab:: DatagramProtocol
+
+ .. literalinclude:: ../_include/examples/howto/serializers/one_shot_serializer/example4_datagram.py
+ :pyobject: main
+ :linenos:
+
+ .. group-tab:: StreamProtocol
+
+ .. literalinclude:: ../_include/examples/howto/serializers/one_shot_serializer/example4_stream.py
+ :pyobject: main
+ :linenos:
+
+.. note::
+
+ Using a :term:`serializer wrapper` means that the transferred data can be completely different from the original output.
+
+ If this is important to you, don't choose one of them lightly.
+
+
+Writing An Incremental Serializer
+=================================
+
+:term:`Incremental serializers <incremental serializer>` are a bit trickier to implement. They can be used directly on both
+:class:`.StreamProtocol` and :class:`.DatagramProtocol` instances.
+
+To write an :term:`incremental serializer`, you must create a subclass of :class:`~.AbstractIncrementalPacketSerializer` and override
+its :meth:`~.AbstractIncrementalPacketSerializer.incremental_serialize` and :meth:`~.AbstractIncrementalPacketSerializer.incremental_deserialize`
+methods. The :meth:`~.AbstractIncrementalPacketSerializer.serialize` and :meth:`~.AbstractIncrementalPacketSerializer.deserialize` methods
+have a default implementation that uses the incremental serialization methods.
+
+Option 1: Using Common Patterns
+-------------------------------
+
+Chances are that the communication protocol uses a simple principle to determine the end of a packet. The most common cases are:
+
+* All your packet frames use a precise byte sequence (most likely a newline).
+
+* Each packet has a fixed size.
+
+In these cases you can use the base classes in :mod:`easynetwork.serializers.base_stream`.
+
+Let's say that for the incremental part, we consider each line received to be a JSON object, separated by ``\r\n``:
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example1.py
+ :linenos:
+ :emphasize-lines: 7,10,12
+
+:class:`.AutoSeparatedPacketSerializer` adds the following behaviors:
+
+* ``incremental_serialize()`` will append ``\r\n`` to the end of the ``serialize()`` output.
+
+* ``incremental_deserialize()`` waits until ``\r\n`` is found in the input, removes the separator, and calls ``deserialize()`` on it.
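+
+For illustration, a round trip could look like this (a sketch, assuming the subclass above is called
+``MyJSONSerializer``; the exact chunking of the yielded parts is an implementation detail):
+
+.. code-block:: python
+
+   serializer = MyJSONSerializer()
+   # Concatenating the yielded parts gives the full frame, separator included
+   data = b"".join(serializer.incremental_serialize({"data": 42}))
+   assert data.endswith(b"\r\n")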
+
+.. tip::
+
+ Take a look at other available base classes in :mod:`easynetwork.serializers` before rewriting something that already exists.
+
+
+Option 2: From Scratch
+----------------------
+
+Let's see how we can get by without using the :class:`.AutoSeparatedPacketSerializer`:
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :linenos:
+
+This adds a lot of code! Let's take a closer look at the implementation.
+
+Factoring Out Common Code
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To avoid duplicating code between the one-shot part and the incremental part,
+the actual serialization/deserialization logic goes into private methods.
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :start-at: def _dump
+ :end-at: return json.loads
+ :dedent:
+ :lineno-match:
+
+And now ``serialize()`` and ``deserialize()`` use them instead:
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :start-at: def serialize
+ :end-at: raise DeserializeError
+ :dedent:
+ :lineno-match:
+ :emphasize-lines: 2,6
+
+The Purpose Of ``incremental_serialize()``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:meth:`~.AbstractIncrementalPacketSerializer.incremental_serialize` must be a :term:`generator` function
+(or at least return a :term:`generator iterator`) that yields all the parts of the serialized packet.
+It must also add any useful metadata to help :meth:`~.AbstractIncrementalPacketSerializer.incremental_deserialize` find the end of the packet.
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :pyobject: MyJSONSerializer.incremental_serialize
+ :dedent:
+ :lineno-match:
+
+Most of the time, you will have a single :keyword:`yield`. The goal is for each :keyword:`yield` to send as many :class:`bytes` as possible
+**without copying or concatenating**.
+
+.. tip::
+
+ There may be exceptions, as in this example. (Your RAM will not cry over 2 bytes added to a byte sequence of almost 100 bytes.
+ The question is worth asking if the byte sequence ends up being 4 GB.)
+
+ It is up to you to find the balance between RAM explosion and performance degradation.
+
+.. note::
+
+ The endpoint implementation can (and most likely will) decide to concatenate all the pieces and do one big send.
+ This is the most efficient way to send a large byte buffer.
+
+ However, it may be more attractive to do something else with the returned bytes.
+ :meth:`~.AbstractIncrementalPacketSerializer.incremental_serialize` is here to give endpoints this freedom.
+
+
+The Purpose Of ``incremental_deserialize()``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:meth:`~.AbstractIncrementalPacketSerializer.incremental_deserialize` must be a :term:`generator` function
+(or at least return a :term:`generator iterator`) that yields :data:`None` until all the data parts of the packet have been retrieved and parsed.
+
+This generator must return a pair of ``(packet, remainder)`` where ``packet`` is the deserialized packet and ``remainder`` is any
+superfluous trailing bytes that are not part of this packet.
+
+At each :keyword:`yield` checkpoint, the endpoint implementation sends the data received from the remote endpoint to the generator.
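+
+In other words, the expected generator protocol boils down to this sketch (``_load()`` stands for the
+private deserialization helper shown earlier, and the separator is assumed to be ``\r\n``):
+
+.. code-block:: python
+
+   def incremental_deserialize(self) -> Generator[None, bytes, tuple[Any, bytes]]:
+       buffer = b""
+       while b"\r\n" not in buffer:
+           buffer += yield  # wait for the next chunk sent by the endpoint
+       data, _, remainder = buffer.partition(b"\r\n")
+       return self._load(data), remainder
+
+The concrete implementation from the example: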
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :pyobject: MyJSONSerializer.incremental_deserialize
+ :dedent:
+ :lineno-match:
+ :emphasize-lines: 2,5
+
+.. note::
+
+ Even if we could create 5 more JSON packets from ``remainder``, ``incremental_deserialize()`` must always deserialize the first one available
+ and return the rest **as is**.
+
+ This allows the endpoint implementation to deserialize only the needed packet. The rest is reused when the application wants another packet.
+
+.. seealso::
+
+ :pep:`255` — Simple Generators
+ The proposal for adding generators and the :keyword:`yield` statement to Python.
+
+ :pep:`342` — Coroutines via Enhanced Generators
+ The proposal to enhance the API and syntax of generators, making them usable as simple coroutines.
+
+ :pep:`380` — Syntax for Delegating to a Subgenerator
+ The proposal to introduce the ``yield from`` syntax, making delegation to subgenerators easy.
+
+
+Parsing Error
+^^^^^^^^^^^^^
+
+The :meth:`~.AbstractIncrementalPacketSerializer.incremental_deserialize` function must raise an :exc:`.IncrementalDeserializeError`
+to indicate that a parsing error was "expected" so that the received data is considered invalid.
+
+.. literalinclude:: ../_include/examples/howto/serializers/incremental_serializer/example2.py
+ :pyobject: MyJSONSerializer.incremental_deserialize
+ :dedent:
+ :lineno-match:
+ :emphasize-lines: 12-13
+
+.. warning::
+
+ Otherwise, any other error is considered a serializer crash.
+
+ If :exc:`.DeserializeError` is raised instead, this is converted to a :exc:`RuntimeError`.
+
+.. note::
+
+ :exc:`.IncrementalDeserializeError` needs the possibly valid remainder, which is not the root cause of the error.
+ In the example, even if ``data`` is an invalid JSON object, all bytes after the ``\r\n`` token (in ``remainder``) are not lost.
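+
+Continuing the sketch above, the error handling could look like this (the keyword argument name
+``remaining_data`` is an assumption about the exception's signature):
+
+.. code-block:: python
+
+   try:
+       packet = self._load(data)
+   except json.JSONDecodeError as exc:
+       # 'remainder' stays available for the next packets
+       raise IncrementalDeserializeError(
+           f"JSON decode error: {exc}",
+           remaining_data=remainder,
+       ) from exc
+   return packet, remainder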
diff --git a/docs/source/howto/tcp_clients.rst b/docs/source/howto/tcp_clients.rst
new file mode 100644
index 00000000..949e1da9
--- /dev/null
+++ b/docs/source/howto/tcp_clients.rst
@@ -0,0 +1,434 @@
+*****************************
+How-to — TCP Client Endpoints
+*****************************
+
+.. include:: ../_include/sync-async-variants.rst
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+The Basics
+==========
+
+The Protocol Object
+-------------------
+
+The TCP clients expect a :class:`.StreamProtocol` instance to communicate with the remote endpoint.
+
+.. seealso::
+
+ :doc:`protocols`
+ Explains what a :class:`.StreamProtocol` is and how to use it.
+
+
+Connecting To The Remote Host
+-----------------------------
+
+You need the host address (domain name or IP) and the port of connection in order to connect to the remote host:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_sync/connection_example1.py
+ :linenos:
+
+ You can control the connection timeout with the ``connect_timeout`` parameter:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_sync/connection_example2.py
+ :pyobject: main
+ :lineno-match:
+ :emphasize-lines: 5-11
+
+ .. note::
+
+ The client does nothing when it enters the :keyword:`with` context. Everything is done on object creation.
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_async/connection_example1.py
+ :linenos:
+
+ You can control the connection timeout by adding a timeout scope using the asynchronous framework:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_async/connection_example2.py
+ :pyobject: main
+ :lineno-match:
+ :emphasize-lines: 5-13
+
+ .. note::
+
+ The call to ``wait_connected()`` is required to actually initialize the client, since we cannot perform asynchronous operations
+ at object creation. This is what the client does when it enters the :keyword:`async with` context.
+
+ Once completed, ``wait_connected()`` is a no-op.
+
+
+Using An Already Connected Socket
+---------------------------------
+
+If you have your own way to obtain a connected :class:`socket.socket` instance, you can pass it to the client.
+
+If the socket is not connected, an :exc:`OSError` is raised.
+
+.. important::
+
+ It *must* be a :data:`~socket.SOCK_STREAM` socket with :data:`~socket.AF_INET` or :data:`~socket.AF_INET6` family.
+
+.. warning::
+
+ Ownership of the socket is transferred to the client. You must close the client to close the socket.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_sync/socket_example1.py
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/basics/api_async/socket_example1.py
+ :linenos:
+
+ .. note::
+
+ Even with a ready-to-use socket, the call to ``wait_connected()`` is still required.
+
+
+Basic Usage
+===========
+
+Sending Packets
+---------------
+
+There's not much to say, except that objects passed as arguments are automatically converted to bytes to send to the remote host
+thanks to the :term:`protocol object`.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: send_packet_example1
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: send_packet_example1
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+
+Receiving Packets
+-----------------
+
+You get the next available packet, already parsed. Extraneous data is kept for the next call.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: recv_packet_example1
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ You can control the receive timeout with the ``timeout`` parameter:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: recv_packet_example2
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: recv_packet_example1
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ You can control the receive timeout by adding a timeout scope using the asynchronous framework:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: recv_packet_example2
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+
+.. tip::
+
+ Remember to catch invalid data parsing errors.
+
+ .. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: recv_packet_example3
+ :start-after: [start]
+ :dedent:
+ :linenos:
+ :emphasize-lines: 3-4
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: recv_packet_example3
+ :start-after: [start]
+ :dedent:
+ :linenos:
+ :emphasize-lines: 4-5
+
+
+Receiving Multiple Packets At Once
+----------------------------------
+
+You can use ``iter_received_packets()`` to collect all the received packets into a sequence or a set.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: recv_packet_example4
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: recv_packet_example4
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+The ``timeout`` parameter defaults to zero to get only the data already in the buffer, but you can change it.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: recv_packet_example5
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. seealso::
+
+ :meth:`TCPNetworkClient.iter_received_packets() <.AbstractNetworkClient.iter_received_packets>`
+ The method description and usage (especially for the ``timeout`` parameter).
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: recv_packet_example5
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. seealso::
+
+ :meth:`AsyncTCPNetworkClient.iter_received_packets() <.AbstractAsyncNetworkClient.iter_received_packets>`
+ The method description and usage (especially for the ``timeout`` parameter).
+
+
+Advanced Usage
+==============
+
+.. note::
+
+ This section is for people who know what they're doing and are looking for something specific.
+
+
+Close The Write-End Stream
+--------------------------
+
+If you are sure you will never call ``send_packet()`` again, you can call ``send_eof()`` to shut down the write stream.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: send_eof_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: send_eof_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+.. note::
+
+ ``send_eof()`` will block until all unsent data has been flushed before closing the stream.
+
+
+Low-Level Socket Operations
+---------------------------
+
+For low-level operations such as :meth:`~socket.socket.setsockopt`, the client object exposes the socket through a :class:`.SocketProxy`:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: socket_proxy_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: socket_proxy_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. warning::
+
+ Make sure that ``wait_connected()`` has been called before.
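+
+For example, a sketch enabling ``TCP_NODELAY`` through the exposed proxy (applicable to both variants):
+
+.. code-block:: python
+
+   import socket
+
+   # The proxy forwards standard socket methods such as setsockopt()
+   client.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)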
+
+
+``socket.recv()`` Buffer Size
+-----------------------------
+
+By default, the client uses a reasonable buffer size when calling ``recv_packet()``.
+You can control this value by setting the ``max_recv_size`` parameter:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: max_recv_size_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: max_recv_size_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+
+SSL/TLS Connection
+------------------
+
+If you want to use SSL to communicate with the remote host, the easiest way is to pass ``ssl=True``:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_sync.py
+ :pyobject: ssl_default_context_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/usage/api_async.py
+ :pyobject: ssl_default_context_example
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+.. note::
+
+ Since this is a *client* object, :meth:`ssl.SSLContext.wrap_socket` and :meth:`ssl.SSLContext.wrap_bio` are always called
+ with ``server_side=False``.
+
+.. danger::
+
+ You can pass an :class:`~ssl.SSLContext` instead, but at this point I expect you to *really* know what you are doing.
+
+
+Concurrency And Multithreading
+------------------------------
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ All client methods are thread-safe. Synchronization follows these rules:
+
+ * :meth:`~.TCPNetworkClient.send_packet` and :meth:`~.TCPNetworkClient.recv_packet` do not share the same
+ :class:`threading.Lock` instance.
+
+ * :meth:`~.TCPNetworkClient.close` will not wait for :meth:`~.TCPNetworkClient.recv_packet`.
+
+ * The :attr:`client.socket <.TCPNetworkClient.socket>` methods are also thread-safe. This means that you cannot call
+ the underlying socket methods (e.g. :meth:`~socket.socket.getsockopt`) while a write operation is in progress.
+
+ This allows you to do something like this:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/concurrency/api_sync.py
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ All client methods do not require external task synchronization. Synchronization follows these rules:
+
+ * :meth:`~.AsyncTCPNetworkClient.send_packet` and :meth:`~.AsyncTCPNetworkClient.recv_packet` do not share the same lock instance.
+
+ * :meth:`~.AsyncTCPNetworkClient.close` will not wait for :meth:`~.AsyncTCPNetworkClient.recv_packet`.
+
+ This allows you to do something like this:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/concurrency/api_async.py
+ :linenos:
+
+
+SSL/TLS Considerations
+^^^^^^^^^^^^^^^^^^^^^^
+
+For safety, concurrent calls to ``send_packet()`` and ``recv_packet()`` are "disabled" by default when using SSL.
+In fact, they share the same synchronization lock.
+
+If you need this feature after all, you can pass ``ssl_shared_lock=False`` at object creation.
+
+.. danger::
+
+ But you don't need it, do you?
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/concurrency/ssl_shared_lock.py
+ :pyobject: ssl_shared_lock_for_sync_client
+ :start-after: [start]
+ :dedent:
+ :linenos:
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/howto/tcp_clients/concurrency/ssl_shared_lock.py
+ :pyobject: ssl_shared_lock_for_async_client
+ :start-after: [start]
+ :dedent:
+ :linenos:
diff --git a/docs/source/howto/tcp_servers.rst b/docs/source/howto/tcp_servers.rst
new file mode 100644
index 00000000..760da891
--- /dev/null
+++ b/docs/source/howto/tcp_servers.rst
@@ -0,0 +1,213 @@
+********************
+How-to — TCP Servers
+********************
+
+.. include:: ../_include/sync-async-variants.rst
+
+.. contents:: Table of Contents
+ :local:
+
+------
+
+Introduction
+============
+
+The :mod:`easynetwork.api_async.server` module simplifies the task of writing network servers. The service creation model is inspired by
+the standard :mod:`socketserver` library, but with an additional layer of abstraction.
+
+Creating a server requires several steps:
+
+#. Derive a class from :class:`.AsyncStreamRequestHandler` and redefine its :meth:`~.AsyncStreamRequestHandler.handle` method;
+ this method will process incoming requests.
+
+#. Instantiate the :class:`.AsyncTCPNetworkServer` class passing it the server's address, the :term:`protocol object`
+ and the request handler instance.
+
+#. Call :meth:`~.AsyncTCPNetworkServer.serve_forever` to process requests.
+
+Writing :term:`coroutine functions <coroutine function>` is mandatory to use this server.
+
+.. seealso::
+
+ :pep:`492` — Coroutines with async and await syntax
+ The proposal to introduce native coroutines in Python with :keyword:`async` and :keyword:`await` syntax.
+
+ :external+python:doc:`library/asyncio`
+ If you are not familiar with async/await syntax, you can use the standard library to get started with coroutines.
+
+
+Request Handler Objects
+=======================
+
+.. note::
+
+ Unlike :class:`socketserver.BaseRequestHandler`, there is **only one** :class:`.AsyncStreamRequestHandler` instance for the entire service.
+
+
+Here is a simple example:
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/simple_request_handler.py
+ :linenos:
+
+
+Using ``handle()`` Generator
+----------------------------
+
+.. seealso::
+
+ :pep:`525` — Asynchronous Generators
+ The proposal that expanded on :pep:`492` by adding generator capabilities to coroutine functions.
+
+
+Minimum Requirements
+^^^^^^^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: MinimumRequestHandler.handle
+ :dedent:
+ :linenos:
+
+
+Closing the connection
+^^^^^^^^^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ConnectionCloseRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 11
+
+.. tip::
+
+ You can use :func:`contextlib.aclosing` to close the client on generator exit.
+
+ .. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ConnectionCloseWithContextRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 5
+
+.. important::
+
+ The connection is forcibly closed under the following conditions:
+
+ * ``handle()`` raises an exception.
+
+ * ``handle()`` returns *before* the first :keyword:`yield` statement.
+
+ .. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ConnectionCloseBeforeYieldRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 5-6
+
+
+Error Handling
+^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ErrorHandlingInRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 8
+
+.. note::
+
+ ``handle()`` will never get a :exc:`ConnectionError` subclass. In case of an unexpected disconnect, the generator is closed,
+ so you should handle :exc:`GeneratorExit` instead.
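+
+A sketch of what this can look like (remember to re-raise :exc:`GeneratorExit` after cleaning up):
+
+.. code-block:: python
+
+   async def handle(self, client):
+       try:
+           request = yield
+       except GeneratorExit:
+           # The remote endpoint disconnected unexpectedly: clean up here,
+           # then let the generator close.
+           raise
+       ...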
+
+
+Having Multiple ``yield`` Statements
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: MultipleYieldInRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 5,12
+
+
+.. tip::
+
+ The number of :keyword:`yield` statements allowed is... infinite!
+
+ You can take advantage of this by having an internal main loop inside the generator:
+
+ .. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ClientLoopInRequestHandler.handle
+ :dedent:
+ :linenos:
+
+
+Cancellation And Timeouts
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Since all :exc:`BaseException` subclasses are thrown into the generator, you can apply a timeout to the read stream
+using the asynchronous framework (the cancellation exception is caught inside the generator):
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: TimeoutRequestHandler.handle
+ :dedent:
+ :linenos:
+ :emphasize-lines: 6,9-10
+
+
+Connecting/Disconnecting Hooks
+------------------------------
+
+You can override :meth:`~.AsyncStreamRequestHandler.on_connection` and :meth:`~.AsyncStreamRequestHandler.on_disconnection` methods:
+
+* :meth:`~.AsyncStreamRequestHandler.on_connection` is called on client task startup.
+
+* :meth:`~.AsyncStreamRequestHandler.on_disconnection` is called on client task teardown.
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ClientConnectionHooksRequestHandler
+ :start-after: ClientConnectionHooksRequestHandler
+ :dedent:
+ :linenos:
+ :emphasize-lines: 1,7
+
+
+Wait For Client Data On Connection
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you need to use the read stream, :meth:`~.AsyncStreamRequestHandler.on_connection` can be an asynchronous generator instead of
+a coroutine function:
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ClientConnectionAsyncGenRequestHandler
+ :start-after: ClientConnectionAsyncGenRequestHandler
+ :dedent:
+ :linenos:
+ :emphasize-lines: 6
+
+
+Service Initialization
+----------------------
+
+The server will call :meth:`~.AsyncStreamRequestHandler.service_init` and pass it an :class:`~contextlib.AsyncExitStack`
+at the beginning of the :meth:`~.AsyncTCPNetworkServer.serve_forever` task to set up the global service.
+
+This allows you to do something like this:
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/request_handler_explanation.py
+ :pyobject: ServiceInitializationHookRequestHandler
+ :start-after: ServiceInitializationHookRequestHandler
+ :dedent:
+ :linenos:
+ :emphasize-lines: 1
+
+
+Server Object
+=============
+
+A basic example of how to run the server:
+
+.. literalinclude:: ../_include/examples/howto/tcp_servers/async_server.py
+ :linenos:
+
+.. seealso::
+
+ :doc:`/tutorials/echo_client_server_tcp`
+ A working example of the server implementation.
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 00000000..d5e277b6
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,36 @@
+***************************************
+Welcome to EasyNetwork's documentation!
+***************************************
+
+.. automodule:: easynetwork
+
+.. note::
+
+ This project is under active development.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Table of Contents
+
+ quickstart/index
+ tutorials/index
+ howto/index
+ api/index
+ glossary
+
+.. only:: html
+
+ Useful links
+ ------------
+
+ * :github:repo:`Source code <francis-clairicia/EasyNetwork>`
+ * :ref:`genindex`
+ * :ref:`modindex`
+
+ .. sidebar-links::
+ :caption: Links
+ :github:
+ :pypi: easynetwork
+
+ genindex
+ modindex
diff --git a/docs/source/quickstart/index.rst b/docs/source/quickstart/index.rst
new file mode 100644
index 00000000..81a605ea
--- /dev/null
+++ b/docs/source/quickstart/index.rst
@@ -0,0 +1,9 @@
+***************
+Getting Started
+***************
+
+.. toctree::
+ :maxdepth: 2
+
+ overview
+ install
diff --git a/docs/source/quickstart/install.rst b/docs/source/quickstart/install.rst
new file mode 100644
index 00000000..0a7a04b0
--- /dev/null
+++ b/docs/source/quickstart/install.rst
@@ -0,0 +1,44 @@
+************
+Installation
+************
+
+To use EasyNetwork, first install it using :program:`pip`:
+
+.. code-block:: console
+
+ (.venv) $ pip install easynetwork
+
+
+.. _optional-dependencies:
+
+Optional Dependencies
+=====================
+
+EasyNetwork has no required dependencies, but comes with many optional dependencies to meet specific needs.
+
+Here is the full list:
+
+* Serializer extensions:
+
+ * ``cbor``: Installs the required dependencies for :class:`.CBORSerializer`.
+
+ * ``encryption``: Installs the required dependencies for :class:`.EncryptorSerializer`.
+
+ * ``msgpack``: Installs the required dependencies for :class:`.MessagePackSerializer`.
+
+* Asynchronous I/O extensions:
+
+ * ``sniffio``: Installs the supported and tested version of :github:repo:`sniffio <python-trio/sniffio>`.
+
+
+Example where the ``cbor`` and ``msgpack`` extensions are installed:
+
+.. code-block:: console
+
+ (.venv) $ pip install "easynetwork[cbor,msgpack]"
+
+
+.. seealso::
+
+ :doc:`/api/async/backend`
+ See how ``sniffio`` is used.
diff --git a/docs/source/quickstart/overview.rst b/docs/source/quickstart/overview.rst
new file mode 100644
index 00000000..15e54b3a
--- /dev/null
+++ b/docs/source/quickstart/overview.rst
@@ -0,0 +1,13 @@
+********
+Overview
+********
+
+EasyNetwork completely encapsulates socket handling, providing a higher-level interface
+that lets an application handle its logic with Python objects,
+without worrying about how to process, send or receive data over the network.
+
+The communication protocol can be whatever you want, be it JSON, pickle, ASCII, struct, base64 encoded,
+compressed, encrypted, or any other format, including ones that are not part of the standard library.
+You choose the data format and the library takes care of the rest.
+
+Works with TCP and UDP.
diff --git a/docs/source/tutorials/echo_client_server_tcp.rst b/docs/source/tutorials/echo_client_server_tcp.rst
new file mode 100644
index 00000000..d35d2260
--- /dev/null
+++ b/docs/source/tutorials/echo_client_server_tcp.rst
@@ -0,0 +1,176 @@
+******************************
+An Echo Client/Server Over TCP
+******************************
+
+To see how to create a server and a client with the minimum requirements,
+let's create a server that will return everything sent by a connected client.
+
+.. include:: ../_include/sync-async-variants.rst
+
+.. contents:: Table of Contents
+ :local:
+
+
+------
+
+
+.. _echo-client-server-tcp-protocol:
+
+The Communication Protocol
+==========================
+
+Before doing all this networking stuff, you need to know what you want to transmit and in what format. It is your :term:`communication protocol`.
+
+Choose The Serializer
+---------------------
+
+There is a bunch of serializers available in :mod:`easynetwork.serializers` for everyone to enjoy:
+
+* :class:`.JSONSerializer`: an :term:`incremental serializer` using the :mod:`json` module.
+
+* :class:`.PickleSerializer`: a :term:`one-shot serializer` using the :mod:`pickle` module.
+
+* :class:`.StringLineSerializer`: an :term:`incremental serializer` for communication based on ASCII character strings (e.g. `FTP`_).
+
+* etc.
+
+For the tutorial, :class:`.JSONSerializer` will be used.
+
+
+Build Your Protocol Object
+--------------------------
+
+For communication via TCP, a :class:`.StreamProtocol` :term:`protocol object` must be created.
+
+.. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/json_protocol.py
+ :linenos:
+ :caption: json_protocol.py
+
+.. note::
+
+ Of course, you are under no obligation to write a subclass. But see :ref:`this note <why-write-a-protocol-subclass>` for details.
+
+
+The Server
+==========
+
+Now that we have established the :term:`communication protocol`, we can create our server.
+
+.. _echo-client-server-tcp-request-handler:
+
+Create Your Request Handler
+---------------------------
+
+First, you must create a request handler class by subclassing the :class:`.AsyncStreamRequestHandler` class and overriding
+its :meth:`~.AsyncStreamRequestHandler.handle` method; this method will process incoming requests.
+
+.. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/echo_request_handler.py
+ :linenos:
+ :caption: echo_request_handler.py
+
+.. note::
+
+ Pay attention to :meth:`~.AsyncStreamRequestHandler.handle`; it is an :std:term:`asynchronous generator` function.
+ All requests sent by the client are literally injected into the generator via the :keyword:`yield` statement.
+
+ .. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/echo_request_handler.py
+ :start-at: async def handle
+ :end-at: return
+ :lineno-match:
+ :emphasize-lines: 6
+ :dedent:
+
+ You can :keyword:`yield` several times if you want to wait for a new packet from the client in the same context.
+
+.. warning::
+
+ Leaving the generator will *not* close the connection; a new generator will be created afterwards.
+ You may, however, explicitly close the connection if you want to::
+
+ await client.aclose()
+
+
+Start The Server
+----------------
+
+Second, you must instantiate the TCP server class, passing it the server's address, the :term:`protocol object` instance,
+and the request handler instance.
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/server.py
+ :linenos:
+ :caption: server.py
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/async_server.py
+ :linenos:
+ :caption: server.py
+
+.. note::
+
+ Setting ``host`` to :data:`None` will bind the server to all interfaces.
+ This means the server is ready to accept connections with IPv4 and IPv6 addresses (if available).
+
+
+The Client
+==========
+
+This is the client side:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/client.py
+ :linenos:
+ :caption: client.py
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/echo_client_server_tcp/async_client.py
+ :linenos:
+ :caption: client.py
+
+
+Outputs
+=======
+
+The output of the example should look something like this:
+
+Server:
+
+.. tabs::
+
+ .. group-tab:: IPv4 connection
+
+ .. code-block:: console
+
+ (.venv) $ python server.py
+ 127.0.0.1 sent {'command-line arguments': ['Hello', 'world!']}
+ 127.0.0.1 sent {'command-line arguments': ['Python', 'is', 'nice']}
+
+ .. group-tab:: IPv6 connection
+
+ .. code-block:: console
+
+ (.venv) $ python server.py
+ ::1 sent {'command-line arguments': ['Hello', 'world!']}
+ ::1 sent {'command-line arguments': ['Python', 'is', 'nice']}
+
+Client:
+
+.. code-block:: console
+
+ (.venv) $ python client.py Hello world!
+ Sent: {'command-line arguments': ['Hello', 'world!']}
+ Received: {'command-line arguments': ['Hello', 'world!']}
+ (.venv) $ python client.py Python is nice
+ Sent: {'command-line arguments': ['Python', 'is', 'nice']}
+ Received: {'command-line arguments': ['Python', 'is', 'nice']}
+
+
+.. include:: ../_include/link-labels.rst
diff --git a/docs/source/tutorials/ftp_server.rst b/docs/source/tutorials/ftp_server.rst
new file mode 100644
index 00000000..5fe9f723
--- /dev/null
+++ b/docs/source/tutorials/ftp_server.rst
@@ -0,0 +1,297 @@
+********************************************************
+Practical application — Build an FTP server from scratch
+********************************************************
+
+.. include:: ../_include/sync-async-variants.rst
+
+.. contents:: Table of Contents
+ :local:
+
+
+------
+
+
+TL;DR
+=====
+
+Yes, I know, you will never need to create your own FTP server (unless you want your own service). However, it is still interesting
+to see the structure of such a model, based on a standardized communication protocol.
+
+The `File Transfer Protocol`_ (as defined in :rfc:`959`) is a good example of how to set up a server with precise rules.
+
+We are not going to implement all the requests (that is not the point). The tutorial will show you how to set up the infrastructure
+and exploit all (or most) of the EasyNetwork library's features.
+
+
+The Communication Protocol
+==========================
+
+FTP requests and responses are transmitted as ASCII strings terminated by a carriage return and line feed (``\r\n``).
+
+Let's say we want to have two classes ``FTPRequest`` and ``FTPReply`` to manage them in our request handler.
+
+``FTPRequest`` Object
+---------------------
+
+An FTP client request consists of a command and, optionally, arguments separated by a space character.
+
+First, we define the exhaustive list of available commands (cf. :rfc:`RFC 959 (Section 4.1) <959#section-4>`):
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_command.py
+ :linenos:
+ :caption: ftp_command.py
+
+.. note::
+
+ See :mod:`enum` module documentation to understand the usage of :class:`~enum.auto` and :meth:`~enum.Enum._generate_next_value_`.
+
+Second, we define the ``FTPRequest`` class that will be used:
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_request.py
+ :linenos:
+ :caption: ftp_request.py
+
+
+``FTPReply`` Object
+-------------------
+
+An FTP reply consists of a three-digit number (transmitted as three alphanumeric characters) followed by some text.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_reply.py
+ :linenos:
+ :caption: ftp_reply.py
+ :end-before: @staticmethod
+
+
+Use Converters To Handle Character Strings
+------------------------------------------
+
+The client will send a character string and expect a character string in return. :class:`.StringLineSerializer` will handle this part,
+but we created our classes precisely so that we do not have to manipulate raw strings.
+
+To remedy this, we will use :term:`converters <converter>` to switch between our ``FTPRequest`` / ``FTPReply`` objects and strings.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_converters.py
+ :linenos:
+ :caption: ftp_converters.py
+
+.. note::
+
+ In :meth:`FTPRequestConverter.create_from_dto_packet`, the arguments are returned as sent, without further processing.
+
+ .. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_converters.py
+ :pyobject: FTPRequestConverter.create_from_dto_packet
+ :start-at: try:
+ :end-at: return FTPRequest
+ :lineno-match:
+ :emphasize-lines: 5
+ :dedent:
+
+ An improvement would be to process them here and not leave the job to the request handler.
+ But since we are not building a real (complete and fully featured) FTP server, we will leave the code as is.
+
+The Protocol Object
+-------------------
+
+Now that we have our business objects, we can create our :term:`protocol object`.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_protocol.py
+ :linenos:
+ :caption: ftp_server_protocol.py
+
+
+.. note::
+
+ Note the use of :class:`.StapledPacketConverter`:
+
+ .. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_protocol.py
+ :pyobject: FTPServerProtocol.__init__
+ :start-at: super().__init__
+ :lineno-match:
+ :emphasize-lines: 3-6
+ :dedent:
+
+ It will create a :term:`composite converter` with our two converters.
+
+
+The Server
+==========
+
+``FTPReply``: Define Default Replies
+------------------------------------
+
+A good way to send standard replies to the client is to define them in dedicated methods.
+
+Here are just a few that will be used in this tutorial.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_reply.py
+ :caption: ftp_reply.py
+ :pyobject: FTPReply
+ :lineno-match:
+
+
+The Request Handler
+-------------------
+
+Let's create this request handler.
+
+Service Initialization
+^^^^^^^^^^^^^^^^^^^^^^
+
+A feature we could have used for the :ref:`echo client/server over TCP tutorial <echo-client-server-tcp-request-handler>`
+is to define actions to be performed at server start/end.
+
+Here, we'll only initialize the logger, but we could also use it to prepare the folders and files that the server should handle
+(location, permissions, file existence, etc.).
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
+ :pyobject: FTPRequestHandler
+ :end-before: async def on_connection
+ :lineno-match:
+ :dedent:
+
+
+Control Connection Hooks
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Here is one of the features brought by :class:`.AsyncStreamRequestHandler`: it is possible to perform actions when the client connects/disconnects.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
+ :pyobject: FTPRequestHandler
+ :start-at: async def on_connection
+ :end-before: async def handle
+ :lineno-match:
+ :dedent:
+
+
+The ``handle()`` Method
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Only ``NOOP`` and ``QUIT`` commands will be implemented for this tutorial. All parse errors are considered syntax errors.
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
+ :pyobject: FTPRequestHandler.handle
+ :lineno-match:
+ :dedent:
+
+
+Full Code
+^^^^^^^^^
+
+.. literalinclude:: ../_include/examples/tutorials/ftp_server/ftp_server_request_handler.py
+ :caption: ftp_server_request_handler.py
+ :linenos:
+
+
+Start The Server
+----------------
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/ftp_server/server.py
+ :linenos:
+ :caption: server.py
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/ftp_server/async_server.py
+ :linenos:
+ :caption: server.py
+
+
+Outputs
+=======
+
+The output of the example should look something like this:
+
+Server:
+
+.. tabs::
+
+ .. group-tab:: IPv4 connection
+
+ .. code-block:: console
+
+ (.venv) $ python server.py
+ [ INFO ] [ easynetwork.api_async.server.tcp ] Start serving at ('::', 21000), ('0.0.0.0', 21000)
+ [ INFO ] [ easynetwork.api_async.server.tcp ] Accepted new connection (address = ('127.0.0.1', 45994))
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('127.0.0.1', 45994): FTPRequest(command=<FTPCommand.NOOP: 'NOOP'>, args=())
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('127.0.0.1', 45994): FTPRequest(command=<FTPCommand.NOOP: 'NOOP'>, args=())
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('127.0.0.1', 45994): FTPRequest(command=<FTPCommand.STOR: 'STOR'>, args=('/path/to/file.txt',))
+ [ WARNING ] [ FTPRequestHandler ] ('127.0.0.1', 45994): PacketConversionError: Command unrecognized: 'UNKNOWN'
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('127.0.0.1', 45994): FTPRequest(command=<FTPCommand.QUIT: 'QUIT'>, args=())
+ [ INFO ] [ easynetwork.api_async.server.tcp ] ('127.0.0.1', 45994) disconnected
+
+ .. group-tab:: IPv6 connection
+
+ .. code-block:: console
+
+ (.venv) $ python server.py
+ [ INFO ] [ easynetwork.api_async.server.tcp ] Start serving at ('::', 21000), ('0.0.0.0', 21000)
+ [ INFO ] [ easynetwork.api_async.server.tcp ] Accepted new connection (address = ('::1', 45994))
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('::1', 45994): FTPRequest(command=<FTPCommand.NOOP: 'NOOP'>, args=())
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('::1', 45994): FTPRequest(command=<FTPCommand.NOOP: 'NOOP'>, args=())
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('::1', 45994): FTPRequest(command=<FTPCommand.STOR: 'STOR'>, args=('/path/to/file.txt',))
+ [ WARNING ] [ FTPRequestHandler ] ('::1', 45994): PacketConversionError: Command unrecognized: 'UNKNOWN'
+ [ INFO ] [ FTPRequestHandler ] Sent by client ('::1', 45994): FTPRequest(command=<FTPCommand.QUIT: 'QUIT'>, args=())
+ [ INFO ] [ easynetwork.api_async.server.tcp ] ('::1', 45994) disconnected
+
+
+Client:
+
+.. note::
+
+ The `File Transfer Protocol`_ is based on the `Telnet protocol`_.
+
+ The :manpage:`telnet(1)` command is used to communicate with another host using the `Telnet protocol`_.
+
+.. tabs::
+
+ .. group-tab:: IPv4 connection
+
+ .. code-block:: console
+
+ $ telnet -4 localhost 21000
+ Trying 127.0.0.1...
+ Connected to localhost.
+ Escape character is '^]'.
+ 220 Service ready for new user.
+ NOOP
+ 200 Command okay.
+ nOoP
+ 200 Command okay.
+ STOR /path/to/file.txt
+ 502 Command not implemented.
+ UNKNOWN command
+ 500 Syntax error, command unrecognized.
+ QUIT
+ 221 Service closing control connection.
+ Connection closed by foreign host.
+
+ .. group-tab:: IPv6 connection
+
+ .. code-block:: console
+
+ $ telnet -6 localhost 21000
+ Trying ::1...
+ Connected to localhost.
+ Escape character is '^]'.
+ 220 Service ready for new user.
+ NOOP
+ 200 Command okay.
+ nOoP
+ 200 Command okay.
+ STOR /path/to/file.txt
+ 502 Command not implemented.
+ UNKNOWN command
+ 500 Syntax error, command unrecognized.
+ QUIT
+ 221 Service closing control connection.
+ Connection closed by foreign host.
+
+
+.. Links
+
+.. include:: ../_include/link-labels.rst
diff --git a/docs/source/tutorials/index.rst b/docs/source/tutorials/index.rst
new file mode 100644
index 00000000..b7336c4f
--- /dev/null
+++ b/docs/source/tutorials/index.rst
@@ -0,0 +1,10 @@
+*********
+Tutorials
+*********
+
+.. toctree::
+ :maxdepth: 2
+
+ echo_client_server_tcp
+ udp_endpoint
+ ftp_server
diff --git a/docs/source/tutorials/udp_endpoint.rst b/docs/source/tutorials/udp_endpoint.rst
new file mode 100644
index 00000000..0c56967a
--- /dev/null
+++ b/docs/source/tutorials/udp_endpoint.rst
@@ -0,0 +1,91 @@
+*********************
+Create A UDP Endpoint
+*********************
+
+This tutorial will show you how to create a ready-to-use datagram endpoint over UDP.
+
+.. include:: ../_include/sync-async-variants.rst
+
+.. contents:: Table of Contents
+ :local:
+
+
+------
+
+
+The Communication Protocol
+==========================
+
+You will need a :term:`protocol object`, as for the :ref:`echo client/server over TCP <echo-client-server-tcp-protocol>`.
+
+For the tutorial, :class:`.JSONSerializer` will also be used.
+
+For communication via UDP, a :class:`.DatagramProtocol` object must be created this time.
+
+.. literalinclude:: ../_include/examples/tutorials/udp_endpoint/json_protocol.py
+ :linenos:
+ :caption: json_protocol.py
+ :emphasize-lines: 5,14
+
+
+The UDP Endpoint
+================
+
+Here is an example of how to use a UDP endpoint:
+
+.. tabs::
+
+ .. group-tab:: Synchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/udp_endpoint/endpoint.py
+ :linenos:
+ :caption: endpoint.py
+
+ .. group-tab:: Asynchronous
+
+ .. literalinclude:: ../_include/examples/tutorials/udp_endpoint/async_endpoint.py
+ :linenos:
+ :caption: endpoint.py
+
+The output of the example should look something like this:
+
+Receiver:
+
+.. tabs::
+
+ .. group-tab:: IPv4 connection
+
+ .. code-block:: console
+
+ (.venv) $ python endpoint.py receiver
+ Receiver available on ('127.0.0.1', 58456)
+ From ('127.0.0.1', 35985): {'command-line arguments': ['Hello', 'world!']}
+
+ .. group-tab:: IPv6 connection
+
+ .. code-block:: console
+
+ (.venv) $ python endpoint.py receiver
+ Receiver available on ('::1', 58456)
+ From ('::1', 35985): {'command-line arguments': ['Hello', 'world!']}
+
+
+Sender:
+
+.. tabs::
+
+ .. group-tab:: IPv4 connection
+
+ .. code-block:: console
+
+ (.venv) $ python endpoint.py sender "127.0.0.1,58456" Hello world!
+ Sent to ('127.0.0.1', 58456) : {'command-line arguments': ['Hello', 'world!']}
+ Received from ('127.0.0.1', 58456) : {'command-line arguments': ['Hello', 'world!']}
+
+ .. group-tab:: IPv6 connection
+
+ .. code-block:: console
+
+ (.venv) $ python endpoint.py sender "::1,58456" Hello world!
+ Sent to ('::1', 58456) : {'command-line arguments': ['Hello', 'world!']}
+ Received from ('::1', 58456) : {'command-line arguments': ['Hello', 'world!']}
diff --git a/pdm.lock b/pdm.lock
index 5ca2afac..892d061c 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -2,11 +2,74 @@
# It is not intended for manual editing.
[metadata]
-groups = ["default", "cbor", "dev", "encryption", "flake8", "format", "msgpack", "mypy", "sniffio", "test", "tox", "uvloop", "bandit"]
+groups = ["default", "bandit", "cbor", "dev", "doc", "encryption", "flake8", "format", "msgpack", "mypy", "sniffio", "test", "tox", "uvloop"]
cross_platform = true
static_urls = false
lock_version = "4.3"
-content_hash = "sha256:8e44d15bbb6ff1d24d1843bbc1d7482eccc928d03a2beceee2699624c0d40a00"
+content_hash = "sha256:3c4860f3656bdfb2ff2963a72acf759478d2556b520626f5397cde1d3a000971"
+
+[[package]]
+name = "alabaster"
+version = "0.7.13"
+requires_python = ">=3.6"
+summary = "A configurable sidebar-enabled Sphinx theme"
+files = [
+ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"},
+ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
+]
+
+[[package]]
+name = "apeye"
+version = "1.4.0"
+requires_python = ">=3.6.1"
+summary = "Handy tools for working with URLs and APIs."
+dependencies = [
+ "apeye-core>=1.0.0b2",
+ "domdf-python-tools>=2.6.0",
+ "platformdirs>=2.3.0",
+ "requests>=2.24.0",
+]
+files = [
+ {file = "apeye-1.4.0-py3-none-any.whl", hash = "sha256:32f10f5629c39a0d2a4bc00b16827b43b912c56510395329cb4cc823954ec2be"},
+ {file = "apeye-1.4.0.tar.gz", hash = "sha256:db616f14f1e7c09c5ff76230b6a78ebada6e34bed80596bbb9f1146d94107cdb"},
+]
+
+[[package]]
+name = "apeye-core"
+version = "1.1.4"
+requires_python = ">=3.6.1"
+summary = "Core (offline) functionality for the apeye library."
+dependencies = [
+ "domdf-python-tools>=2.6.0",
+ "idna>=2.5",
+]
+files = [
+ {file = "apeye_core-1.1.4-py3-none-any.whl", hash = "sha256:084bc696448d3ac428fece41c1f2eb08fa9d9ce1d1b2f4d43187e3def4528a60"},
+ {file = "apeye_core-1.1.4.tar.gz", hash = "sha256:72bb89fed3baa647cb81aa28e1d851787edcbf9573853b5d2b5f87c02f50eaf5"},
+]
+
+[[package]]
+name = "autodocsumm"
+version = "0.2.11"
+requires_python = ">=3.7"
+summary = "Extended sphinx autodoc including automatic autosummaries"
+dependencies = [
+ "Sphinx<8.0,>=2.2",
+]
+files = [
+ {file = "autodocsumm-0.2.11-py3-none-any.whl", hash = "sha256:f1d0a623bf1ad64d979a9e23fd360d1fb1b8f869beaf3197f711552cddc174e2"},
+ {file = "autodocsumm-0.2.11.tar.gz", hash = "sha256:183212bd9e9f3b58a96bb21b7958ee4e06224107aa45b2fd894b61b83581b9a9"},
+]
+
+[[package]]
+name = "babel"
+version = "2.12.1"
+requires_python = ">=3.7"
+summary = "Internationalization utilities"
+files = [
+ {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"},
+ {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"},
+]
[[package]]
name = "bandit"
@@ -25,9 +88,22 @@ files = [
{file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"},
]
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.2"
+requires_python = ">=3.6.0"
+summary = "Screen-scraping library"
+dependencies = [
+ "soupsieve>1.2",
+]
+files = [
+ {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"},
+ {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"},
+]
+
[[package]]
name = "black"
-version = "23.7.0"
+version = "23.9.1"
requires_python = ">=3.8"
summary = "The uncompromising code formatter."
dependencies = [
@@ -38,13 +114,13 @@ dependencies = [
"platformdirs>=2",
]
files = [
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"},
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"},
- {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"},
- {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"},
- {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"},
- {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"},
- {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"},
+ {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"},
+ {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"},
+ {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"},
+ {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"},
+ {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"},
+ {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"},
+ {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"},
]
[[package]]
@@ -57,6 +133,35 @@ files = [
{file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"},
]
+[[package]]
+name = "cachecontrol"
+version = "0.13.1"
+requires_python = ">=3.7"
+summary = "httplib2 caching for requests"
+dependencies = [
+ "msgpack>=0.5.2",
+ "requests>=2.16.0",
+]
+files = [
+ {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"},
+ {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"},
+]
+
+[[package]]
+name = "cachecontrol"
+version = "0.13.1"
+extras = ["filecache"]
+requires_python = ">=3.7"
+summary = "httplib2 caching for requests"
+dependencies = [
+ "cachecontrol==0.13.1",
+ "filelock>=3.8.0",
+]
+files = [
+ {file = "cachecontrol-0.13.1-py3-none-any.whl", hash = "sha256:95dedbec849f46dda3137866dc28b9d133fc9af55f5b805ab1291833e4457aa4"},
+ {file = "cachecontrol-0.13.1.tar.gz", hash = "sha256:f012366b79d2243a6118309ce73151bf52a38d4a5dac8ea57f09bd29087e506b"},
+]
+
[[package]]
name = "cachetools"
version = "5.3.1"
@@ -84,6 +189,16 @@ files = [
{file = "cbor2-5.4.6.tar.gz", hash = "sha256:b893500db0fe033e570c3adc956af6eefc57e280026bd2d86fd53da9f1e594d7"},
]
+[[package]]
+name = "certifi"
+version = "2023.7.22"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+files = [
+ {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
+ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+]
+
[[package]]
name = "cffi"
version = "1.15.1"
@@ -107,12 +222,12 @@ files = [
[[package]]
name = "cfgv"
-version = "3.3.1"
-requires_python = ">=3.6.1"
+version = "3.4.0"
+requires_python = ">=3.8"
summary = "Validate configuration and produce human readable error messages."
files = [
- {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
- {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
+ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
@@ -125,6 +240,31 @@ files = [
{file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"},
]
+[[package]]
+name = "charset-normalizer"
+version = "3.2.0"
+requires_python = ">=3.7.0"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+files = [
+ {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
+ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
+]
+
[[package]]
name = "click"
version = "8.1.6"
@@ -150,64 +290,66 @@ files = [
[[package]]
name = "coverage"
-version = "7.2.7"
-requires_python = ">=3.7"
+version = "7.3.0"
+requires_python = ">=3.8"
summary = "Code coverage measurement for Python"
files = [
- {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
- {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
- {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
- {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
- {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
- {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
- {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
- {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
- {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+ {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"},
+ {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"},
+ {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"},
+ {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"},
+ {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"},
+ {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"},
+ {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"},
+ {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"},
+ {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"},
+ {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"},
]
[[package]]
name = "coverage"
-version = "7.2.7"
+version = "7.3.0"
extras = ["toml"]
-requires_python = ">=3.7"
+requires_python = ">=3.8"
summary = "Code coverage measurement for Python"
dependencies = [
- "coverage==7.2.7",
-]
-files = [
- {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
- {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
- {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
- {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
- {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
- {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
- {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
- {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
- {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
- {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
- {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
- {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
- {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+ "coverage==7.3.0",
+]
+files = [
+ {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"},
+ {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"},
+ {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"},
+ {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"},
+ {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"},
+ {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"},
+ {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"},
+ {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"},
+ {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"},
+ {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"},
+ {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"},
+ {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"},
+ {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"},
+ {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"},
]
[[package]]
@@ -244,6 +386,30 @@ files = [
{file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"},
]
+[[package]]
+name = "cssutils"
+version = "2.7.1"
+requires_python = ">=3.7"
+summary = "A CSS Cascading Style Sheets library for Python"
+files = [
+ {file = "cssutils-2.7.1-py3-none-any.whl", hash = "sha256:1e92e0d9dab2ec8af9f38d715393964ba533dc3beacab9b072511dfc241db775"},
+ {file = "cssutils-2.7.1.tar.gz", hash = "sha256:340ecfd9835d21df8f98500f0dfcea0aee41cb4e19ecbc2cf94f0a6d36d7cb6c"},
+]
+
+[[package]]
+name = "dict2css"
+version = "0.3.0"
+requires_python = ">=3.6"
+summary = "A μ-library for constructing cascading style sheets from Python dictionaries."
+dependencies = [
+ "cssutils>=2.2.0",
+ "domdf-python-tools>=2.2.0",
+]
+files = [
+ {file = "dict2css-0.3.0-py3-none-any.whl", hash = "sha256:ef934ce73a225fdd5f811b484fe9e2dd768f7ef14a89fc8f4eb5672597131d00"},
+ {file = "dict2css-0.3.0.tar.gz", hash = "sha256:1e8b1bf580dca2083198f88a60ec88c878a8829d760dfe45483ef80fe2905117"},
+]
+
[[package]]
name = "distlib"
version = "0.3.7"
@@ -253,6 +419,61 @@ files = [
{file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
]
+[[package]]
+name = "docutils"
+version = "0.18.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "Docutils -- Python Documentation Utilities"
+files = [
+ {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"},
+ {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"},
+]
+
+[[package]]
+name = "domdf-python-tools"
+version = "3.6.1"
+requires_python = ">=3.6"
+summary = "Helpful functions for Python 🐍 🛠️"
+dependencies = [
+ "natsort>=7.0.1",
+ "typing-extensions>=3.7.4.1",
+]
+files = [
+ {file = "domdf_python_tools-3.6.1-py3-none-any.whl", hash = "sha256:e18158460850957f18e740eb94ede56f580ddb0cb162ab9d9834ed8bbb1b6431"},
+ {file = "domdf_python_tools-3.6.1.tar.gz", hash = "sha256:acc04563d23bce4d437dd08af6b9bea788328c412772a044d8ca428a7ad861be"},
+]
+
+[[package]]
+name = "enum-tools"
+version = "0.11.0"
+requires_python = ">=3.6"
+summary = "Tools to expand Python's enum module."
+dependencies = [
+ "pygments>=2.6.1",
+ "typing-extensions>=3.7.4.3",
+]
+files = [
+ {file = "enum_tools-0.11.0-py3-none-any.whl", hash = "sha256:9e76186ff4fd1798a64a855d334e245a7d2b67970c40029acccff06c58bf0535"},
+ {file = "enum_tools-0.11.0.tar.gz", hash = "sha256:ed10ae4c2109c52e6ca17505a3bdb173b2554f5f0449677621829023a9d8bd33"},
+]
+
+[[package]]
+name = "enum-tools"
+version = "0.11.0"
+extras = ["sphinx"]
+requires_python = ">=3.6"
+summary = "Tools to expand Python's enum module."
+dependencies = [
+ "enum-tools==0.11.0",
+ "sphinx-jinja2-compat>=0.1.1",
+ "sphinx-toolbox>=2.16.0",
+ "sphinx>=3.4.0",
+]
+files = [
+ {file = "enum_tools-0.11.0-py3-none-any.whl", hash = "sha256:9e76186ff4fd1798a64a855d334e245a7d2b67970c40029acccff06c58bf0535"},
+ {file = "enum_tools-0.11.0.tar.gz", hash = "sha256:ed10ae4c2109c52e6ca17505a3bdb173b2554f5f0449677621829023a9d8bd33"},
+]
+
[[package]]
name = "execnet"
version = "2.0.2"
@@ -265,12 +486,12 @@ files = [
[[package]]
name = "filelock"
-version = "3.12.2"
-requires_python = ">=3.7"
+version = "3.12.4"
+requires_python = ">=3.8"
summary = "A platform independent file lock."
files = [
- {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"},
- {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"},
+ {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"},
+ {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"},
]
[[package]]
@@ -340,6 +561,20 @@ files = [
{file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"},
]
+[[package]]
+name = "html5lib"
+version = "1.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "HTML parser based on the WHATWG HTML specification"
+dependencies = [
+ "six>=1.9",
+ "webencodings",
+]
+files = [
+ {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
+ {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
+]
+
[[package]]
name = "identify"
version = "2.5.26"
@@ -360,6 +595,16 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+summary = "Getting image size from png/jpeg/jpeg2000/gif file"
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
[[package]]
name = "iniconfig"
version = "2.0.0"
@@ -380,6 +625,19 @@ files = [
{file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
]
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+requires_python = ">=3.7"
+summary = "A very fast and expressive template engine."
+dependencies = [
+ "MarkupSafe>=2.0",
+]
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+
[[package]]
name = "markdown-it-py"
version = "3.0.0"
@@ -393,6 +651,35 @@ files = [
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]
+[[package]]
+name = "markupsafe"
+version = "2.1.3"
+requires_python = ">=3.7"
+summary = "Safely add untrusted strings to HTML/XML markup."
+files = [
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
+ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+]
+
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -444,7 +731,7 @@ files = [
[[package]]
name = "mypy"
-version = "1.5.0"
+version = "1.5.1"
requires_python = ">=3.8"
summary = "Optional static typing for Python"
dependencies = [
@@ -452,13 +739,18 @@ dependencies = [
"typing-extensions>=4.1.0",
]
files = [
- {file = "mypy-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c36011320e452eb30bec38b9fd3ba20569dc9545d7d4540d967f3ea1fab9c374"},
- {file = "mypy-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3940cf5845b2512b3ab95463198b0cdf87975dfd17fdcc6ce9709a9abe09e69"},
- {file = "mypy-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9166186c498170e1ff478a7f540846b2169243feb95bc228d39a67a1a450cdc6"},
- {file = "mypy-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:725b57a19b7408ef66a0fd9db59b5d3e528922250fb56e50bded27fea9ff28f0"},
- {file = "mypy-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:eec5c927aa4b3e8b4781840f1550079969926d0a22ce38075f6cfcf4b13e3eb4"},
- {file = "mypy-1.5.0-py3-none-any.whl", hash = "sha256:69b32d0dedd211b80f1b7435644e1ef83033a2af2ac65adcdc87c38db68a86be"},
- {file = "mypy-1.5.0.tar.gz", hash = "sha256:f3460f34b3839b9bc84ee3ed65076eb827cd99ed13ed08d723f9083cada4a212"},
+ {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"},
+ {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"},
+ {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"},
+ {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"},
+ {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"},
+ {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"},
+ {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"},
+ {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"},
+ {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"},
+ {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"},
+ {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"},
+ {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"},
]
[[package]]
@@ -471,6 +763,16 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
+[[package]]
+name = "natsort"
+version = "8.4.0"
+requires_python = ">=3.7"
+summary = "Simple yet flexible natural sorting in Python."
+files = [
+ {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"},
+ {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"},
+]
+
[[package]]
name = "nodeenv"
version = "1.8.0"
@@ -526,17 +828,17 @@ files = [
[[package]]
name = "pluggy"
-version = "1.2.0"
-requires_python = ">=3.7"
+version = "1.3.0"
+requires_python = ">=3.8"
summary = "plugin and hook calling mechanisms for python"
files = [
- {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
- {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
+ {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
+ {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]
[[package]]
name = "pre-commit"
-version = "3.3.3"
+version = "3.4.0"
requires_python = ">=3.8"
summary = "A framework for managing and maintaining multi-language pre-commit hooks."
dependencies = [
@@ -547,8 +849,8 @@ dependencies = [
"virtualenv>=20.10.0",
]
files = [
- {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"},
- {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"},
+ {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"},
+ {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"},
]
[[package]]
@@ -593,20 +895,20 @@ files = [
[[package]]
name = "pyproject-api"
-version = "1.5.3"
-requires_python = ">=3.7"
+version = "1.6.1"
+requires_python = ">=3.8"
summary = "API to interact with the python pyproject.toml based projects"
dependencies = [
"packaging>=23.1",
]
files = [
- {file = "pyproject_api-1.5.3-py3-none-any.whl", hash = "sha256:14cf09828670c7b08842249c1f28c8ee6581b872e893f81b62d5465bec41502f"},
- {file = "pyproject_api-1.5.3.tar.gz", hash = "sha256:ffb5b2d7cad43f5b2688ab490de7c4d3f6f15e0b819cb588c4b771567c9729eb"},
+ {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"},
+ {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"},
]
[[package]]
name = "pytest"
-version = "7.4.0"
+version = "7.4.2"
requires_python = ">=3.7"
summary = "pytest: simple powerful testing with Python"
dependencies = [
@@ -616,8 +918,8 @@ dependencies = [
"pluggy<2.0,>=0.12",
]
files = [
- {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
- {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
+ {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"},
+ {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"},
]
[[package]]
@@ -685,11 +987,34 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
+[[package]]
+name = "requests"
+version = "2.31.0"
+requires_python = ">=3.7"
+summary = "Python HTTP for Humans."
+dependencies = [
+ "certifi>=2017.4.17",
+ "charset-normalizer<4,>=2",
+ "idna<4,>=2.5",
+ "urllib3<3,>=1.21.1",
+]
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
[[package]]
name = "rich"
version = "13.5.2"
@@ -704,6 +1029,34 @@ files = [
{file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"},
]
+[[package]]
+name = "ruamel-yaml"
+version = "0.17.32"
+requires_python = ">=3"
+summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+dependencies = [
+ "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.12\"",
+]
+files = [
+ {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"},
+ {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"},
+]
+
+[[package]]
+name = "ruamel-yaml-clib"
+version = "0.2.7"
+requires_python = ">=3.5"
+summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+files = [
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"},
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"},
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"},
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"},
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"},
+ {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"},
+ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"},
+]
+
[[package]]
name = "setuptools"
version = "68.0.0"
@@ -714,6 +1067,16 @@ files = [
{file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"},
]
+[[package]]
+name = "six"
+version = "1.16.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+summary = "Python 2 and 3 compatibility utilities"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
[[package]]
name = "smmap"
version = "5.0.0"
@@ -734,6 +1097,239 @@ files = [
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+summary = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.4.1"
+requires_python = ">=3.7"
+summary = "A modern CSS selector implementation for Beautiful Soup."
+files = [
+ {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"},
+ {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"},
+]
+
+[[package]]
+name = "sphinx"
+version = "6.2.1"
+requires_python = ">=3.8"
+summary = "Python documentation generator"
+dependencies = [
+ "Jinja2>=3.0",
+ "Pygments>=2.13",
+ "alabaster<0.8,>=0.7",
+ "babel>=2.9",
+ "colorama>=0.4.5; sys_platform == \"win32\"",
+ "docutils<0.20,>=0.18.1",
+ "imagesize>=1.3",
+ "packaging>=21.0",
+ "requests>=2.25.0",
+ "snowballstemmer>=2.0",
+ "sphinxcontrib-applehelp",
+ "sphinxcontrib-devhelp",
+ "sphinxcontrib-htmlhelp>=2.0.0",
+ "sphinxcontrib-jsmath",
+ "sphinxcontrib-qthelp",
+ "sphinxcontrib-serializinghtml>=1.1.5",
+]
+files = [
+ {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"},
+ {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"},
+]
+
+[[package]]
+name = "sphinx-autodoc-typehints"
+version = "1.23.0"
+requires_python = ">=3.7"
+summary = "Type hints (PEP 484) support for the Sphinx autodoc extension"
+dependencies = [
+ "sphinx>=5.3",
+]
+files = [
+ {file = "sphinx_autodoc_typehints-1.23.0-py3-none-any.whl", hash = "sha256:ac099057e66b09e51b698058ba7dd76e57e1fe696cd91b54e121d3dad188f91d"},
+ {file = "sphinx_autodoc_typehints-1.23.0.tar.gz", hash = "sha256:5d44e2996633cdada499b6d27a496ddf9dbc95dd1f0f09f7b37940249e61f6e9"},
+]
+
+[[package]]
+name = "sphinx-jinja2-compat"
+version = "0.2.0"
+requires_python = ">=3.6"
+summary = "Patches Jinja2 v3 to restore compatibility with earlier Sphinx versions."
+dependencies = [
+ "jinja2>=2.10",
+ "markupsafe>=1",
+]
+files = [
+ {file = "sphinx_jinja2_compat-0.2.0-py3-none-any.whl", hash = "sha256:a5f3112d6873991c2cf28e37287163a0485d9c0812863b8aa4df7182722501fb"},
+ {file = "sphinx_jinja2_compat-0.2.0.tar.gz", hash = "sha256:c41346d859653e202b623f4236da8936243ed734abf5984adc3bef59d6f9a946"},
+]
+
+[[package]]
+name = "sphinx-prompt"
+version = "1.5.0"
+summary = "Sphinx directive to add unselectable prompt"
+dependencies = [
+ "Sphinx",
+ "pygments",
+]
+files = [
+ {file = "sphinx_prompt-1.5.0-py3-none-any.whl", hash = "sha256:fa4e90d8088b5a996c76087d701fc7e31175f8b9dc4aab03a507e45051067162"},
+]
+
+[[package]]
+name = "sphinx-rtd-theme"
+version = "1.3.0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+summary = "Read the Docs theme for Sphinx"
+dependencies = [
+ "docutils<0.19",
+ "sphinx<8,>=1.6",
+ "sphinxcontrib-jquery<5,>=4",
+]
+files = [
+ {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"},
+ {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"},
+]
+
+[[package]]
+name = "sphinx-tabs"
+version = "3.4.1"
+requires_python = "~=3.7"
+summary = "Tabbed views for Sphinx"
+dependencies = [
+ "docutils~=0.18.0",
+ "pygments",
+ "sphinx",
+]
+files = [
+ {file = "sphinx-tabs-3.4.1.tar.gz", hash = "sha256:d2a09f9e8316e400d57503f6df1c78005fdde220e5af589cc79d493159e1b832"},
+ {file = "sphinx_tabs-3.4.1-py3-none-any.whl", hash = "sha256:7cea8942aeccc5d01a995789c01804b787334b55927f29b36ba16ed1e7cb27c6"},
+]
+
+[[package]]
+name = "sphinx-toolbox"
+version = "3.5.0"
+requires_python = ">=3.7"
+summary = "Box of handy tools for Sphinx 🧰 📔"
+dependencies = [
+ "apeye>=0.4.0",
+ "autodocsumm>=0.2.0",
+ "beautifulsoup4>=4.9.1",
+ "cachecontrol[filecache]>=0.13.0",
+ "dict2css>=0.2.3",
+ "docutils>=0.16",
+ "domdf-python-tools>=2.9.0",
+ "filelock>=3.8.0",
+ "html5lib>=1.1",
+ "ruamel-yaml>=0.16.12",
+ "sphinx-autodoc-typehints>=1.11.1",
+ "sphinx-jinja2-compat>=0.1.0",
+ "sphinx-prompt>=1.1.0",
+ "sphinx-tabs<3.5.0,>=1.2.1",
+ "sphinx>=3.2.0",
+ "tabulate>=0.8.7",
+ "typing-extensions!=3.10.0.1,>=3.7.4.3",
+]
+files = [
+ {file = "sphinx_toolbox-3.5.0-py3-none-any.whl", hash = "sha256:20dfd3566717db6f2da7a400a54dc4b946f064fb31250fa44802d54cfb9b8a03"},
+ {file = "sphinx_toolbox-3.5.0.tar.gz", hash = "sha256:e5b5a7153f1997572d71a06aaf6cec225483492ec2c60097a84f15aad6df18b7"},
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.6"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+dependencies = [
+ "Sphinx>=5",
+]
+files = [
+ {file = "sphinxcontrib_applehelp-1.0.6-py3-none-any.whl", hash = "sha256:c0578efa23cab5a2f3aaa8af5691b952433f4fdfaac255befd3452448e7ea4a4"},
+ {file = "sphinxcontrib_applehelp-1.0.6.tar.gz", hash = "sha256:a59274de7a952a99af36b8a5092352d9249279c0e3280b7dceaae8e15873c942"},
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.4"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+dependencies = [
+ "Sphinx>=5",
+]
+files = [
+ {file = "sphinxcontrib_devhelp-1.0.4-py3-none-any.whl", hash = "sha256:d4e20a17f78865d4096733989b5efa0d5e7743900e98e1f6ecd6f489380febc8"},
+ {file = "sphinxcontrib_devhelp-1.0.4.tar.gz", hash = "sha256:4fd751c63dc40895ac8740948f26bf1a3c87e4e441cc008672abd1cb2bc8a3d1"},
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.3"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+dependencies = [
+ "Sphinx>=5",
+]
+files = [
+ {file = "sphinxcontrib_htmlhelp-2.0.3-py3-none-any.whl", hash = "sha256:abee4e6c5471203ad2fc40dc6a16ed99884a5d6b15a6f79c9269a7e82cf04149"},
+ {file = "sphinxcontrib_htmlhelp-2.0.3.tar.gz", hash = "sha256:14358d0f88ccf58447f2b54343cdcc0012f32de2f8d27cf934fdbc0b362f9597"},
+]
+
+[[package]]
+name = "sphinxcontrib-jquery"
+version = "4.1"
+requires_python = ">=2.7"
+summary = "Extension to include jQuery on newer Sphinx releases"
+dependencies = [
+ "Sphinx>=1.8",
+]
+files = [
+ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"},
+ {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"},
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+requires_python = ">=3.5"
+summary = "A sphinx extension which renders display math in HTML via JavaScript"
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.5"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+dependencies = [
+ "Sphinx>=5",
+]
+files = [
+ {file = "sphinxcontrib_qthelp-1.0.5-py3-none-any.whl", hash = "sha256:962730a6ad15d21fd6760b14c9e95c00a097413595aa6ee871dd9dfa4b002a16"},
+ {file = "sphinxcontrib_qthelp-1.0.5.tar.gz", hash = "sha256:d31d1a1beaf3894866bb318fb712f1edc82687f1c06235a01e5b2c50c36d5c40"},
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.9"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+dependencies = [
+ "Sphinx>=5",
+]
+files = [
+ {file = "sphinxcontrib_serializinghtml-1.1.9-py3-none-any.whl", hash = "sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1"},
+ {file = "sphinxcontrib_serializinghtml-1.1.9.tar.gz", hash = "sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54"},
+]
+
[[package]]
name = "stevedore"
version = "5.1.0"
@@ -748,24 +1344,34 @@ files = [
]
[[package]]
-name = "tox"
-version = "4.7.0"
+name = "tabulate"
+version = "0.9.0"
requires_python = ">=3.7"
+summary = "Pretty-print tabular data"
+files = [
+ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
+ {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
+]
+
+[[package]]
+name = "tox"
+version = "4.11.3"
+requires_python = ">=3.8"
summary = "tox is a generic virtualenv management and test command line tool"
dependencies = [
"cachetools>=5.3.1",
- "chardet>=5.1",
+ "chardet>=5.2",
"colorama>=0.4.6",
- "filelock>=3.12.2",
+ "filelock>=3.12.3",
"packaging>=23.1",
- "platformdirs>=3.9.1",
- "pluggy>=1.2",
- "pyproject-api>=1.5.3",
- "virtualenv>=20.24.1",
+ "platformdirs>=3.10",
+ "pluggy>=1.3",
+ "pyproject-api>=1.6.1",
+ "virtualenv>=20.24.3",
]
files = [
- {file = "tox-4.7.0-py3-none-any.whl", hash = "sha256:79399a3d4641d1fd15eb6bd62c2f35923988038bf0ecf37a688b5e7a767de7d7"},
- {file = "tox-4.7.0.tar.gz", hash = "sha256:89120e1568c763924301cfde61ba7d4b5c4615eeb1086d5370deb03e9cf63c41"},
+ {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"},
+ {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"},
]
[[package]]
@@ -810,6 +1416,16 @@ files = [
{file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
]
+[[package]]
+name = "urllib3"
+version = "2.0.4"
+requires_python = ">=3.7"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+files = [
+ {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"},
+ {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"},
+]
+
[[package]]
name = "uvloop"
version = "0.17.0"
@@ -827,7 +1443,7 @@ files = [
[[package]]
name = "virtualenv"
-version = "20.24.2"
+version = "20.24.3"
requires_python = ">=3.7"
summary = "Virtual Python Environment builder"
dependencies = [
@@ -836,6 +1452,15 @@ dependencies = [
"platformdirs<4,>=3.9.1",
]
files = [
- {file = "virtualenv-20.24.2-py3-none-any.whl", hash = "sha256:43a3052be36080548bdee0b42919c88072037d50d56c28bd3f853cbe92b953ff"},
- {file = "virtualenv-20.24.2.tar.gz", hash = "sha256:fd8a78f46f6b99a67b7ec5cf73f92357891a7b3a40fd97637c27f854aae3b9e0"},
+ {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"},
+ {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"},
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+summary = "Character encoding aliases for legacy web content"
+files = [
+ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
]
diff --git a/pyproject.toml b/pyproject.toml
index fa5ecc3a..c4d468f2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,9 @@ classifiers = [
"Framework :: AsyncIO",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
- "Operating System :: OS Independent",
+ "Operating System :: Microsoft :: Windows",
+ "Operating System :: POSIX :: BSD",
+ "Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
@@ -40,7 +42,11 @@ keywords = [
]
[project.urls]
+# TODO: "Homepage" will be the documentation URL
Homepage = "https://github.com/francis-clairicia/EasyNetwork"
+"Issue Tracker" = "https://github.com/francis-clairicia/EasyNetwork/issues"
+"Release Notes" = "https://github.com/francis-clairicia/EasyNetwork/releases"
+"Source Code" = "https://github.com/francis-clairicia/EasyNetwork"
[project.optional-dependencies]
cbor = [
@@ -55,9 +61,6 @@ encryption = [
sniffio = [
"sniffio>=1.3.0",
]
-uvloop = [
- "uvloop~=0.17.0; sys_platform!='win32'",
-]
[project.entry-points."easynetwork.async.backends"]
asyncio = "easynetwork_asyncio:AsyncioBackend"
@@ -69,6 +72,9 @@ dev = [
"pre-commit>=2.20.0",
"bump2version>=1.0.1",
]
+uvloop = [
+ "uvloop~=0.17.0; sys_platform!='win32'",
+]
tox = [
"tox<5,>=4.0.8",
]
@@ -77,7 +83,7 @@ format = [
"black>=22.6.0",
]
mypy = [
- "mypy==1.5.0",
+ "mypy~=1.5.1",
"types-cryptography>=3.3.23.2",
"msgpack-types>=0.2.0",
]
@@ -90,7 +96,7 @@ bandit = [
"bandit>=1.7.5",
]
test = [
- "pytest~=7.3",
+ "pytest~=7.4,>=7.4.2",
"pytest-xdist>=3.3.1",
"pytest-mock~=3.10.0",
"pytest-cov<5,>=4.0",
@@ -98,6 +104,13 @@ test = [
"trove-classifiers==2023.8.7",
"trustme>=1.0.0",
]
+doc = [
+ "sphinx>=6.2.1,<7",
+ "sphinx-rtd-theme>=1.2.2",
+ "sphinx-tabs>=3.4.1",
+ "sphinx-toolbox>=3.5.0",
+ "enum-tools[sphinx]>=0.10.0",
+]
############################ hatchling configuration ############################
@@ -124,6 +137,9 @@ combine_as_imports = true
sections = ["FUTURE", "STDLIB", "EASYNETWORK", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
known_easynetwork = ["easynetwork", "easynetwork_asyncio"]
add_imports = ["from __future__ import annotations"]
+extend_skip = [
+ "docs/source/conf.py"
+]
[tool.black]
line-length = 130
diff --git a/src/easynetwork/__init__.py b/src/easynetwork/__init__.py
index 8e9aa52e..3b423ce7 100644
--- a/src/easynetwork/__init__.py
+++ b/src/easynetwork/__init__.py
@@ -1,9 +1,20 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""The easiest way to use sockets in Python
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""The easiest way to use sockets in Python!
-EasyNetwork is a high-level interface for networking in Python
+EasyNetwork is a high-level interface for networking in Python.
"""
from __future__ import annotations
@@ -12,11 +23,11 @@
__author__ = "FrankySnow9"
__contact__ = "clairicia.rcj.francis@gmail.com"
-__copyright__ = "Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine"
+__copyright__ = "Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine"
__credits__ = ["FrankySnow9"]
__deprecated__ = False
__email__ = "clairicia.rcj.francis@gmail.com"
-__license__ = "MIT"
+__license__ = "Apache 2.0"
__maintainer__ = "FrankySnow9"
__status__ = "Development"
__version__ = "1.0.0rc4"
diff --git a/src/easynetwork/_typevars.py b/src/easynetwork/_typevars.py
new file mode 100644
index 00000000..ef239c6c
--- /dev/null
+++ b/src/easynetwork/_typevars.py
@@ -0,0 +1,37 @@
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Common type variables for EasyNetwork's classes"""
+
+from __future__ import annotations
+
+__all__ = [
+ "_DTOPacketT",
+ "_PacketT",
+ "_ReceivedPacketT",
+ "_RequestT",
+ "_ResponseT",
+ "_SentPacketT",
+]
+
+import typing
+
+_DTOPacketT = typing.TypeVar("_DTOPacketT")
+
+_SentPacketT = typing.TypeVar("_SentPacketT")
+_ReceivedPacketT = typing.TypeVar("_ReceivedPacketT")
+_PacketT = typing.TypeVar("_PacketT")
+
+_RequestT = typing.TypeVar("_RequestT")
+_ResponseT = typing.TypeVar("_ResponseT")
diff --git a/src/easynetwork/api_async/__init__.py b/src/easynetwork/api_async/__init__.py
index 150bd0ce..3569ef19 100644
--- a/src/easynetwork/api_async/__init__.py
+++ b/src/easynetwork/api_async/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""
diff --git a/src/easynetwork/api_async/backend/__init__.py b/src/easynetwork/api_async/backend/__init__.py
index 150bd0ce..59190e0c 100644
--- a/src/easynetwork/api_async/backend/__init__.py
+++ b/src/easynetwork/api_async/backend/__init__.py
@@ -1,9 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Asynchronous backend engine module"""
from __future__ import annotations
diff --git a/src/easynetwork/api_async/backend/abc.py b/src/easynetwork/api_async/backend/abc.py
index e04d7fb4..6b8d34b4 100644
--- a/src/easynetwork/api_async/backend/abc.py
+++ b/src/easynetwork/api_async/backend/abc.py
@@ -1,40 +1,49 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Asynchronous backend engine interfaces module"""
from __future__ import annotations
__all__ = [
- "AbstractAcceptedSocket",
- "AbstractAsyncBackend",
- "AbstractAsyncBaseSocketAdapter",
- "AbstractAsyncDatagramSocketAdapter",
- "AbstractAsyncHalfCloseableStreamSocketAdapter",
- "AbstractAsyncListenerSocketAdapter",
- "AbstractAsyncStreamSocketAdapter",
- "AbstractTask",
- "AbstractTaskGroup",
- "AbstractThreadsPortal",
- "AbstractTimeoutHandle",
+ "AcceptedSocket",
+ "AsyncBackend",
+ "AsyncBaseSocketAdapter",
+ "AsyncDatagramSocketAdapter",
+ "AsyncHalfCloseableStreamSocketAdapter",
+ "AsyncListenerSocketAdapter",
+ "AsyncStreamSocketAdapter",
"ICondition",
"IEvent",
"ILock",
+ "Task",
+ "TaskGroup",
+ "ThreadsPortal",
+ "TimeoutHandle",
]
import contextvars
import math
from abc import ABCMeta, abstractmethod
from collections.abc import Callable, Coroutine, Iterable, Sequence
-from contextlib import AbstractAsyncContextManager as AsyncContextManager
+from contextlib import AbstractAsyncContextManager
from typing import TYPE_CHECKING, Any, Generic, NoReturn, ParamSpec, Protocol, Self, TypeVar
if TYPE_CHECKING:
import concurrent.futures
import socket as _socket
- import ssl as _ssl
+ import ssl as _typing_ssl
from types import TracebackType
from ...tools.socket import ISocket
@@ -46,101 +55,311 @@
class ILock(Protocol):
+ """
+ A mutex lock for asynchronous tasks. Not thread-safe.
+
+ A lock can be used to guarantee exclusive access to a shared resource.
+
+ The preferred way to use a Lock is an :keyword:`async with` statement::
+
+ lock = backend.create_lock()
+
+ # ... later
+ async with lock:
+ # access shared state
+
+ which is equivalent to::
+
+ lock = backend.create_lock()
+
+ # ... later
+ await lock.acquire()
+ try:
+ # access shared state
+ finally:
+ lock.release()
+ """
+
async def __aenter__(self) -> Any: # pragma: no cover
...
async def __aexit__(
self,
- __exc_type: type[BaseException] | None,
- __exc_val: BaseException | None,
- __exc_tb: TracebackType | None,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
/,
) -> bool | None: # pragma: no cover
...
async def acquire(self) -> Any: # pragma: no cover
+ """
+ Acquires the lock.
+
+        This method waits until the lock is *unlocked*, then sets it to *locked*.
+
+ When more than one coroutine is blocked in :meth:`acquire` waiting for the lock to be unlocked,
+ only one coroutine eventually proceeds.
+ """
...
def release(self) -> None: # pragma: no cover
+ """
+ Releases the lock.
+
+        When the lock is *locked*, resets it to *unlocked* and returns.
+
+ Raises:
+ RuntimeError: the lock is *unlocked* or the task does not have the lock ownership.
+ """
...
def locked(self) -> bool: # pragma: no cover
+ """
+        Returns :data:`True` if the lock is locked.
+
+ Returns:
+ the lock state.
+ """
...
class IEvent(Protocol):
+ """
+ A waitable boolean value useful for inter-task synchronization. Not thread-safe.
+
+ An event object has an internal boolean flag, representing whether the event has happened yet.
+ The flag is initially :data:`False`, and the :meth:`wait` method waits until the flag is :data:`True`.
+ If the flag is already :data:`True`, then :meth:`wait` returns immediately. (If the event has already happened,
+ there's nothing to wait for.) The :meth:`set` method sets the flag to :data:`True`, and wakes up any waiters.
+
+ This behavior is useful because it helps avoid race conditions and lost wakeups: it doesn't matter whether :meth:`set`
+ gets called just before or after :meth:`wait`.
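+
+    A minimal usage sketch (illustrative only; assumes ``event`` was created with ``backend.create_event()``)::
+
+        async def waiter(event):
+            await event.wait()  # blocks until set() is called
+            print("the event happened")
+
+        async def setter(event):
+            event.set()  # wakes up all waiters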
+ """
+
async def wait(self) -> Any: # pragma: no cover
+ """
+ Blocks until the internal flag value becomes :data:`True`.
+
+ If it is already :data:`True`, then this method returns immediately.
+ """
...
def set(self) -> None: # pragma: no cover
+ """
+        Sets the internal flag value to :data:`True`, and wakes up any waiting tasks.
+ """
...
def is_set(self) -> bool: # pragma: no cover
+ """
+ Returns:
+ the current value of the internal flag.
+ """
...
class ICondition(ILock, Protocol):
+ """
+ A classic condition variable, similar to :class:`threading.Condition`.
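+
+    A minimal sketch (illustrative only; assumes ``cond`` was created with ``backend.create_condition_var()``)::
+
+        async with cond:  # the underlying lock must be held before calling wait()
+            await cond.wait()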
+    """
+
def notify(self, n: int = ..., /) -> None: # pragma: no cover
+ """
+ Wake one or more tasks that are blocked in :meth:`wait`.
+ """
...
def notify_all(self) -> None: # pragma: no cover
+ """
+ Wake all tasks that are blocked in :meth:`wait`.
+ """
...
async def wait(self) -> Any: # pragma: no cover
+ """
+ Wait until notified.
+
+ Raises:
+ RuntimeError: The underlying lock is not held by this task.
+ """
...
-class AbstractRunner(metaclass=ABCMeta):
+class Runner(metaclass=ABCMeta):
+ """
+    A :term:`context manager` that simplifies *multiple* async function calls in the same context.
+
+ Sometimes several top-level async functions should be called in the same event loop and :class:`contextvars.Context`.
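+
+    Example (a sketch; ``main`` and ``cleanup`` are placeholder async functions)::
+
+        with backend.new_runner() as runner:
+            runner.run(main)
+            runner.run(cleanup)  # reuses the same event loop and context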
+ """
+
__slots__ = ("__weakref__",)
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> None:
+ """Calls :meth:`close`."""
self.close()
@abstractmethod
def close(self) -> None:
+ """
+ Closes the runner.
+ """
raise NotImplementedError
@abstractmethod
def run(self, coro_func: Callable[..., Coroutine[Any, Any, _T]], *args: Any) -> _T:
+ """
+ Runs an async function, and returns the result.
+
+ Calling::
+
+ runner.run(coro_func, *args)
+
+ is equivalent to::
+
+ await coro_func(*args)
+
+ except that :meth:`run` can (and must) be called from a synchronous context.
+
+ Parameters:
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`. If you need to pass keyword arguments,
+ then use :func:`functools.partial`.
+
+ Returns:
+ Whatever `coro_func` returns.
+ """
raise NotImplementedError
-class AbstractTask(Generic[_T_co], metaclass=ABCMeta):
+class Task(Generic[_T_co], metaclass=ABCMeta):
+ """
+ A :class:`Task` object represents a concurrent "thread" of execution.
+ """
+
__slots__ = ("__weakref__",)
@abstractmethod
def done(self) -> bool:
+ """
+ Returns :data:`True` if the Task is done.
+
+ A Task is *done* when the wrapped coroutine either returned a value, raised an exception, or the Task was cancelled.
+
+ Returns:
+ The Task state.
+ """
raise NotImplementedError
@abstractmethod
def cancel(self) -> bool:
+ """
+        Requests the Task to be cancelled.
+
+ This arranges for a ``backend.get_cancelled_exc_class()`` exception to be thrown into the wrapped coroutine
+ on the next cycle of the event loop.
+
+ :meth:`Task.cancel` does not guarantee that the Task will be cancelled,
+ although suppressing cancellation completely is not common and is actively discouraged.
+
+ Returns:
+            :data:`True` if the cancellation request has been taken into account.
+ :data:`False` if the task is already *done*.
+ """
raise NotImplementedError
@abstractmethod
def cancelled(self) -> bool:
+ """
+ Returns :data:`True` if the Task is *cancelled*.
+
+ The Task is *cancelled* when the cancellation was requested with :meth:`cancel` and the wrapped coroutine propagated
+ the ``backend.get_cancelled_exc_class()`` exception thrown into it.
+
+ Returns:
+ the cancellation state.
+ """
raise NotImplementedError
@abstractmethod
async def wait(self) -> None:
+ """
+ Blocks until the task has been completed, but *does not* unwrap the result.
+
+ See the :meth:`join` method to get the actual task state.
+
+ Important:
+ Cancelling :meth:`Task.wait` *does not* cancel the task.
+ """
raise NotImplementedError
@abstractmethod
async def join(self) -> _T_co:
+ """
+ Blocks until the task has been completed, and returns the result.
+
+ Important:
+ Cancelling :meth:`Task.join` *does not* cancel the task.
+
+ Raises:
+ backend.get_cancelled_exc_class(): The task was cancelled.
+ BaseException: Any exception raised by the task.
+
+ Returns:
+ the task result.
+ """
raise NotImplementedError
-class AbstractSystemTask(AbstractTask[_T_co]):
+class SystemTask(Task[_T_co]):
+ """
+ A :class:`SystemTask` is a :class:`Task` that runs concurrently with the current root task.
+ """
+
__slots__ = ()
@abstractmethod
async def join_or_cancel(self) -> _T_co:
+ """
+        Similar to :meth:`Task.join` except that if the awaiting coroutine is cancelled, the cancellation is propagated to this task.
+
+ Roughly equivalent to::
+
+ try:
+ await task.wait()
+ except backend.get_cancelled_exc_class():
+ task.cancel()
+ await backend.ignore_cancellation(task.wait())
+ if task.cancelled():
+ raise
+ assert task.done()
+ return await task.join()
+ """
raise NotImplementedError
-class AbstractTaskGroup(metaclass=ABCMeta):
+class TaskGroup(metaclass=ABCMeta):
+ """
+ Groups several asynchronous tasks together.
+
+ Example::
+
+ async def main():
+ async with backend.create_task_group() as tg:
+ task1 = tg.start_soon(some_coro)
+ task2 = tg.start_soon(another_coro)
+ print("Both tasks have completed now.")
+
+ The :keyword:`async with` statement will wait for all tasks in the group to finish.
+ While waiting, new tasks may still be added to the group
+ (for example, by passing ``tg`` into one of the coroutines and calling ``tg.start_soon()`` in that coroutine).
+ Once the last task has finished and the :keyword:`async with` block is exited, no new tasks may be added to the group.
+ """
+
__slots__ = ("__weakref__",)
@abstractmethod
@@ -159,37 +378,82 @@ async def __aexit__(
@abstractmethod
def start_soon(
self,
- __coro_func: Callable[_P, Coroutine[Any, Any, _T]],
+ coro_func: Callable[..., Coroutine[Any, Any, _T]],
/,
- *args: _P.args,
- **kwargs: _P.kwargs,
- ) -> AbstractTask[_T]:
+ *args: Any,
+ context: contextvars.Context | None = ...,
+ ) -> Task[_T]:
+ """
+ Starts a new task in this task group.
+
+ Parameters:
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`. If you need to pass keyword arguments,
+ then use :func:`functools.partial`.
+ context: If given, it must be a :class:`contextvars.Context` instance in which the coroutine should be executed.
+ If the framework does not support contexts (or does not use them), it must simply ignore this parameter.
+
+ Returns:
+ the created task.
+ """
raise NotImplementedError
- def start_soon_with_context(
- self,
- context: contextvars.Context,
- coro_func: Callable[_P, Coroutine[Any, Any, _T]],
- /,
- *args: _P.args,
- **kwargs: _P.kwargs,
- ) -> AbstractTask[_T]:
- raise NotImplementedError("contextvars.Context management not supported by this backend")
+class ThreadsPortal(metaclass=ABCMeta):
+ """
+ An object that lets external threads run code in an asynchronous event loop.
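+
+    A minimal sketch (illustrative only; the portal must be created in the event loop thread)::
+
+        portal = backend.create_threads_portal()
+
+        def in_worker_thread():
+            # Blocks this worker thread until print() has run in the event loop thread.
+            portal.run_sync(print, "hello from the event loop thread")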
+ """
-class AbstractThreadsPortal(metaclass=ABCMeta):
__slots__ = ("__weakref__",)
@abstractmethod
def run_coroutine(self, coro_func: Callable[_P, Coroutine[Any, Any, _T]], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
+ """
+ Run the given async function in the bound event loop thread, blocking until it is complete. Thread-safe.
+
+ Parameters:
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`.
+ kwargs: Keyword arguments to be passed to `coro_func`.
+
+ Raises:
+ backend.get_cancelled_exc_class(): The scheduler was shut down while ``coro_func()`` was running
+ and cancelled the task.
+ RuntimeError: if the scheduler is shut down.
+ RuntimeError: if you try calling this from inside the event loop thread, which would otherwise cause a deadlock.
+            Exception: Whatever ``coro_func(*args, **kwargs)`` raises.
+
+        Returns:
+            Whatever ``coro_func(*args, **kwargs)`` returns.
+ """
raise NotImplementedError
@abstractmethod
def run_sync(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
+ """
+ Executes a function in the event loop thread from a worker thread. Thread-safe.
+
+ Parameters:
+ func: A synchronous function.
+ args: Positional arguments to be passed to `func`.
+ kwargs: Keyword arguments to be passed to `func`.
+
+ Raises:
+ RuntimeError: if the scheduler is shut down.
+ RuntimeError: if you try calling this from inside the event loop thread, which would otherwise cause a deadlock.
+            Exception: Whatever ``func(*args, **kwargs)`` raises.
+
+        Returns:
+            Whatever ``func(*args, **kwargs)`` returns.
+ """
raise NotImplementedError
-class AbstractAsyncBaseSocketAdapter(metaclass=ABCMeta):
+class AsyncBaseSocketAdapter(metaclass=ABCMeta):
+ """
+ Base class for asynchronous socket adapters.
+ """
+
__slots__ = ("__weakref__",)
async def __aenter__(self) -> Self:
@@ -201,101 +465,280 @@ async def __aexit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """Calls :meth:`aclose`."""
await self.aclose()
@abstractmethod
def is_closing(self) -> bool:
+ """
+ Checks if the socket is closed or in the process of being closed.
+ """
raise NotImplementedError
@abstractmethod
async def aclose(self) -> None:
+ """
+ Closes the socket.
+
+ Warning:
+ :meth:`aclose` performs a graceful close.
+
+ If :meth:`aclose` is cancelled, the socket is closed abruptly.
+ """
raise NotImplementedError
@abstractmethod
def get_local_address(self) -> tuple[Any, ...]:
+ """
+ Returns the local socket address. Roughly similar to :meth:`socket.socket.getsockname`.
+
+ Returns:
+ The socket address.
+ """
raise NotImplementedError
@abstractmethod
def socket(self) -> ISocket:
+ """
+ Returns the socket instance for low-level operations (such as ``socket.setsockopt()``).
+
+ Returns:
+ An object implementing the :protocol:`.ISocket` interface.
+ """
raise NotImplementedError
-class AbstractAsyncStreamSocketAdapter(AbstractAsyncBaseSocketAdapter):
+class AsyncStreamSocketAdapter(AsyncBaseSocketAdapter):
+ """
+ A stream-oriented socket interface.
+ """
+
__slots__ = ()
@abstractmethod
def get_remote_address(self) -> tuple[Any, ...]:
+ """
+ Returns the remote endpoint's address. Roughly similar to :meth:`socket.socket.getpeername`.
+
+ Returns:
+ The remote address.
+ """
raise NotImplementedError
@abstractmethod
async def recv(self, bufsize: int, /) -> bytes:
+ """
+ Similar to :meth:`socket.socket.recv`, except asynchronous.
+
+ Parameters:
+            bufsize: The maximum number of bytes to receive.
+ """
raise NotImplementedError
@abstractmethod
async def sendall(self, data: bytes, /) -> None:
+ """
+ Similar to :meth:`socket.socket.sendall`, except asynchronous.
+
+ Parameters:
+ data: The bytes to send.
+ """
raise NotImplementedError
async def sendall_fromiter(self, iterable_of_data: Iterable[bytes], /) -> None:
+ """
+ An efficient way to send a bunch of data via the socket.
+
+ Currently, the default implementation concatenates the arguments and
+ calls :meth:`sendall` on the result.
+
+ Parameters:
+ iterable_of_data: An :term:`iterable` yielding the bytes to send.
+ """
await self.sendall(b"".join(iterable_of_data))
-class AbstractAsyncHalfCloseableStreamSocketAdapter(AbstractAsyncStreamSocketAdapter):
+class AsyncHalfCloseableStreamSocketAdapter(AsyncStreamSocketAdapter):
+ """
+ A stream-oriented socket interface that also supports closing only the write end of the stream.
+ """
+
__slots__ = ()
@abstractmethod
async def send_eof(self) -> None:
+ """
+ Send an end-of-file indication on this stream, if possible. Similar to :meth:`socket.socket.shutdown`.
+ """
raise NotImplementedError
-class AbstractAsyncDatagramSocketAdapter(AbstractAsyncBaseSocketAdapter):
+class AsyncDatagramSocketAdapter(AsyncBaseSocketAdapter):
+ """
+ A datagram-oriented socket interface.
+ """
+
__slots__ = ()
@abstractmethod
def get_remote_address(self) -> tuple[Any, ...] | None:
+ """
+ Returns the remote endpoint's address. Roughly similar to :meth:`socket.socket.getpeername`.
+
+ Returns:
+ The remote address if configured, :data:`None` otherwise.
+ """
raise NotImplementedError
@abstractmethod
async def recvfrom(self, bufsize: int, /) -> tuple[bytes, tuple[Any, ...]]:
+ """
+ Similar to :meth:`socket.socket.recvfrom`, except asynchronous.
+
+ Parameters:
+            bufsize: The maximum number of bytes to receive.
+ """
raise NotImplementedError
@abstractmethod
async def sendto(self, data: bytes, address: tuple[Any, ...] | None, /) -> None:
+ """
+ Similar to :meth:`socket.socket.sendto`, except asynchronous.
+
+ Parameters:
+ data: The bytes to send.
+ """
raise NotImplementedError
-class AbstractAsyncListenerSocketAdapter(AbstractAsyncBaseSocketAdapter):
+class AsyncListenerSocketAdapter(AsyncBaseSocketAdapter):
+ """
+ An interface for socket listeners.
+ """
+
__slots__ = ()
@abstractmethod
- async def accept(self) -> AbstractAcceptedSocket:
+ async def accept(self) -> AcceptedSocket:
+ """
+ Similar to :meth:`socket.socket.accept`, except asynchronous.
+
+ The returned object is not directly usable for stream operations. You must call :meth:`AcceptedSocket.connect` on it::
+
+ accepted_socket = await listener.accept()
+ stream_socket = await accepted_socket.connect()
+
+ This helps to improve interoperability between tasks::
+
+ async def echo_handler(stream_socket):
+ async with stream_socket:
+ while True:
+ data = await stream_socket.recv(1024)
+ if not data:
+ break
+ await stream_socket.sendall(data)
+
+ async def serve(listener, task_group):
+
+                # Coroutine that waits for the connection to be fully established
+                # (e.g. performs the SSL/TLS handshake if necessary).
+ async def connect_and_run(accepted_socket):
+ stream_socket = await accepted_socket.connect()
+ # The connection is up; call the real handler
+ task_group.start_soon(echo_handler, stream_socket)
+
+ while True:
+ accepted_socket = await listener.accept()
+
+ # Run 'accepted_socket.connect()' in another task.
+ task_group.start_soon(connect_and_run, accepted_socket)
+
+ In this case, ``listener.accept()`` will simply dequeue the pending connections without waiting for
+ a ready stream interface to become available.
+
+ Returns:
+ The accepted socket.
+ """
raise NotImplementedError
-class AbstractAcceptedSocket(metaclass=ABCMeta):
+class AcceptedSocket(metaclass=ABCMeta):
+ """
+ An object representing an accepted socket from an :class:`AsyncListenerSocketAdapter`.
+ """
+
__slots__ = ()
@abstractmethod
- async def connect(self) -> AbstractAsyncStreamSocketAdapter:
+ async def connect(self) -> AsyncStreamSocketAdapter:
+ """
+        Wraps the accepted socket into an asynchronous stream socket, and performs connection initialization if necessary.
+
+ For example, an SSL/TLS stream would perform a TLS handshake.
+
+ Returns:
+ A stream socket.
+ """
raise NotImplementedError
-class AbstractTimeoutHandle(metaclass=ABCMeta):
+class TimeoutHandle(metaclass=ABCMeta):
+ """
+ Interface to deal with an actual timeout scope.
+
+ See :meth:`AsyncBackend.move_on_after` for details.
+ """
+
__slots__ = ()
@abstractmethod
def when(self) -> float:
+ """
+ Returns the current deadline.
+
+ Returns:
+ the absolute time in seconds. :data:`math.inf` if the current deadline is not set.
+ A negative value can be returned.
+ """
raise NotImplementedError
@abstractmethod
- def reschedule(self, when: float) -> None:
+ def reschedule(self, when: float, /) -> None:
+ """
+ Reschedules the timeout.
+
+ Parameters:
+ when: The new deadline.
+ """
raise NotImplementedError
@abstractmethod
def expired(self) -> bool:
+ """
+ Returns whether the context manager has exceeded its deadline (expired).
+
+ Returns:
+ the timeout state.
+ """
raise NotImplementedError
@property
def deadline(self) -> float:
+ """
+ A read-write attribute to simplify the timeout management.
+
+ For example, this statement::
+
+ handle.deadline += 30
+
+ is equivalent to::
+
+ handle.reschedule(handle.when() + 30)
+
+ It is also possible to remove the timeout by deleting the attribute::
+
+ del handle.deadline
+ """
return self.when()
@deadline.setter
@@ -307,76 +750,282 @@ def deadline(self) -> None:
self.reschedule(math.inf)
-class AbstractAsyncBackend(metaclass=ABCMeta):
+class AsyncBackend(metaclass=ABCMeta):
+ """
+ Asynchronous backend interface.
+
+ It bridges the gap between asynchronous frameworks (``asyncio``, ``trio``, or whatever) and EasyNetwork.
+ """
+
__slots__ = ("__weakref__",)
@abstractmethod
- def new_runner(self) -> AbstractRunner:
+ def new_runner(self) -> Runner:
+ """
+ Returns an asynchronous function runner.
+
+ Returns:
+ A :class:`Runner` context.
+ """
raise NotImplementedError
def bootstrap(self, coro_func: Callable[..., Coroutine[Any, Any, _T]], *args: Any) -> _T:
+ """
+ Runs an async function, and returns the result.
+
+ Equivalent to::
+
+ with backend.new_runner() as runner:
+ return runner.run(coro_func, *args)
+
+ See :meth:`Runner.run` documentation for details.
+
+ Parameters:
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`. If you need to pass keyword arguments,
+ then use :func:`functools.partial`.
+
+ Returns:
+ Whatever `coro_func` returns.
+ """
with self.new_runner() as runner:
return runner.run(coro_func, *args)
@abstractmethod
async def coro_yield(self) -> None:
+ """
+        Explicitly introduce a checkpoint to suspend a task.
+
+ This checks for cancellation and allows other tasks to be scheduled, without otherwise blocking.
+
+ Note:
+ The scheduler has the option of ignoring this and continuing to run the current task
+ if it decides this is appropriate (e.g. for increased efficiency).
+ """
raise NotImplementedError
@abstractmethod
async def cancel_shielded_coro_yield(self) -> None:
+ """
+ Introduce a schedule point, but not a cancel point.
+
+ Equivalent to (but probably more efficient than)::
+
+ await backend.ignore_cancellation(backend.coro_yield())
+ """
raise NotImplementedError
@abstractmethod
def get_cancelled_exc_class(self) -> type[BaseException]:
+ """
+ Returns the current async library's cancellation exception class.
+
+ Returns:
+ An exception class.
+ """
raise NotImplementedError
@abstractmethod
async def ignore_cancellation(self, coroutine: Coroutine[Any, Any, _T_co]) -> _T_co:
+ """
+ Protect a :term:`coroutine` from being cancelled.
+
+ The statement::
+
+ res = await backend.ignore_cancellation(something())
+
+ is equivalent to::
+
+ res = await something()
+
+        *except* that if the coroutine containing it is cancelled, the task running ``something()`` is not cancelled.
+
+ Important:
+ Depending on the implementation, the coroutine may or may not be executed in the same :class:`contextvars.Context`.
+
+ """
raise NotImplementedError
@abstractmethod
- def timeout(self, delay: float) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def timeout(self, delay: float) -> AbstractAsyncContextManager[TimeoutHandle]:
+ """
+ Returns an :term:`asynchronous context manager` that can be used to limit the amount of time spent waiting on something.
+
+ This function and :meth:`move_on_after` are similar in that both create a context manager with a given timeout,
+ and if the timeout expires then both will cause ``backend.get_cancelled_exc_class()`` to be raised within the scope.
+ The difference is that when the exception reaches :meth:`move_on_after`, it is caught and discarded. When it reaches
+ :meth:`timeout`, then it is caught and :exc:`TimeoutError` is raised in its place.
+
+ Parameters:
+ delay: number of seconds to wait.
+
+ Returns:
+ an :term:`asynchronous context manager`
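+
+        Example (a sketch; ``fetch`` is a placeholder coroutine function)::
+
+            try:
+                async with backend.timeout(10):
+                    await fetch()
+            except TimeoutError:
+                print("fetch() took more than 10 seconds")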
+ """
raise NotImplementedError
@abstractmethod
- def timeout_at(self, deadline: float) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def timeout_at(self, deadline: float) -> AbstractAsyncContextManager[TimeoutHandle]:
+ """
+ Returns an :term:`asynchronous context manager` that can be used to limit the amount of time spent waiting on something.
+
+ This function and :meth:`move_on_at` are similar in that both create a context manager with a given timeout,
+ and if the timeout expires then both will cause ``backend.get_cancelled_exc_class()`` to be raised within the scope.
+ The difference is that when the exception reaches :meth:`move_on_at`, it is caught and discarded. When it reaches
+ :meth:`timeout_at`, then it is caught and :exc:`TimeoutError` is raised in its place.
+
+ Parameters:
+ deadline: absolute time to stop waiting.
+
+ Returns:
+ an :term:`asynchronous context manager`
+ """
raise NotImplementedError
@abstractmethod
- def move_on_after(self, delay: float) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def move_on_after(self, delay: float) -> AbstractAsyncContextManager[TimeoutHandle]:
+ """
+ Returns an :term:`asynchronous context manager` that can be used to limit the amount of time spent waiting on something.
+ The deadline is set to now + `delay`.
+
+ Example::
+
+ async def long_running_operation(backend):
+ await backend.sleep(3600) # 1 hour
+
+ async def main():
+ ...
+
+ async with backend.move_on_after(10):
+ await long_running_operation(backend)
+
+ print("After at most 10 seconds.")
+
+ If ``long_running_operation`` takes more than 10 seconds to complete, the context manager will cancel the current task
+ and handle the resulting ``backend.get_cancelled_exc_class()`` exception internally.
+
+ Parameters:
+ delay: number of seconds to wait. If `delay` is :data:`math.inf`,
+ no time limit will be applied; this can be useful if the delay is unknown when the context manager is created.
+ In either case, the context manager can be rescheduled after creation using :meth:`TimeoutHandle.reschedule`.
+
+ Returns:
+ an :term:`asynchronous context manager`
+ """
raise NotImplementedError
@abstractmethod
- def move_on_at(self, deadline: float) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def move_on_at(self, deadline: float) -> AbstractAsyncContextManager[TimeoutHandle]:
+ """
+ Similar to :meth:`move_on_after`, except `deadline` is the absolute time to stop waiting, or :data:`math.inf`.
+
+ Example::
+
+ async def long_running_operation(backend):
+ await backend.sleep(3600) # 1 hour
+
+ async def main():
+ ...
+
+ deadline = backend.current_time() + 10
+ async with backend.move_on_at(deadline):
+ await long_running_operation(backend)
+
+ print("After at most 10 seconds.")
+
+ Parameters:
+ deadline: absolute time to stop waiting.
+
+ Returns:
+ an :term:`asynchronous context manager`
+ """
raise NotImplementedError
@abstractmethod
def current_time(self) -> float:
+ """
+ Returns the current time according to the scheduler clock.
+
+ Returns:
+ The current time.
+ """
raise NotImplementedError
@abstractmethod
async def sleep(self, delay: float) -> None:
+ """
+ Pause execution of the current task for the given number of seconds.
+
+ Parameters:
+ delay: The number of seconds to sleep. May be zero to insert a checkpoint without actually blocking.
+
+ Raises:
+ ValueError: if `delay` is negative or NaN.
+ """
raise NotImplementedError
@abstractmethod
async def sleep_forever(self) -> NoReturn:
+ """
+ Pause execution of the current task forever (or at least until cancelled).
+
+ Equivalent to (but probably more efficient than)::
+
+ await backend.sleep(math.inf)
+ """
raise NotImplementedError
async def sleep_until(self, deadline: float) -> None:
+ """
+ Pause execution of the current task until the given time.
+
+ The difference between :meth:`sleep` and :meth:`sleep_until` is that the former takes a relative time and the latter
+ takes an absolute time (as returned by :meth:`current_time`).
+
+ Parameters:
+ deadline: The time at which we should wake up again. May be in the past, in which case this function
+ executes a checkpoint but does not block.
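+
+        Example (a sketch; sleeps for at most 5 seconds)::
+
+            await backend.sleep_until(backend.current_time() + 5)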
+ """
return await self.sleep(max(deadline - self.current_time(), 0))
@abstractmethod
def spawn_task(
self,
- coro_func: Callable[_P, Coroutine[Any, Any, _T]],
+ coro_func: Callable[..., Coroutine[Any, Any, _T]],
/,
- *args: _P.args,
- **kwargs: _P.kwargs,
- ) -> AbstractSystemTask[_T]:
+ *args: Any,
+ context: contextvars.Context | None = ...,
+ ) -> SystemTask[_T]:
+ """
+ Starts a new "system" task.
+
+ It is a background task that runs concurrently with the current root task.
+
+ Parameters:
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`. If you need to pass keyword arguments,
+ then use :func:`functools.partial`.
+ context: If given, it must be a :class:`contextvars.Context` instance in which the coroutine should be executed.
+ If the framework does not support contexts (or does not use them), it must simply ignore this parameter.
+
+ Returns:
+ the created task.
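+
+        Example (a sketch; ``heartbeat`` is a placeholder coroutine function)::
+
+            task = backend.spawn_task(heartbeat)
+            ...
+            task.cancel()  # returns False if the task was already done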
+ """
raise NotImplementedError
@abstractmethod
- def create_task_group(self) -> AbstractTaskGroup:
+ def create_task_group(self) -> TaskGroup:
+ """
+ Creates a task group.
+
+ The most common use is as an :term:`asynchronous context manager`::
+
+ async with backend.create_task_group() as task_group:
+ ...
+
+ Returns:
+ A new task group.
+ """
raise NotImplementedError
@abstractmethod
@@ -387,36 +1036,115 @@ async def create_tcp_connection(
*,
local_address: tuple[str, int] | None = ...,
happy_eyeballs_delay: float | None = ...,
- ) -> AbstractAsyncStreamSocketAdapter:
+ ) -> AsyncStreamSocketAdapter:
+ """
+ Opens a connection using the TCP/IP protocol.
+
+ Parameters:
+ host: The host IP/domain name.
+            port: The port number to connect to.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ happy_eyeballs_delay: If given, is the "Connection Attempt Delay" as defined in :rfc:`8305`.
+
+ Raises:
+ ConnectionError: Cannot connect to `host` with the given `port`.
+ OSError: unrelated OS error occurred.
+
+ Returns:
+ A stream socket.
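+
+        Example (a sketch; the address is a placeholder)::
+
+            stream = await backend.create_tcp_connection("remote.example.com", 12345)
+            async with stream:
+                await stream.sendall(b"ping")
+                data = await stream.recv(1024)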
+ """
raise NotImplementedError
async def create_ssl_over_tcp_connection(
self,
host: str,
port: int,
- ssl_context: _ssl.SSLContext,
+ ssl_context: _typing_ssl.SSLContext,
*,
server_hostname: str | None,
ssl_handshake_timeout: float,
ssl_shutdown_timeout: float,
local_address: tuple[str, int] | None = ...,
happy_eyeballs_delay: float | None = ...,
- ) -> AbstractAsyncStreamSocketAdapter:
+ ) -> AsyncStreamSocketAdapter:
+ """
+ Opens an SSL/TLS stream connection on top of the TCP/IP protocol.
+
+ Parameters:
+ host: The host IP/domain name.
+            port: The port number to connect to.
+ ssl_context: TLS connection configuration (see :mod:`ssl` module).
+ server_hostname: sets or overrides the hostname that the target server's certificate will be matched against.
+ By default, `host` is used.
+ ssl_handshake_timeout: the time in seconds to wait for the TLS handshake to complete.
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ happy_eyeballs_delay: If given, is the "Connection Attempt Delay" as defined in :rfc:`8305`.
+
+ Raises:
+ ConnectionError: Cannot connect to `host` with the given `port`.
+ ssl.SSLError: Error in the TLS handshake (invalid certificate, ciphers, etc.).
+ OSError: unrelated OS error occurred.
+
+ Returns:
+ A stream socket.
+ """
raise NotImplementedError("SSL/TLS is not supported by this backend")
@abstractmethod
- async def wrap_tcp_client_socket(self, socket: _socket.socket) -> AbstractAsyncStreamSocketAdapter:
+ async def wrap_tcp_client_socket(self, socket: _socket.socket) -> AsyncStreamSocketAdapter:
+ """
+        Wraps an already connected TCP/IP socket into an asynchronous stream socket.
+
+        Important:
+            The returned stream socket takes ownership of `socket`.
+
+ You should use :meth:`AsyncStreamSocketAdapter.aclose` to close the socket.
+
+ Parameters:
+ socket: The socket to wrap.
+
+ Raises:
+ ValueError: Invalid socket type or family.
+
+ Returns:
+ A stream socket.
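+
+        Example (a sketch; the address is a placeholder)::
+
+            import socket
+
+            sock = socket.create_connection(("remote.example.com", 12345))
+            stream = await backend.wrap_tcp_client_socket(sock)  # `stream` now owns `sock`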
+ """
raise NotImplementedError
async def wrap_ssl_over_tcp_client_socket(
self,
socket: _socket.socket,
- ssl_context: _ssl.SSLContext,
+ ssl_context: _typing_ssl.SSLContext,
*,
server_hostname: str,
ssl_handshake_timeout: float,
ssl_shutdown_timeout: float,
- ) -> AbstractAsyncStreamSocketAdapter:
+ ) -> AsyncStreamSocketAdapter:
+ """
+        Wraps an already connected TCP/IP socket into an asynchronous stream socket within an SSL/TLS context.
+
+        Important:
+            The returned stream socket takes ownership of `socket`.
+
+ You should use :meth:`AsyncStreamSocketAdapter.aclose` to close the socket.
+
+ Parameters:
+ socket: The socket to wrap.
+ ssl_context: TLS connection configuration (see :mod:`ssl` module).
+ server_hostname: sets the hostname that the target server's certificate will be matched against.
+ ssl_handshake_timeout: the time in seconds to wait for the TLS handshake to complete.
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+
+ Raises:
+ ConnectionError: TLS handshake failed to connect to the remote.
+ ssl.SSLError: Error in the TLS handshake (invalid certificate, ciphers, etc.).
+ OSError: unrelated OS error occurred.
+ ValueError: Invalid socket type or family.
+
+ Returns:
+ A stream socket.
+ """
raise NotImplementedError("SSL/TLS is not supported by this backend")
@abstractmethod
@@ -427,7 +1155,33 @@ async def create_tcp_listeners(
backlog: int,
*,
reuse_port: bool = ...,
- ) -> Sequence[AbstractAsyncListenerSocketAdapter]:
+ ) -> Sequence[AsyncListenerSocketAdapter]:
+ """
+ Opens listener sockets for TCP connections.
+
+ Parameters:
+ host: Can be set to several types which determine where the server would be listening:
+
+ * If `host` is a string, the TCP server is bound to a single network interface specified by `host`.
+
+ * If `host` is a sequence of strings, the TCP server is bound to all network interfaces specified by the sequence.
+
+ * If `host` is :data:`None`, all interfaces are assumed and a list of multiple sockets will be returned
+ (most likely one for IPv4 and another one for IPv6).
+            port: specifies which port the server should listen on. If the value is ``0``, a random unused port will be selected
+                (note that if `host` resolves to multiple network interfaces, a different random port will be selected
+                for each interface).
+            backlog: the maximum number of queued connections passed to :meth:`~socket.socket.listen` (defaults to ``100``).
+ reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints
+ are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows.
+
+ Raises:
+ OSError: unrelated OS error occurred.
+
+ Returns:
+ A sequence of listener sockets.
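+
+        Example (a sketch; binds all interfaces on a random free port)::
+
+            listeners = await backend.create_tcp_listeners(None, 0, 100)
+            for listener in listeners:
+                print(listener.get_local_address())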
+ """
raise NotImplementedError
async def create_ssl_over_tcp_listeners(
@@ -435,12 +1189,43 @@ async def create_ssl_over_tcp_listeners(
host: str | Sequence[str] | None,
port: int,
backlog: int,
- ssl_context: _ssl.SSLContext,
+ ssl_context: _typing_ssl.SSLContext,
*,
ssl_handshake_timeout: float,
ssl_shutdown_timeout: float,
reuse_port: bool = ...,
- ) -> Sequence[AbstractAsyncListenerSocketAdapter]:
+ ) -> Sequence[AsyncListenerSocketAdapter]:
+ """
+        Opens listener sockets for SSL/TLS over TCP connections.
+
+ Parameters:
+ host: Can be set to several types which determine where the server would be listening:
+
+ * If `host` is a string, the TCP server is bound to a single network interface specified by `host`.
+
+ * If `host` is a sequence of strings, the TCP server is bound to all network interfaces specified by the sequence.
+
+ * If `host` is :data:`None`, all interfaces are assumed and a list of multiple sockets will be returned
+ (most likely one for IPv4 and another one for IPv6).
+            port: specifies which port the server should listen on. If the value is ``0``, a random unused port will be selected
+                (note that if `host` resolves to multiple network interfaces, a different random port will be selected
+                for each interface).
+            backlog: the maximum number of queued connections passed to :meth:`~socket.socket.listen` (defaults to ``100``).
+            ssl_context: can be set to an :class:`ssl.SSLContext` instance to enable TLS over the accepted connections.
+ ssl_handshake_timeout: (for a TLS connection) the time in seconds to wait for the TLS handshake to complete
+ before aborting the connection. ``60.0`` seconds if :data:`None` (default).
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+ ``30.0`` seconds if :data:`None` (default).
+ reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints
+ are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows.
+
+ Raises:
+ OSError: unrelated OS error occurred.
+
+ Returns:
+ A sequence of listener sockets.
+ """
raise NotImplementedError("SSL/TLS is not supported by this backend")
@abstractmethod
@@ -450,33 +1235,151 @@ async def create_udp_endpoint(
local_address: tuple[str, int] | None = ...,
remote_address: tuple[str, int] | None = ...,
reuse_port: bool = ...,
- ) -> AbstractAsyncDatagramSocketAdapter:
+ ) -> AsyncDatagramSocketAdapter:
+ """
+ Opens an endpoint using the UDP protocol.
+
+ Parameters:
+ remote_address: If given, is a ``(host, port)`` tuple used to connect the socket.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ reuse_port: If :data:`True`, sets the :data:`~socket.SO_REUSEPORT` socket option if supported.
+
+ Raises:
+ OSError: unrelated OS error occurred.
+
+ Returns:
+ A datagram socket.
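+
+        Example (a sketch; a locally bound, unconnected endpoint)::
+
+            endpoint = await backend.create_udp_endpoint(local_address=("127.0.0.1", 9999))
+            data, address = await endpoint.recvfrom(1024)
+            await endpoint.sendto(data, address)  # echo back to the sender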
+ """
raise NotImplementedError
@abstractmethod
- async def wrap_udp_socket(self, socket: _socket.socket) -> AbstractAsyncDatagramSocketAdapter:
+ async def wrap_udp_socket(self, socket: _socket.socket) -> AsyncDatagramSocketAdapter:
+ """
+ Wraps an already open UDP socket into an asynchronous datagram socket.
+
+ Important:
+            The returned datagram socket takes ownership of `socket`.
+
+ You should use :meth:`AsyncDatagramSocketAdapter.aclose` to close the socket.
+
+ Parameters:
+ socket: The socket to wrap.
+
+ Raises:
+ ValueError: Invalid socket type or family.
+
+ Returns:
+ A datagram socket.
+ """
raise NotImplementedError
@abstractmethod
def create_lock(self) -> ILock:
+ """
+ Creates a Lock object for inter-task synchronization.
+
+ Returns:
+ A new Lock.
+ """
raise NotImplementedError
@abstractmethod
def create_event(self) -> IEvent:
+ """
+ Creates an Event object for inter-task synchronization.
+
+ Returns:
+ A new Event.
+ """
raise NotImplementedError
@abstractmethod
def create_condition_var(self, lock: ILock | None = ...) -> ICondition:
+ """
+ Creates a Condition variable object for inter-task synchronization.
+
+ Parameters:
+ lock: If given, it must be a lock created by :meth:`create_lock`. Otherwise a new Lock object is created automatically.
+
+ Returns:
+ A new Condition.
+ """
raise NotImplementedError
@abstractmethod
async def run_in_thread(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
+ """
+ Executes a synchronous function in a worker thread.
+
+ This is useful to execute a long-running (or temporarily blocking) function and let other tasks run.
+
+ From inside the worker thread, you can get back into the scheduler loop using a :class:`ThreadsPortal`.
+ See :meth:`create_threads_portal` for details.
+
+ Cancellation handling:
+ Because there is no way to "cancel" an arbitrary function call in an OS thread,
+ once the job is started, any cancellation requests will be discarded.
+
+ Warning:
+ Due to the current coroutine implementation, `func` should not raise a :exc:`StopIteration`.
+ This can lead to unexpected (and unwanted) behavior.
+
+ Parameters:
+ func: A synchronous function.
+ args: Positional arguments to be passed to `func`.
+ kwargs: Keyword arguments to be passed to `func`.
+
+ Raises:
+ Exception: Whatever ``func(*args, **kwargs)`` raises.
+
+ Returns:
+ Whatever ``func(*args, **kwargs)`` returns.
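+
+        Example (a sketch; reads a file without blocking the event loop)::
+
+            import pathlib
+
+            content = await backend.run_in_thread(pathlib.Path("data.bin").read_bytes)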
+ """
raise NotImplementedError
@abstractmethod
- def create_threads_portal(self) -> AbstractThreadsPortal:
+ def create_threads_portal(self) -> ThreadsPortal:
+ """
+ Creates a portal for executing functions in the event loop thread for use in external threads.
+
+ Use this function in asynchronous code when you need to allow external threads access to the event loop
+ where your asynchronous code is currently running.
+
+ Raises:
+ RuntimeError: not called in the event loop thread.
+
+ Returns:
+ a new thread portal.
+ """
raise NotImplementedError
@abstractmethod
async def wait_future(self, future: concurrent.futures.Future[_T_co]) -> _T_co:
+ """
+ Blocks until the future is done, and returns the result.
+
+ Cancellation handling:
+            In the case of cancellation, the rules follow what :class:`concurrent.futures.Future` defines:
+
+ * :meth:`wait_future` tries to cancel the given `future` (using :meth:`concurrent.futures.Future.cancel`)
+
+ * If the future has been effectively cancelled, the cancellation request is "accepted" and propagated.
+
+ * Otherwise, the cancellation request is "rejected" and discarded.
+ :meth:`wait_future` will block until `future` is done, and will ignore any further cancellation request.
+
+ * A coroutine awaiting a `future` in ``running`` state (:meth:`concurrent.futures.Future.running` returns :data:`True`)
+ cannot be cancelled.
+
+ Parameters:
+ future: The :class:`~concurrent.futures.Future` object to wait for.
+
+ Raises:
+            concurrent.futures.CancelledError: the future has been unexpectedly cancelled by external code
+ (typically :meth:`concurrent.futures.Executor.shutdown`).
+ Exception: If ``future.exception()`` does not return :data:`None`, this exception is raised.
+
+ Returns:
+            Whatever ``future.result()`` returns.
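+
+        Example (a sketch; ``executor`` is a placeholder :class:`concurrent.futures.Executor` instance)::
+
+            future = executor.submit(pow, 2, 10)
+            result = await backend.wait_future(future)  # -> 1024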
+ """
raise NotImplementedError
diff --git a/src/easynetwork/api_async/backend/factory.py b/src/easynetwork/api_async/backend/factory.py
index 85147692..5130aa08 100644
--- a/src/easynetwork/api_async/backend/factory.py
+++ b/src/easynetwork/api_async/backend/factory.py
@@ -1,9 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Asynchronous backend engine factory module"""
from __future__ import annotations
@@ -16,7 +25,7 @@
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Final, final
-from .abc import AbstractAsyncBackend
+from .abc import AsyncBackend
from .sniffio import current_async_library as _sniffio_current_async_library
if TYPE_CHECKING:
@@ -26,12 +35,12 @@
@final
class AsyncBackendFactory:
GROUP_NAME: Final[str] = "easynetwork.async.backends"
- __BACKEND: str | type[AbstractAsyncBackend] | None = None
- __BACKEND_EXTENSIONS: Final[dict[str, type[AbstractAsyncBackend]]] = {}
+ __BACKEND: str | type[AsyncBackend] | None = None
+ __BACKEND_EXTENSIONS: Final[dict[str, type[AsyncBackend]]] = {}
@staticmethod
- def get_default_backend(guess_current_async_library: bool = True) -> type[AbstractAsyncBackend]:
- backend: str | type[AbstractAsyncBackend] | None = AsyncBackendFactory.__BACKEND
+ def get_default_backend(guess_current_async_library: bool = True) -> type[AsyncBackend]:
+ backend: str | type[AsyncBackend] | None = AsyncBackendFactory.__BACKEND
if isinstance(backend, type):
return backend
if backend is None:
@@ -46,9 +55,9 @@ def get_default_backend(guess_current_async_library: bool = True) -> type[Abstra
)
@staticmethod
- def set_default_backend(backend: str | type[AbstractAsyncBackend] | None) -> None:
+ def set_default_backend(backend: str | type[AsyncBackend] | None) -> None:
match backend:
- case type() if not issubclass(backend, AbstractAsyncBackend) or inspect.isabstract(backend):
+ case type() if not issubclass(backend, AsyncBackend) or inspect.isabstract(backend):
raise TypeError(f"Invalid backend class: {backend!r}")
case type() | None:
pass
@@ -60,7 +69,7 @@ def set_default_backend(backend: str | type[AbstractAsyncBackend] | None) -> Non
AsyncBackendFactory.__BACKEND = backend
@staticmethod
- def extend(backend_name: str, backend_cls: type[AbstractAsyncBackend] | None) -> None:
+ def extend(backend_name: str, backend_cls: type[AsyncBackend] | None) -> None:
default_backend_cls = AsyncBackendFactory.__get_backend_cls(backend_name, extended=False)
if backend_cls is None or backend_cls is default_backend_cls:
AsyncBackendFactory.__BACKEND_EXTENSIONS.pop(backend_name, None)
@@ -70,8 +79,8 @@ def extend(backend_name: str, backend_cls: type[AbstractAsyncBackend] | None) ->
AsyncBackendFactory.__BACKEND_EXTENSIONS[backend_name] = backend_cls
@staticmethod
- def new(backend: str | None = None, /, **kwargs: Any) -> AbstractAsyncBackend:
- backend_cls: type[AbstractAsyncBackend]
+ def new(backend: str | None = None, /, **kwargs: Any) -> AsyncBackend:
+ backend_cls: type[AsyncBackend]
if backend is None:
backend_cls = AsyncBackendFactory.get_default_backend(guess_current_async_library=True)
else:
@@ -79,15 +88,15 @@ def new(backend: str | None = None, /, **kwargs: Any) -> AbstractAsyncBackend:
return backend_cls(**kwargs)
@staticmethod
- def ensure(backend: str | AbstractAsyncBackend | None, kwargs: Mapping[str, Any] | None = None) -> AbstractAsyncBackend:
- if not isinstance(backend, AbstractAsyncBackend):
+ def ensure(backend: str | AsyncBackend | None, kwargs: Mapping[str, Any] | None = None) -> AsyncBackend:
+ if not isinstance(backend, AsyncBackend):
if kwargs is None:
kwargs = {}
backend = AsyncBackendFactory.new(backend, **kwargs)
return backend
@staticmethod
- def get_all_backends(*, extended: bool = True) -> MappingProxyType[str, type[AbstractAsyncBackend]]:
+ def get_all_backends(*, extended: bool = True) -> MappingProxyType[str, type[AsyncBackend]]:
backends = {
name: AsyncBackendFactory.__get_backend_cls(name, extended=extended)
for name in AsyncBackendFactory.__get_available_backends()
@@ -114,7 +123,7 @@ def __get_backend_cls(
error_msg_format: str = "Unknown backend {name!r}",
*,
extended: bool,
- ) -> type[AbstractAsyncBackend]:
+ ) -> type[AsyncBackend]:
if extended:
try:
return AsyncBackendFactory.__BACKEND_EXTENSIONS[name]
@@ -127,13 +136,13 @@ def __get_backend_cls(
@staticmethod
@functools.cache
- def __load_backend_cls_from_entry_point(name: str) -> type[AbstractAsyncBackend]:
+ def __load_backend_cls_from_entry_point(name: str) -> type[AsyncBackend]:
entry_point: EntryPoint = AsyncBackendFactory.__get_available_backends()[name]
entry_point_cls: Any = entry_point.load()
if (
not isinstance(entry_point_cls, type)
- or not issubclass(entry_point_cls, AbstractAsyncBackend)
+ or not issubclass(entry_point_cls, AsyncBackend)
or inspect.isabstract(entry_point_cls)
):
raise TypeError(f"Invalid backend entry point (name={name!r}): {entry_point_cls!r}")
diff --git a/src/easynetwork/api_async/backend/futures.py b/src/easynetwork/api_async/backend/futures.py
index 64abf17e..18f4dea4 100644
--- a/src/easynetwork/api_async/backend/futures.py
+++ b/src/easynetwork/api_async/backend/futures.py
@@ -1,36 +1,95 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Asynchronous backend engine bindings with concurrent.futures module"""
from __future__ import annotations
-__all__ = ["AsyncExecutor", "AsyncThreadPoolExecutor"]
+__all__ = ["AsyncExecutor"]
import concurrent.futures
import contextvars
-from collections.abc import Callable
-from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar, overload
+import functools
+from collections.abc import Callable, Mapping
+from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar
+from .factory import AsyncBackendFactory
from .sniffio import current_async_library_cvar as _sniffio_current_async_library_cvar
if TYPE_CHECKING:
from types import TracebackType
- from .abc import AbstractAsyncBackend
+ from .abc import AsyncBackend
_P = ParamSpec("_P")
_T = TypeVar("_T")
class AsyncExecutor:
- __slots__ = ("__backend", "__executor", "__weakref__")
+ """
+ Wraps a :class:`concurrent.futures.Executor` instance.
+
+ For example, this code::
+
+ from concurrent.futures import ProcessPoolExecutor, wait
+
+ def main() -> None:
+ with ProcessPoolExecutor() as executor:
+ futures = [executor.submit(pow, a, b) for a, b in [(3, 4), (12, 2), (6, 8)]]
+ wait(futures)
+ results = [f.result() for f in futures]
+
+ can be converted to::
+
+ from concurrent.futures import ProcessPoolExecutor
- def __init__(self, backend: AbstractAsyncBackend, executor: concurrent.futures.Executor) -> None:
- self.__backend: AbstractAsyncBackend = backend
+ async def main() -> None:
+ ...
+
+ async with AsyncExecutor(ProcessPoolExecutor()) as executor:
+ async with backend.create_task_group() as task_group:
+ tasks = [task_group.start_soon(executor.run, pow, a, b) for a, b in [(3, 4), (12, 2), (6, 8)]]
+ results = [await t.join() for t in tasks]
+ """
+
+ __slots__ = ("__backend", "__executor", "__handle_contexts", "__weakref__")
+
+ def __init__(
+ self,
+ executor: concurrent.futures.Executor,
+ backend: str | AsyncBackend | None = None,
+ backend_kwargs: Mapping[str, Any] | None = None,
+ *,
+ handle_contexts: bool = False,
+ ) -> None:
+ """
+ Parameters:
+ executor: The executor instance to wrap.
+ backend: the backend to use. Automatically determined otherwise.
+            backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+ handle_contexts: If :data:`True`, contexts (:class:`contextvars.Context`) are properly propagated to workers.
+ Defaults to :data:`False` because not all executors support the use of contexts
+ (e.g. :class:`concurrent.futures.ProcessPoolExecutor`).
+ """
+ if not isinstance(executor, concurrent.futures.Executor):
+ raise TypeError("Invalid executor type")
+
+ self.__backend: AsyncBackend = AsyncBackendFactory.ensure(backend, backend_kwargs)
self.__executor: concurrent.futures.Executor = executor
+ self.__handle_contexts: bool = bool(handle_contexts)
async def __aenter__(self) -> Self:
return self
@@ -41,51 +100,77 @@ async def __aexit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """Calls :meth:`shutdown`."""
await self.shutdown()
async def run(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
- try:
- return await self.__backend.wait_future(self.__executor.submit(func, *args, **kwargs))
- finally:
- del func, args, kwargs
+ """
+ Executes ``func(*args, **kwargs)`` in the executor, blocking until it is complete.
+
+ Example::
+
+ async with AsyncExecutor(ThreadPoolExecutor(max_workers=1)) as executor:
+ result = await executor.run(pow, 323, 1235)
+
+ Warning:
+            Due to the current coroutine implementation, `func` should not raise :exc:`StopIteration`.
+            Doing so can lead to unexpected (and unwanted) behavior.
+
+ Parameters:
+ func: A synchronous function.
+ args: Positional arguments to be passed to `func`.
+ kwargs: Keyword arguments to be passed to `func`.
+
+ Raises:
+ RuntimeError: if the executor is closed.
+            concurrent.futures.CancelledError: if the executor is shutting down and the pending task has been cancelled.
+            Exception: Whatever ``func(*args, **kwargs)`` raises.
+
+ Returns:
+            Whatever ``func(*args, **kwargs)`` returns.
+ """
+ func = self._setup_func(func)
+ executor = self.__executor
+ backend = self.__backend
+ return await backend.wait_future(executor.submit(func, *args, **kwargs))
def shutdown_nowait(self, *, cancel_futures: bool = False) -> None:
- self.__executor.shutdown(wait=False, cancel_futures=cancel_futures)
+ """
+ Signal the executor that it should free any resources that it is using when the currently pending futures
+ are done executing.
- async def shutdown(self, *, cancel_futures: bool = False) -> None:
- await self.__backend.run_in_thread(self.__executor.shutdown, wait=True, cancel_futures=cancel_futures)
+ Calls to :meth:`AsyncExecutor.run` made after shutdown will raise :exc:`RuntimeError`.
+ Parameters:
+ cancel_futures: If :data:`True`, this method will cancel all pending futures that the executor
+ has not started running. Any futures that are completed or running won't be cancelled,
+ regardless of the value of `cancel_futures`.
+ """
+ self.__executor.shutdown(wait=False, cancel_futures=cancel_futures)
-class AsyncThreadPoolExecutor(AsyncExecutor):
- __slots__ = ()
+ async def shutdown(self, *, cancel_futures: bool = False) -> None:
+ """
+ Signal the executor that it should free any resources that it is using when the currently pending futures
+ are done executing.
- @overload
- def __init__(self, backend: AbstractAsyncBackend) -> None:
- ...
+ Calls to :meth:`AsyncExecutor.run` made after shutdown will raise :exc:`RuntimeError`.
- @overload
- def __init__(
- self,
- backend: AbstractAsyncBackend,
- *,
- max_workers: int | None = ...,
- thread_name_prefix: str = ...,
- initializer: Callable[..., object] | None = ...,
- initargs: tuple[Any, ...] = ...,
- **kwargs: Any,
- ) -> None:
- ...
+ This method will block until all the pending futures are done executing and
+ the resources associated with the executor have been freed.
- def __init__(self, backend: AbstractAsyncBackend, **kwargs: Any) -> None:
- super().__init__(backend, concurrent.futures.ThreadPoolExecutor(**kwargs))
+ Parameters:
+ cancel_futures: If :data:`True`, this method will cancel all pending futures that the executor
+ has not started running. Any futures that are completed or running won't be cancelled,
+ regardless of the value of `cancel_futures`.
+ """
+ await self.__backend.run_in_thread(self.__executor.shutdown, wait=True, cancel_futures=cancel_futures)
- async def run(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
- ctx = contextvars.copy_context()
+ def _setup_func(self, func: Callable[_P, _T]) -> Callable[_P, _T]:
+ if self.__handle_contexts:
+ ctx = contextvars.copy_context()
- if _sniffio_current_async_library_cvar is not None:
- ctx.run(_sniffio_current_async_library_cvar.set, None)
+ if _sniffio_current_async_library_cvar is not None:
+ ctx.run(_sniffio_current_async_library_cvar.set, None)
- try:
- return await super().run(ctx.run, func, *args, **kwargs) # type: ignore[arg-type]
- finally:
- del func, args, kwargs
+ func = functools.partial(ctx.run, func) # type: ignore[assignment]
+ return func
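
A short sketch of the new ``handle_contexts`` option, assuming it runs under an event loop the factory can auto-detect; the context variable and function names are illustrative::

    import contextvars
    from concurrent.futures import ThreadPoolExecutor

    from easynetwork.api_async.backend.futures import AsyncExecutor

    request_id = contextvars.ContextVar("request_id", default="<unset>")

    def show_request_id() -> str:
        # Runs in a worker thread; with handle_contexts=True it sees
        # a copy of the caller's context.
        return request_id.get()

    async def main() -> None:
        request_id.set("42")
        async with AsyncExecutor(ThreadPoolExecutor(), handle_contexts=True) as executor:
            assert await executor.run(show_request_id) == "42"
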
diff --git a/src/easynetwork/api_async/backend/sniffio.py b/src/easynetwork/api_async/backend/sniffio.py
index ced1200b..a8b1008b 100644
--- a/src/easynetwork/api_async/backend/sniffio.py
+++ b/src/easynetwork/api_async/backend/sniffio.py
@@ -1,9 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Helper module for sniffio integration"""
from __future__ import annotations
diff --git a/src/easynetwork/api_async/backend/tasks.py b/src/easynetwork/api_async/backend/tasks.py
index 244698ed..4cb513a8 100644
--- a/src/easynetwork/api_async/backend/tasks.py
+++ b/src/easynetwork/api_async/backend/tasks.py
@@ -1,19 +1,29 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""
-Asynchronous client/server module
-"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Task utilities module"""
from __future__ import annotations
__all__ = ["SingleTaskRunner"]
+import functools
from collections.abc import Callable, Coroutine
from typing import TYPE_CHECKING, Any, Generic, ParamSpec, TypeVar
if TYPE_CHECKING:
- from .abc import AbstractAsyncBackend, AbstractSystemTask
+ from .abc import AsyncBackend, SystemTask
_P = ParamSpec("_P")
@@ -21,6 +31,29 @@
class SingleTaskRunner(Generic[_T_co]):
+ """
+    A helper class to execute a coroutine function only once.
+
+ In addition to one-time execution, concurrent calls will simply wait for the result::
+
+ async def expensive_task():
+ print("Start expensive task")
+
+ ...
+
+ print("Done")
+ return 42
+
+ async def main():
+ ...
+
+ task_runner = SingleTaskRunner(backend, expensive_task)
+ async with backend.create_task_group() as task_group:
+ tasks = [task_group.start_soon(task_runner.run) for _ in range(10)]
+
+ assert all(await t.join() == 42 for t in tasks)
+ """
+
__slots__ = (
"__backend",
"__coro_func",
@@ -30,40 +63,66 @@ class SingleTaskRunner(Generic[_T_co]):
def __init__(
self,
- __backend: AbstractAsyncBackend,
- __coro_func: Callable[_P, Coroutine[Any, Any, _T_co]],
+ backend: AsyncBackend,
+ coro_func: Callable[_P, Coroutine[Any, Any, _T_co]],
/,
*args: _P.args,
**kwargs: _P.kwargs,
) -> None:
+ """
+ Parameters:
+ backend: The asynchronous backend interface.
+ coro_func: An async function.
+ args: Positional arguments to be passed to `coro_func`.
+ kwargs: Keyword arguments to be passed to `coro_func`.
+ """
super().__init__()
- self.__backend: AbstractAsyncBackend = __backend
- self.__coro_func: tuple[Callable[..., Coroutine[Any, Any, _T_co]], tuple[Any, ...], dict[str, Any]] | None = (
- __coro_func,
- args,
- kwargs,
+ self.__backend: AsyncBackend = backend
+ self.__coro_func: Callable[[], Coroutine[Any, Any, _T_co]] | None = functools.partial(
+ coro_func,
+ *args,
+ **kwargs,
)
- self.__task: AbstractSystemTask[_T_co] | None = None
+ self.__task: SystemTask[_T_co] | None = None
def cancel(self) -> bool:
+ """
+ Cancel coroutine execution.
+
+        If the runner has not been used yet, :meth:`run` will not call `coro_func` and will raise ``backend.get_cancelled_exc_class()``.
+
+ If `coro_func` is already running, a cancellation request is sent to the coroutine.
+
+ Returns:
+ :data:`True` in case of success, :data:`False` otherwise.
+ """
self.__coro_func = None
if self.__task is not None:
return self.__task.cancel()
return True
async def run(self) -> _T_co:
+ """
+ Executes the coroutine `coro_func`.
+
+ Raises:
+ Exception: Whatever ``coro_func`` raises.
+
+ Returns:
+ Whatever ``coro_func`` returns.
+ """
must_cancel_inner_task: bool = False
if self.__task is None:
+ must_cancel_inner_task = True
if self.__coro_func is None:
self.__task = self.__backend.spawn_task(self.__backend.sleep_forever)
self.__task.cancel()
else:
- coro_func, args, kwargs = self.__coro_func
+ coro_func = self.__coro_func
self.__coro_func = None
- self.__task = self.__backend.spawn_task(coro_func, *args, **kwargs)
- del coro_func, args, kwargs
- must_cancel_inner_task = True
+ self.__task = self.__backend.spawn_task(coro_func)
+ del coro_func
try:
if must_cancel_inner_task:
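
A hedged sketch of the one-shot semantics described above, assuming ``backend`` is an ``AsyncBackend`` instance obtained elsewhere::

    from easynetwork.api_async.backend.abc import AsyncBackend
    from easynetwork.api_async.backend.tasks import SingleTaskRunner

    async def fetch_config() -> dict[str, str]:
        print("loading configuration")  # printed at most once
        return {"answer": "42"}

    async def main(backend: AsyncBackend) -> None:
        runner = SingleTaskRunner(backend, fetch_config)

        # The coroutine runs once; subsequent calls await the same task.
        first = await runner.run()
        second = await runner.run()
        assert first == second
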
diff --git a/src/easynetwork/api_async/client/__init__.py b/src/easynetwork/api_async/client/__init__.py
index a16ca8a3..480a629f 100644
--- a/src/easynetwork/api_async/client/__init__.py
+++ b/src/easynetwork/api_async/client/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network client module"""
diff --git a/src/easynetwork/api_async/client/abc.py b/src/easynetwork/api_async/client/abc.py
index 5ba9b4aa..2a7b79f6 100644
--- a/src/easynetwork/api_async/client/abc.py
+++ b/src/easynetwork/api_async/client/abc.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network client module"""
@@ -7,25 +18,32 @@
__all__ = ["AbstractAsyncNetworkClient"]
+import math
+import time
from abc import ABCMeta, abstractmethod
from collections.abc import AsyncIterator
-from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar
+from typing import TYPE_CHECKING, Any, Generic, Self
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...tools.socket import SocketAddress
if TYPE_CHECKING:
from types import TracebackType
- from ..backend.abc import AbstractAsyncBackend
-
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
+ from ..backend.abc import AsyncBackend
class AbstractAsyncNetworkClient(Generic[_SentPacketT, _ReceivedPacketT], metaclass=ABCMeta):
+ """
+ The base class for an asynchronous network client interface.
+ """
+
__slots__ = ("__weakref__",)
async def __aenter__(self) -> Self:
+ """
+ Calls :meth:`wait_connected`.
+ """
await self.wait_connected()
return self
@@ -35,6 +53,9 @@ async def __aexit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """
+ Calls :meth:`aclose`.
+ """
await self.aclose()
def __getstate__(self) -> Any: # pragma: no cover
@@ -42,48 +63,181 @@ def __getstate__(self) -> Any: # pragma: no cover
@abstractmethod
def is_connected(self) -> bool:
+ """
+ Checks if the client initialization is finished.
+
+ See Also:
+ :meth:`wait_connected` method.
+
+ Returns:
+ the client connection state.
+ """
raise NotImplementedError
@abstractmethod
async def wait_connected(self) -> None:
+ """
+ Finishes initializing the client, doing the asynchronous operations that could not be done in the constructor.
+
+        There is no need to call it directly if the client is used as an :term:`asynchronous context manager`::
+
+ async with client: # wait_connected() has been called.
+ ...
+
+ Warning:
+ In the case of a cancellation, this would leave the client in an inconsistent state.
+
+ It is recommended to close the client in this case.
+
+ Can be safely called multiple times.
+ """
raise NotImplementedError
@abstractmethod
def is_closing(self) -> bool:
+ """
+ Checks if the client is closed or in the process of being closed.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ See Also:
+ :meth:`aclose` method.
+
+ Returns:
+ the client state.
+ """
raise NotImplementedError
@abstractmethod
async def aclose(self) -> None:
+ """
+ Close the client.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Warning:
+ :meth:`aclose` performs a graceful close, waiting for the connection to close.
+
+ If :meth:`aclose` is cancelled, the client is closed abruptly.
+
+ Can be safely called multiple times.
+ """
raise NotImplementedError
@abstractmethod
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address.
+
+        If :meth:`wait_connected` was not called, an :exc:`OSError` may occur.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
raise NotImplementedError
@abstractmethod
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address.
+
+        If :meth:`wait_connected` was not called, an :exc:`OSError` may occur.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
raise NotImplementedError
@abstractmethod
async def send_packet(self, packet: _SentPacketT) -> None:
+ """
+ Sends `packet` to the remote endpoint.
+
+ Warning:
+ In the case of a cancellation, it is impossible to know if all the packet data has been sent.
+ This would leave the connection in an inconsistent state.
+
+ Parameters:
+ packet: the Python object to send.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+                You should not attempt any further operation; close the client object instead.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
raise NotImplementedError
@abstractmethod
async def recv_packet(self) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+                You should not attempt any further operation; close the client object instead.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ BaseProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
raise NotImplementedError
- async def iter_received_packets(self) -> AsyncIterator[_ReceivedPacketT]:
+ async def iter_received_packets(self, *, timeout: float | None = 0) -> AsyncIterator[_ReceivedPacketT]:
+ """
+ Returns an :term:`asynchronous iterator` that waits for a new packet to arrive from the remote endpoint.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds; it defaults to zero.
+
+ Important:
+ The `timeout` is for the entire iterator::
+
+ async_iterator = client.iter_received_packets(timeout=10)
+
+ # Let's say that this call took 6 seconds...
+ first_packet = await anext(async_iterator)
+
+ # ...then this call has a maximum of 4 seconds, not 10.
+ second_packet = await anext(async_iterator)
+
+            The time taken outside the iterator object is not deducted from the timeout.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for all the receive operations.
+
+ Yields:
+ the received packet.
+ """
+
+ if timeout is None:
+ timeout = math.inf
+
+ perf_counter = time.perf_counter
+ timeout_after = self.get_backend().timeout
+
while True:
try:
- packet = await self.recv_packet()
+ async with timeout_after(timeout):
+ _start = perf_counter()
+ packet = await self.recv_packet()
+ _end = perf_counter()
except OSError:
return
yield packet
+ timeout -= _end - _start
+ timeout = max(timeout, 0)
@abstractmethod
- def fileno(self) -> int:
- raise NotImplementedError
-
- @abstractmethod
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
raise NotImplementedError
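
To make the timeout accounting concrete, a sketch under the assumption that a timed-out receive attempt surfaces as an :exc:`OSError` subclass and therefore ends the iteration (``Any`` packet types for brevity)::

    from typing import Any

    from easynetwork.api_async.client.abc import AbstractAsyncNetworkClient

    async def drain_buffered(client: AbstractAsyncNetworkClient[Any, Any]) -> list[Any]:
        # timeout=0 (the default): roughly, only packets that can be
        # received without waiting are yielded.
        return [packet async for packet in client.iter_received_packets(timeout=0)]

    async def collect_within(client: AbstractAsyncNetworkClient[Any, Any], budget: float) -> list[Any]:
        # The whole loop shares a single time budget of `budget` seconds.
        return [packet async for packet in client.iter_received_packets(timeout=budget)]
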
diff --git a/src/easynetwork/api_async/client/tcp.py b/src/easynetwork/api_async/client/tcp.py
index ecd65cff..5d3dfdf3 100644
--- a/src/easynetwork/api_async/client/tcp.py
+++ b/src/easynetwork/api_async/client/tcp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network client module"""
@@ -11,7 +22,7 @@
import errno as _errno
import socket as _socket
from collections.abc import Callable, Iterator, Mapping
-from typing import TYPE_CHECKING, Any, Generic, NoReturn, TypedDict, TypeVar, cast, final, overload
+from typing import TYPE_CHECKING, Any, NoReturn, TypedDict, final, overload
try:
import ssl as _ssl
@@ -21,6 +32,7 @@
_ssl_module = _ssl
del _ssl
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...exceptions import ClientClosedError
from ...protocol import StreamProtocol
from ...tools._stream import StreamDataConsumer
@@ -32,21 +44,13 @@
)
from ...tools.constants import CLOSED_SOCKET_ERRNOS, MAX_STREAM_BUFSIZE, SSL_HANDSHAKE_TIMEOUT, SSL_SHUTDOWN_TIMEOUT
from ...tools.socket import SocketAddress, SocketProxy, new_socket_address, set_tcp_keepalive, set_tcp_nodelay
-from ..backend.abc import (
- AbstractAsyncBackend,
- AbstractAsyncHalfCloseableStreamSocketAdapter,
- AbstractAsyncStreamSocketAdapter,
- ILock,
-)
+from ..backend.abc import AsyncBackend, AsyncHalfCloseableStreamSocketAdapter, AsyncStreamSocketAdapter, ILock
from ..backend.factory import AsyncBackendFactory
from ..backend.tasks import SingleTaskRunner
from .abc import AbstractAsyncNetworkClient
if TYPE_CHECKING:
- from ssl import SSLContext as _SSLContext
-
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
+ import ssl as _typing_ssl
class _ClientInfo(TypedDict):
@@ -55,7 +59,11 @@ class _ClientInfo(TypedDict):
remote_address: SocketAddress
-class AsyncTCPNetworkClient(AbstractAsyncNetworkClient[_SentPacketT, _ReceivedPacketT], Generic[_SentPacketT, _ReceivedPacketT]):
+class AsyncTCPNetworkClient(AbstractAsyncNetworkClient[_SentPacketT, _ReceivedPacketT]):
+ """
+ An asynchronous network client interface for TCP connections.
+ """
+
__slots__ = (
"__socket",
"__backend",
@@ -81,13 +89,13 @@ def __init__(
*,
local_address: tuple[str, int] | None = ...,
happy_eyeballs_delay: float | None = ...,
- ssl: _SSLContext | bool | None = ...,
+ ssl: _typing_ssl.SSLContext | bool | None = ...,
server_hostname: str | None = ...,
ssl_handshake_timeout: float | None = ...,
ssl_shutdown_timeout: float | None = ...,
ssl_shared_lock: bool | None = ...,
max_recv_size: int | None = ...,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -99,13 +107,13 @@ def __init__(
/,
protocol: StreamProtocol[_SentPacketT, _ReceivedPacketT],
*,
- ssl: _SSLContext | bool | None = ...,
+ ssl: _typing_ssl.SSLContext | bool | None = ...,
server_hostname: str | None = ...,
ssl_handshake_timeout: float | None = ...,
ssl_shutdown_timeout: float | None = ...,
ssl_shared_lock: bool | None = ...,
max_recv_size: int | None = ...,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -116,16 +124,55 @@ def __init__(
/,
protocol: StreamProtocol[_SentPacketT, _ReceivedPacketT],
*,
- ssl: _SSLContext | bool | None = None,
+ ssl: _typing_ssl.SSLContext | bool | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
ssl_shared_lock: bool | None = None,
max_recv_size: int | None = None,
- backend: str | AbstractAsyncBackend | None = None,
+ backend: str | AsyncBackend | None = None,
backend_kwargs: Mapping[str, Any] | None = None,
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ address: A pair of ``(host, port)`` for connection.
+ happy_eyeballs_delay: the "Connection Attempt Delay" as defined in :rfc:`8305`.
+ A sensible default value recommended by the RFC is 0.25 (250 milliseconds).
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+
+ Socket Parameters:
+ socket: An already connected TCP :class:`socket.socket`. If `socket` is given,
+                neither `happy_eyeballs_delay` nor `local_address` should be specified.
+
+ Keyword Arguments:
+            ssl: If given and not false, an SSL/TLS transport is created (by default a plain TCP transport is created).
+ If ssl is a :class:`ssl.SSLContext` object, this context is used to create the transport;
+ if ssl is :data:`True`, a default context returned from :func:`ssl.create_default_context` is used.
+ server_hostname: sets or overrides the hostname that the target server's certificate will be matched against.
+ Should only be passed if `ssl` is not :data:`None`. By default the value of the host in `address`
+ argument is used. If `socket` is provided instead, there is no default and you must pass a value
+ for `server_hostname`. If `server_hostname` is an empty string, hostname matching is disabled
+ (which is a serious security risk, allowing for potential man-in-the-middle attacks).
+ ssl_handshake_timeout: (for a TLS connection) the time in seconds to wait for the TLS handshake to complete
+ before aborting the connection. ``60.0`` seconds if :data:`None` (default).
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+ ``30.0`` seconds if :data:`None` (default).
+            ssl_shared_lock: If :data:`True` (the default), :meth:`send_packet` and :meth:`recv_packet` use
+ the same lock instance.
+            max_recv_size: Read buffer size. If not given, a reasonable default value is used.
+
+ Backend Parameters:
+ backend: the backend to use. Automatically determined otherwise.
+            backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+
+ See Also:
+ :ref:`SSL/TLS security considerations `
+ """
super().__init__()
if not isinstance(protocol, StreamProtocol):
@@ -139,15 +186,16 @@ def __init__(
if not isinstance(max_recv_size, int) or max_recv_size <= 0:
raise ValueError("'max_recv_size' must be a strictly positive integer")
- self.__socket: AbstractAsyncStreamSocketAdapter | None = None
- self.__backend: AbstractAsyncBackend = backend
+ self.__socket: AsyncStreamSocketAdapter | None = None
+ self.__backend: AsyncBackend = backend
self.__info: _ClientInfo | None = None
if ssl:
if _ssl_module is None:
raise RuntimeError("stdlib ssl module not available")
if isinstance(ssl, bool):
- ssl = cast("_SSLContext", _ssl_module.create_default_context())
+ ssl = _ssl_module.create_default_context()
+ assert isinstance(ssl, _ssl_module.SSLContext) # nosec assert_used
if server_hostname is not None and not server_hostname:
ssl.check_hostname = False
else:
@@ -169,7 +217,7 @@ def __init__(
def _value_or_default(value: float | None, default: float) -> float:
return value if value is not None else default
- self.__socket_connector: SingleTaskRunner[AbstractAsyncStreamSocketAdapter] | None = None
+ self.__socket_connector: SingleTaskRunner[AsyncStreamSocketAdapter] | None = None
match __arg:
case _socket.socket() as socket:
_check_socket_family(socket.family)
@@ -228,7 +276,42 @@ def __repr__(self) -> str:
return f"<{type(self).__name__} (partially initialized)>"
return f"<{type(self).__name__} socket={socket!r}>"
+ def is_connected(self) -> bool:
+ """
+ Checks if the client initialization is finished.
+
+ See Also:
+ :meth:`wait_connected` method.
+
+ Returns:
+ the client connection state.
+ """
+ return self.__socket is not None and self.__info is not None
+
async def wait_connected(self) -> None:
+ """
+ Finishes initializing the client, doing the asynchronous operations that could not be done in the constructor.
+ Does not require task synchronization.
+
+        There is no need to call it directly if the client is used as an :term:`asynchronous context manager`::
+
+ async with client: # wait_connected() has been called.
+ ...
+
+ Can be safely called multiple times.
+
+ Warning:
+ Due to limitations of the underlying operating system APIs,
+ it is not always possible to properly cancel a connection attempt once it has begun.
+
+ If :meth:`wait_connected` is cancelled, and is unable to abort the connection attempt, then it will forcibly
+ close the socket to prevent accidental re-use.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: could not connect to remote.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
if self.__socket is None:
socket_connector = self.__socket_connector
if socket_connector is None:
@@ -247,7 +330,7 @@ async def wait_connected(self) -> None:
set_tcp_keepalive(socket_proxy, True)
@staticmethod
- def __build_info_dict(socket: AbstractAsyncStreamSocketAdapter) -> _ClientInfo:
+ def __build_info_dict(socket: AsyncStreamSocketAdapter) -> _ClientInfo:
socket_proxy = SocketProxy(socket.socket())
local_address: SocketAddress = new_socket_address(socket.get_local_address(), socket_proxy.family)
remote_address: SocketAddress = new_socket_address(socket.get_remote_address(), socket_proxy.family)
@@ -257,17 +340,37 @@ def __build_info_dict(socket: AbstractAsyncStreamSocketAdapter) -> _ClientInfo:
"remote_address": remote_address,
}
- def is_connected(self) -> bool:
- return self.__socket is not None and self.__info is not None
-
- @final
def is_closing(self) -> bool:
+ """
+ Checks if the client is closed or in the process of being closed.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ See Also:
+ :meth:`aclose` method.
+
+ Returns:
+ the client state.
+ """
if self.__socket_connector is not None:
return False
socket = self.__socket
return socket is None or socket.is_closing()
async def aclose(self) -> None:
+ """
+ Close the client. Does not require task synchronization.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Warning:
+ :meth:`aclose` performs a graceful close, waiting for the connection to close.
+
+ If :meth:`aclose` is cancelled, the client is closed abruptly.
+
+ Can be safely called multiple times.
+ """
if self.__socket_connector is not None:
self.__socket_connector.cancel()
self.__socket_connector = None
@@ -283,6 +386,25 @@ async def aclose(self) -> None:
pass
async def send_packet(self, packet: _SentPacketT) -> None:
+ """
+ Sends `packet` to the remote endpoint. Does not require task synchronization.
+
+ Calls :meth:`wait_connected`.
+
+ Warning:
+ In the case of a cancellation, it is impossible to know if all the packet data has been sent.
+ This would leave the connection in an inconsistent state.
+
+ Parameters:
+ packet: the Python object to send.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+                You should not attempt any further operation; close the client object instead.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ RuntimeError: :meth:`send_eof` has been called earlier.
+ """
async with self.__send_lock:
socket = await self.__ensure_connected(check_socket_is_closing=True)
if self.__eof_sent:
@@ -292,11 +414,20 @@ async def send_packet(self, packet: _SentPacketT) -> None:
_check_real_socket_state(self.socket)
async def send_eof(self) -> None:
+ """
+ Close the write end of the stream after the buffered write data is flushed. Does not require task synchronization.
+
+ Can be safely called multiple times.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
try:
socket = await self.__ensure_connected(check_socket_is_closing=False)
except ConnectionError:
return
- if not isinstance(socket, AbstractAsyncHalfCloseableStreamSocketAdapter):
+ if not isinstance(socket, AsyncHalfCloseableStreamSocketAdapter):
raise NotImplementedError
async with self.__send_lock:
@@ -307,6 +438,21 @@ async def send_eof(self) -> None:
await socket.send_eof()
async def recv_packet(self) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint. Does not require task synchronization.
+
+ Calls :meth:`wait_connected`.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+                You should not attempt any further operation; close the client object instead.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ StreamProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
async with self.__receive_lock:
consumer = self.__consumer
try:
@@ -319,7 +465,6 @@ async def recv_packet(self) -> _ReceivedPacketT:
self.__abort(None)
bufsize: int = self.__max_recv_size
- backend = self.__backend
while True:
with self.__convert_socket_error():
@@ -335,29 +480,47 @@ async def recv_packet(self) -> _ReceivedPacketT:
return next(consumer)
except StopIteration:
pass
- # Attempt failed, wait for one iteration
- await backend.coro_yield()
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address.
+
+        If :meth:`wait_connected` was not called, an :exc:`OSError` may occur.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
if self.__info is None:
raise _error_from_errno(_errno.ENOTSOCK)
return self.__info["local_address"]
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address.
+
+        If :meth:`wait_connected` was not called, an :exc:`OSError` may occur.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
if self.__info is None:
raise _error_from_errno(_errno.ENOTSOCK)
return self.__info["remote_address"]
- def fileno(self) -> int:
- socket = self.__socket
- if socket is None:
- return -1
- return socket.socket().fileno()
-
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
return self.__backend
- async def __ensure_connected(self, *, check_socket_is_closing: bool) -> AbstractAsyncStreamSocketAdapter:
+ get_backend.__doc__ = AbstractAsyncNetworkClient.get_backend.__doc__
+
+ async def __ensure_connected(self, *, check_socket_is_closing: bool) -> AsyncStreamSocketAdapter:
await self.wait_connected()
assert self.__socket is not None # nosec assert_used
socket = self.__socket
@@ -385,11 +548,16 @@ def __abort(cause: BaseException | None) -> NoReturn:
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute.
+
+ May raise :exc:`AttributeError` if :meth:`wait_connected` was not called.
+ """
if self.__info is None:
- raise _error_from_errno(_errno.ENOTSOCK)
+ raise AttributeError("Socket not connected")
return self.__info["proxy"]
@property
@final
def max_recv_size(self) -> int:
+ """Read buffer size. Read-only attribute."""
return self.__max_recv_size
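
A usage sketch of the constructor documented above; ``json_protocol`` is a placeholder for any ``StreamProtocol`` instance, and the address is arbitrary::

    from easynetwork.api_async.client.tcp import AsyncTCPNetworkClient

    async def main() -> None:
        client = AsyncTCPNetworkClient(("localhost", 9000), json_protocol, ssl=True)
        async with client:  # wait_connected() is called here
            await client.send_packet({"action": "ping"})
            response = await client.recv_packet()
            print(response)
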
diff --git a/src/easynetwork/api_async/client/udp.py b/src/easynetwork/api_async/client/udp.py
index a7ad8561..ed484728 100644
--- a/src/easynetwork/api_async/client/udp.py
+++ b/src/easynetwork/api_async/client/udp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network client module"""
@@ -7,11 +18,15 @@
__all__ = ["AsyncUDPNetworkClient", "AsyncUDPNetworkEndpoint"]
+import contextlib
import errno as _errno
+import math
import socket as _socket
-from collections.abc import AsyncIterator, Mapping
-from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, TypeVar, final, overload
+import time
+from collections.abc import AsyncGenerator, AsyncIterator, Mapping
+from typing import TYPE_CHECKING, Any, Generic, Self, TypedDict, final, overload
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...exceptions import ClientClosedError, DatagramProtocolParseError
from ...protocol import DatagramProtocol
from ...tools._utils import (
@@ -23,7 +38,7 @@
)
from ...tools.constants import MAX_DATAGRAM_BUFSIZE
from ...tools.socket import SocketAddress, SocketProxy, new_socket_address
-from ..backend.abc import AbstractAsyncBackend, AbstractAsyncDatagramSocketAdapter, ILock
+from ..backend.abc import AsyncBackend, AsyncDatagramSocketAdapter, ILock
from ..backend.factory import AsyncBackendFactory
from ..backend.tasks import SingleTaskRunner
from .abc import AbstractAsyncNetworkClient
@@ -31,9 +46,6 @@
if TYPE_CHECKING:
from types import TracebackType
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
-
class _EndpointInfo(TypedDict):
proxy: SocketProxy
@@ -42,6 +54,13 @@ class _EndpointInfo(TypedDict):
class AsyncUDPNetworkEndpoint(Generic[_SentPacketT, _ReceivedPacketT]):
+ """Asynchronous generic UDP endpoint interface.
+
+ See Also:
+ :class:`.AsyncUDPNetworkServer`
+ An advanced API for handling datagrams.
+ """
+
__slots__ = (
"__socket",
"__backend",
@@ -61,7 +80,7 @@ def __init__(
local_address: tuple[str | None, int] | None = ...,
remote_address: tuple[str, int] | None = ...,
reuse_port: bool = ...,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -72,7 +91,7 @@ def __init__(
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
*,
socket: _socket.socket,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -81,10 +100,31 @@ def __init__(
self,
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
*,
- backend: str | AbstractAsyncBackend | None = None,
+ backend: str | AsyncBackend | None = None,
backend_kwargs: Mapping[str, Any] | None = None,
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ remote_address: If given, is a ``(host, port)`` tuple used to connect the socket.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ reuse_port: Tells the kernel to allow this endpoint to be bound to the same port as other existing
+ endpoints are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows and some Unixes.
+                If the ``SO_REUSEPORT`` constant is not defined, this capability is unsupported.
+
+ Socket Parameters:
+ socket: An already connected UDP :class:`socket.socket`. If `socket` is given,
+                none of `local_address`, `remote_address` and `reuse_port` should be specified.
+
+ Backend Parameters:
+ backend: the backend to use. Automatically determined otherwise.
+            backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+ """
super().__init__()
backend = AsyncBackendFactory.ensure(backend, backend_kwargs)
@@ -92,11 +132,11 @@ def __init__(
raise TypeError(f"Expected a DatagramProtocol object, got {protocol!r}")
self.__protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT] = protocol
- self.__socket: AbstractAsyncDatagramSocketAdapter | None = None
- self.__backend: AbstractAsyncBackend = backend
+ self.__socket: AsyncDatagramSocketAdapter | None = None
+ self.__backend: AsyncBackend = backend
self.__info: _EndpointInfo | None = None
- self.__socket_builder: SingleTaskRunner[AbstractAsyncDatagramSocketAdapter] | None = None
+ self.__socket_builder: SingleTaskRunner[AsyncDatagramSocketAdapter] | None = None
match kwargs:
case {"socket": _socket.socket() as socket, **kwargs}:
_check_socket_family(socket.family)
@@ -119,6 +159,9 @@ def __repr__(self) -> str:
return f"<{type(self).__name__} socket={socket!r}>"
async def __aenter__(self) -> Self:
+ """
+ Calls :meth:`wait_bound`.
+ """
await self.wait_bound()
return self
@@ -128,15 +171,46 @@ async def __aexit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """
+ Calls :meth:`aclose`.
+ """
await self.aclose()
def __getstate__(self) -> Any: # pragma: no cover
raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
def is_bound(self) -> bool:
+ """
+ Checks if the endpoint initialization is finished.
+
+ See Also:
+ :meth:`wait_bound` method.
+
+ Returns:
+ the endpoint connection state.
+ """
return self.__socket is not None and self.__info is not None
async def wait_bound(self) -> None:
+ """
+ Finishes initializing the endpoint, doing the asynchronous operations that could not be done in the constructor.
+
+        There is no need to call it directly if the endpoint is used as an :term:`asynchronous context manager`::
+
+ async with endpoint: # wait_bound() has been called.
+ ...
+
+ Can be safely called multiple times.
+
+ Warning:
+ In the case of a cancellation, this would leave the endpoint in an inconsistent state.
+
+ It is recommended to close the endpoint in this case.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
if self.__socket is None:
socket_builder = self.__socket_builder
if socket_builder is None:
@@ -150,7 +224,7 @@ async def wait_bound(self) -> None:
self.__info = self.__build_info_dict(self.__socket)
@staticmethod
- def __build_info_dict(socket: AbstractAsyncDatagramSocketAdapter) -> _EndpointInfo:
+ def __build_info_dict(socket: AsyncDatagramSocketAdapter) -> _EndpointInfo:
socket_proxy = SocketProxy(socket.socket())
local_address: SocketAddress = new_socket_address(socket.get_local_address(), socket_proxy.family)
if local_address.port == 0:
@@ -166,14 +240,36 @@ def __build_info_dict(socket: AbstractAsyncDatagramSocketAdapter) -> _EndpointIn
"remote_address": remote_address,
}
- @final
def is_closing(self) -> bool:
+ """
+ Checks if the endpoint is closed or in the process of being closed.
+
+ If :data:`True`, all future operations on the endpoint object will raise a :exc:`.ClientClosedError`.
+
+ See Also:
+ :meth:`aclose` method.
+
+ Returns:
+ the endpoint state.
+ """
if self.__socket_builder is not None:
return False
socket = self.__socket
return socket is None or socket.is_closing()
async def aclose(self) -> None:
+ """
+ Close the endpoint.
+
+ Once that happens, all future operations on the endpoint object will raise a :exc:`.ClientClosedError`.
+
+ Warning:
+ :meth:`aclose` performs a graceful close, waiting for the connection to close.
+
+            If :meth:`aclose` is cancelled, the endpoint is closed abruptly.
+
+ Can be safely called multiple times.
+ """
if self.__socket_builder is not None:
self.__socket_builder.cancel()
self.__socket_builder = None
@@ -193,6 +289,25 @@ async def send_packet_to(
packet: _SentPacketT,
address: tuple[str, int] | tuple[str, int, int, int] | None,
) -> None:
+ """
+ Sends `packet` to the remote endpoint `address`. Does not require task synchronization.
+
+ Calls :meth:`wait_bound`.
+
+ If a remote address is configured, `address` must be :data:`None` or the same as the remote address,
+ otherwise `address` must not be :data:`None`.
+
+ Warning:
+ In the case of a cancellation, it is impossible to know if all the packet data has been sent.
+
+ Parameters:
+            packet: the Python object to send.
+            address: the destination address.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ ValueError: Invalid `address` value.
+ """
async with self.__send_lock:
socket = await self.__ensure_opened()
assert self.__info is not None # nosec assert_used
@@ -208,6 +323,19 @@ async def send_packet_to(
_check_real_socket_state(self.socket)
async def recv_packet_from(self) -> tuple[_ReceivedPacketT, SocketAddress]:
+ """
+ Waits for a new packet to arrive from another endpoint. Does not require task synchronization.
+
+ Calls :meth:`wait_bound`.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ DatagramProtocolParseError: invalid data received.
+
+ Returns:
+ A ``(packet, address)`` tuple, where `address` is the endpoint that delivered this packet.
+ """
async with self.__receive_lock:
socket = await self.__ensure_opened()
data, sender = await socket.recvfrom(MAX_DATAGRAM_BUFSIZE)
@@ -220,34 +348,88 @@ async def recv_packet_from(self) -> tuple[_ReceivedPacketT, SocketAddress]:
finally:
del data
- async def iter_received_packets_from(self) -> AsyncIterator[tuple[_ReceivedPacketT, SocketAddress]]:
+ async def iter_received_packets_from(
+ self,
+ *,
+ timeout: float | None = 0,
+ ) -> AsyncGenerator[tuple[_ReceivedPacketT, SocketAddress], None]:
+ """
+ Returns an :term:`asynchronous iterator` that waits for a new packet to arrive from another endpoint.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds; it defaults to zero.
+
+ Important:
+ The `timeout` is for the entire iterator::
+
+ async_iterator = endpoint.iter_received_packets_from(timeout=10)
+
+ # Let's say that this call took 6 seconds...
+ first_packet = await anext(async_iterator)
+
+ # ...then this call has a maximum of 4 seconds, not 10.
+ second_packet = await anext(async_iterator)
+
+            The time taken outside the iterator object is not deducted from the timeout.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for all the receive operations.
+
+ Yields:
+ A ``(packet, address)`` tuple, where `address` is the endpoint that delivered this packet.
+ """
+
+ if timeout is None:
+ timeout = math.inf
+
+ perf_counter = time.perf_counter
+ timeout_after = self.get_backend().timeout
+
while True:
try:
- packet_tuple = await self.recv_packet_from()
+ async with timeout_after(timeout):
+ _start = perf_counter()
+ packet_tuple = await self.recv_packet_from()
+ _end = perf_counter()
except OSError:
return
yield packet_tuple
+ timeout -= _end - _start
+ timeout = max(timeout, 0)
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the endpoint's local address.
+ """
if self.__info is None:
raise _error_from_errno(_errno.ENOTSOCK)
return self.__info["local_address"]
def get_remote_address(self) -> SocketAddress | None:
+ """
+ Returns the remote socket IP address.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the endpoint's remote address if configured, :data:`None` otherwise.
+ """
if self.__info is None:
raise _error_from_errno(_errno.ENOTSOCK)
return self.__info["remote_address"]
- def fileno(self) -> int:
- socket = self.__socket
- if socket is None:
- return -1
- return socket.socket().fileno()
-
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
return self.__backend
- async def __ensure_opened(self) -> AbstractAsyncDatagramSocketAdapter:
+ async def __ensure_opened(self) -> AsyncDatagramSocketAdapter:
await self.wait_bound()
assert self.__socket is not None # nosec assert_used
if self.__socket.is_closing():
@@ -257,12 +439,20 @@ async def __ensure_opened(self) -> AbstractAsyncDatagramSocketAdapter:
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute.
+
+ May raise :exc:`AttributeError` if :meth:`wait_bound` was not called.
+ """
if self.__info is None:
- raise _error_from_errno(_errno.ENOTSOCK)
+ raise AttributeError("Socket not connected")
return self.__info["proxy"]
class AsyncUDPNetworkClient(AbstractAsyncNetworkClient[_SentPacketT, _ReceivedPacketT], Generic[_SentPacketT, _ReceivedPacketT]):
+ """
+ An asynchronous network client interface for UDP communication.
+ """
+
__slots__ = ("__endpoint",)
@overload
@@ -274,7 +464,7 @@ def __init__(
*,
local_address: tuple[str, int] | None = ...,
reuse_port: bool = ...,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -286,7 +476,7 @@ def __init__(
/,
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
*,
- backend: str | AbstractAsyncBackend | None = ...,
+ backend: str | AsyncBackend | None = ...,
backend_kwargs: Mapping[str, Any] | None = ...,
) -> None:
...
@@ -298,6 +488,27 @@ def __init__(
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ address: A pair of ``(host, port)`` for connection.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ reuse_port: Tells the kernel to allow this endpoint to be bound to the same port as other existing
+ endpoints are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows and some Unixes.
+                If the ``SO_REUSEPORT`` constant is not defined, this capability is unsupported.
+
+ Socket Parameters:
+ socket: An already connected UDP :class:`socket.socket`. If `socket` is given,
+                neither `local_address` nor `reuse_port` should be specified.
+
+ Backend Parameters:
+ backend: the backend to use. Automatically determined otherwise.
+            backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+ """
super().__init__()
endpoint: AsyncUDPNetworkEndpoint[_SentPacketT, _ReceivedPacketT]
@@ -322,46 +533,143 @@ def __repr__(self) -> str:
return f"<{type(self).__name__} (partially initialized)>"
def is_connected(self) -> bool:
+ """
+ Checks if the client initialization is finished.
+
+ See Also:
+ :meth:`wait_connected` method.
+
+ Returns:
+ the client connection state.
+ """
return self.__endpoint.is_bound() and self.__endpoint.get_remote_address() is not None
async def wait_connected(self) -> None:
+ """
+ Finishes initializing the client, doing the asynchronous operations that could not be done in the constructor.
+ Does not require task synchronization.
+
+        There is no need to call it directly if the client is used as an :term:`asynchronous context manager`::
+
+ async with client: # wait_connected() has been called.
+ ...
+
+ Can be safely called multiple times.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
await self.__endpoint.wait_bound()
self.__check_remote_address()
- @final
def is_closing(self) -> bool:
+ """
+ Checks if the client is closed or in the process of being closed.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ See Also:
+ :meth:`aclose` method.
+
+ Returns:
+ the client state.
+ """
return self.__endpoint.is_closing()
async def aclose(self) -> None:
+ """
+ Close the client. Does not require task synchronization.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Warning:
+ :meth:`aclose` performs a graceful close, waiting for the connection to close.
+
+ If :meth:`aclose` is cancelled, the client is closed abruptly.
+
+ Can be safely called multiple times.
+ """
return await self.__endpoint.aclose()
async def send_packet(self, packet: _SentPacketT) -> None:
+ """
+ Sends `packet` to the remote endpoint. Does not require task synchronization.
+
+ Calls :meth:`wait_connected`.
+
+ Warning:
+ In the case of a cancellation, it is impossible to know if all the packet data has been sent.
+
+ Parameters:
+ packet: the Python object to send.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
await self.wait_connected()
return await self.__endpoint.send_packet_to(packet, None)
async def recv_packet(self) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint. Does not require task synchronization.
+
+ Calls :meth:`wait_connected`.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ DatagramProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
await self.wait_connected()
packet, _ = await self.__endpoint.recv_packet_from()
return packet
- async def iter_received_packets(self) -> AsyncIterator[_ReceivedPacketT]:
+ async def iter_received_packets(self, *, timeout: float | None = 0) -> AsyncIterator[_ReceivedPacketT]:
await self.wait_connected()
- async for packet, _ in self.__endpoint.iter_received_packets_from():
- yield packet
+ async with contextlib.aclosing(self.__endpoint.iter_received_packets_from(timeout=timeout)) as generator:
+ async for packet, _ in generator:
+ yield packet
+
+ iter_received_packets.__doc__ = AbstractAsyncNetworkClient.iter_received_packets.__doc__
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
return self.__endpoint.get_local_address()
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
remote_address: SocketAddress = self.__check_remote_address()
return remote_address
- def fileno(self) -> int:
- return self.__endpoint.fileno()
-
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
return self.__endpoint.get_backend()
+ get_backend.__doc__ = AbstractAsyncNetworkClient.get_backend.__doc__
+
def __check_remote_address(self) -> SocketAddress:
remote_address: SocketAddress | None = self.__endpoint.get_remote_address()
if remote_address is None:
@@ -371,4 +679,8 @@ def __check_remote_address(self) -> SocketAddress:
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute.
+
+ May raise :exc:`AttributeError` if :meth:`wait_connected` was not called.
+ """
return self.__endpoint.socket
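
A hedged sketch of the endpoint API above; ``datagram_protocol`` stands in for any ``DatagramProtocol`` instance::

    from easynetwork.api_async.client.udp import AsyncUDPNetworkEndpoint

    async def main() -> None:
        endpoint = AsyncUDPNetworkEndpoint(datagram_protocol, local_address=(None, 9000))
        async with endpoint:  # wait_bound() is called here
            packet, address = await endpoint.recv_packet_from()
            await endpoint.send_packet_to(packet, address)  # echo it back
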
diff --git a/src/easynetwork/api_async/server/__init__.py b/src/easynetwork/api_async/server/__init__.py
index d88926d3..807c4b71 100644
--- a/src/easynetwork/api_async/server/__init__.py
+++ b/src/easynetwork/api_async/server/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -8,7 +19,6 @@
__all__ = [
"AbstractAsyncNetworkServer",
"AsyncBaseClientInterface",
- "AsyncBaseRequestHandler",
"AsyncDatagramClient",
"AsyncDatagramRequestHandler",
"AsyncStreamClient",
diff --git a/src/easynetwork/api_async/server/_tools/__init__.py b/src/easynetwork/api_async/server/_tools/__init__.py
index 2d5a3522..3651fa64 100644
--- a/src/easynetwork/api_async/server/_tools/__init__.py
+++ b/src/easynetwork/api_async/server/_tools/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
diff --git a/src/easynetwork/api_async/server/_tools/actions.py b/src/easynetwork/api_async/server/_tools/actions.py
index f58f9fef..a470c382 100644
--- a/src/easynetwork/api_async/server/_tools/actions.py
+++ b/src/easynetwork/api_async/server/_tools/actions.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -8,16 +19,26 @@
__all__ = [] # type: list[str]
import dataclasses
-from typing import Generic, TypeVar
+from collections.abc import AsyncGenerator
+from typing import Any, Generic, TypeVar
_T = TypeVar("_T")
-@dataclasses.dataclass(match_args=True, slots=True)
+@dataclasses.dataclass(slots=True)
class RequestAction(Generic[_T]):
request: _T
+ async def asend(self, generator: AsyncGenerator[None, _T]) -> None:
+ await generator.asend(self.request)
-@dataclasses.dataclass(match_args=True, slots=True)
+
+@dataclasses.dataclass(slots=True)
class ErrorAction:
exception: BaseException
+
+ async def asend(self, generator: AsyncGenerator[None, Any]) -> None:
+ try:
+ await generator.athrow(self.exception)
+ finally:
+ del self # Needed to avoid circular reference with raised exception
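These private action objects share one ``asend`` interface, so the server loops below can drive a handler generator without a ``match`` statement. A hedged, standalone illustration with a toy generator (not part of the library's public API):

import asyncio

async def toy_handler():
    while True:
        try:
            request = yield  # RequestAction.asend() resumes here
        except ValueError as exc:  # ErrorAction.asend() raises here via athrow()
            print("error:", exc)
        else:
            print("request:", request)

async def demo():
    generator = toy_handler()
    await anext(generator)  # run up to the first yield
    await RequestAction("hello").asend(generator)
    await ErrorAction(ValueError("oops")).asend(generator)
    await generator.aclose()

asyncio.run(demo())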
diff --git a/src/easynetwork/api_async/server/abc.py b/src/easynetwork/api_async/server/abc.py
index a0bc14f8..de20a21b 100644
--- a/src/easynetwork/api_async/server/abc.py
+++ b/src/easynetwork/api_async/server/abc.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -16,15 +27,28 @@
if TYPE_CHECKING:
from types import TracebackType
- from ..backend.abc import AbstractAsyncBackend
+ from ..backend.abc import AsyncBackend
class SupportsEventSet(Protocol):
+ """
+ A :class:`threading.Event`-like object.
+ """
+
def set(self) -> None: # pragma: no cover
+ """
+ Notifies that the event has happened.
+
+ This method MUST be idempotent.
+ """
...
class AbstractAsyncNetworkServer(metaclass=ABCMeta):
+ """
+ The base class for an asynchronous network server.
+ """
+
__slots__ = ("__weakref__",)
async def __aenter__(self) -> Self:
@@ -36,6 +60,7 @@ async def __aexit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """Calls :meth:`server_close`."""
await self.server_close()
def __getstate__(self) -> Any: # pragma: no cover
@@ -43,20 +68,44 @@ def __getstate__(self) -> Any: # pragma: no cover
@abstractmethod
def is_serving(self) -> bool:
+ """
+ Checks whether the server is up and accepting new clients.
+ """
raise NotImplementedError
@abstractmethod
async def serve_forever(self, *, is_up_event: SupportsEventSet | None = ...) -> None:
+ """
+ Starts the server's main loop.
+
+ Parameters:
+ is_up_event: If given, will be triggered when the server is ready to accept new clients.
+
+ Raises:
+ ServerClosedError: The server is closed.
+ ServerAlreadyRunning: Another task already called :meth:`serve_forever`.
+ """
raise NotImplementedError
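Because ``SupportsEventSet`` only requires a ``set()`` method, a plain :class:`asyncio.Event` satisfies it. A hedged sketch of the intended startup/shutdown sequence, assuming ``server`` is any concrete implementation:

import asyncio

async def run(server):
    async with server:  # __aexit__ calls server_close()
        is_up = asyncio.Event()
        serve_task = asyncio.create_task(server.serve_forever(is_up_event=is_up))
        await is_up.wait()       # set() is called once the server accepts new clients
        await server.shutdown()  # from a task other than serve_forever's
        await serve_task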
@abstractmethod
async def server_close(self) -> None:
+ """
+ Closes the server.
+ """
raise NotImplementedError
@abstractmethod
async def shutdown(self) -> None:
+ """
+ Asks for the server to stop.
+
+ All active client tasks will be cancelled.
+
+ Warning:
+ Do not call this method in the :meth:`serve_forever` task; it will cause a deadlock.
+ """
raise NotImplementedError
@abstractmethod
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
raise NotImplementedError
diff --git a/src/easynetwork/api_async/server/handler.py b/src/easynetwork/api_async/server/handler.py
index 1b1ce486..0f1212de 100644
--- a/src/easynetwork/api_async/server/handler.py
+++ b/src/easynetwork/api_async/server/handler.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network servers' request handler base classes module"""
@@ -7,31 +18,37 @@
__all__ = [
"AsyncBaseClientInterface",
- "AsyncBaseRequestHandler",
"AsyncDatagramClient",
"AsyncDatagramRequestHandler",
"AsyncStreamClient",
"AsyncStreamRequestHandler",
]
+import contextlib
from abc import ABCMeta, abstractmethod
-from collections.abc import AsyncGenerator, Callable, Coroutine
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, final
+from collections.abc import AsyncGenerator, Coroutine
+from typing import TYPE_CHECKING, Any, Generic, final
-if TYPE_CHECKING:
- from ...exceptions import DatagramProtocolParseError, StreamProtocolParseError
- from ...tools.socket import SocketAddress, SocketProxy
- from ..backend.abc import AbstractAsyncBackend
+from ..._typevars import _RequestT, _ResponseT
+from ...tools.socket import SocketAddress, SocketProxy
-
-_RequestT = TypeVar("_RequestT")
-_ResponseT = TypeVar("_ResponseT")
+if TYPE_CHECKING:
+ from ..server.tcp import AsyncTCPNetworkServer
+ from ..server.udp import AsyncUDPNetworkServer
class AsyncBaseClientInterface(Generic[_ResponseT], metaclass=ABCMeta):
+ """
+ The base class for a client interface, used by request handlers.
+ """
+
__slots__ = ("__addr", "__weakref__")
def __init__(self, address: SocketAddress) -> None:
+ """
+ Parameters:
+ address: The remote endpoint's address.
+ """
super().__init__()
self.__addr: SocketAddress = address
@@ -43,32 +60,79 @@ def __getstate__(self) -> Any: # pragma: no cover
@abstractmethod
async def send_packet(self, packet: _ResponseT, /) -> None:
+ """
+ Sends `packet` to the remote endpoint. Does not require task synchronization.
+
+ Warning:
+ In the case of a cancellation, it is impossible to know if all the packet data has been sent.
+ This would leave the connection in an inconsistent state.
+
+ Parameters:
+ packet: the Python object to send.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+ You should not attempt any further operation and close the client object.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
raise NotImplementedError
@abstractmethod
def is_closing(self) -> bool:
+ """
+ Checks if the client is closed or in the process of being closed.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ Returns:
+ the client state.
+ """
raise NotImplementedError
@property
@final
def address(self) -> SocketAddress:
+ """The remote endpoint's address. Read-only attribute."""
return self.__addr
@property
@abstractmethod
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute."""
raise NotImplementedError
class AsyncStreamClient(AsyncBaseClientInterface[_ResponseT]):
+ """
+ A client interface for stream-oriented connections, used by stream request handlers.
+ """
+
__slots__ = ()
@abstractmethod
async def aclose(self) -> None:
+ """
+ Closes the client. Does not require task synchronization.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Can be safely called multiple times.
+
+ Warning:
+ :meth:`aclose` performs a graceful close, waiting for the connection to close.
+
+ If :meth:`aclose` is cancelled, the client is closed abruptly.
+ """
raise NotImplementedError
class AsyncDatagramClient(AsyncBaseClientInterface[_ResponseT]):
+ """
+ A client interface for datagram-oriented connections, used by datagram request handlers.
+ """
+
__slots__ = ()
@abstractmethod
@@ -80,31 +144,57 @@ def __eq__(self, other: object, /) -> bool:
raise NotImplementedError
-class AsyncBaseRequestHandler(metaclass=ABCMeta):
+class AsyncStreamRequestHandler(Generic[_RequestT, _ResponseT], metaclass=ABCMeta):
+ """
+ The base class for a stream request handler, used by TCP network servers.
+ """
+
__slots__ = ("__weakref__",)
- def set_async_backend(self, backend: AbstractAsyncBackend, /) -> None:
+ async def service_init(
+ self,
+ exit_stack: contextlib.AsyncExitStack,
+ server: AsyncTCPNetworkServer[_RequestT, _ResponseT],
+ /,
+ ) -> None:
+ """
+ Called at server startup. The default implementation does nothing.
+
+ Parameters:
+ exit_stack: An :class:`~contextlib.AsyncExitStack` that can be used to register actions for the server's teardown.
+ server: A :func:`weakref.proxy` to the server instance.
+ """
pass
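A hedged sketch of a handler using this hook; ``open_database_pool`` is a hypothetical helper, shown only to illustrate tying a resource's lifetime to the server's:

class MyRequestHandler(AsyncStreamRequestHandler[str, str]):
    async def service_init(self, exit_stack, server):
        self.db = await open_database_pool()            # hypothetical resource
        exit_stack.push_async_callback(self.db.aclose)  # released on server teardown
        self.server = server  # a weakref.proxy; safe to keep

    # handle() omitted for brevity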
- async def service_init(self) -> None:
- pass
+ @abstractmethod
+ def handle(self, client: AsyncStreamClient[_ResponseT], /) -> AsyncGenerator[None, _RequestT]:
+ """
+ This function must do all the work required to service a request.
- async def service_quit(self) -> None:
- pass
+ It is an :term:`asynchronous generator` function::
- async def service_actions(self) -> None:
- pass
+ async def handle(self, client):
+ request = yield
+ # Do some stuff
+ ...
-class AsyncStreamRequestHandler(AsyncBaseRequestHandler, Generic[_RequestT, _ResponseT]):
- __slots__ = ()
+ await client.send_packet(response)
- @abstractmethod
- def handle(self, client: AsyncStreamClient[_ResponseT], /) -> AsyncGenerator[None, _RequestT]:
- raise NotImplementedError
+ :meth:`handle` can :keyword:`yield` whenever a request from the `client` is needed.
- @abstractmethod
- async def bad_request(self, client: AsyncStreamClient[_ResponseT], exc: StreamProtocolParseError, /) -> bool | None:
+ The generator is started immediately after :meth:`on_connection`.
+ When the generator returns, a new generator is created and started immediately after.
+
+ The generator **does not** represent the client's lifetime; ``await client.aclose()`` must be called explicitly.
+
+ Note:
+ There is one exception: if the generator returns before the first :keyword:`yield` statement,
+ the connection is forcibly closed.
+
+ Parameters:
+ client: An interface to communicate with the remote endpoint.
+ """
raise NotImplementedError
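A minimal, hedged echo handler following this contract (the ``str`` request/response types are illustrative):

class EchoRequestHandler(AsyncStreamRequestHandler[str, str]):
    async def handle(self, client):
        request = yield                    # wait for one parsed request
        await client.send_packet(request)  # echo it back; a new generator starts after return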
def on_connection(
@@ -112,25 +202,122 @@ def on_connection(
client: AsyncStreamClient[_ResponseT],
/,
) -> Coroutine[Any, Any, None] | AsyncGenerator[None, _RequestT]:
+ """
+ Called once the client is connected to perform any initialization actions required.
+ The default implementation does nothing.
+
+ It can be either a :term:`coroutine function`::
+
+ async def on_connection(self, client):
+ # Do some stuff
+ ...
+
+ or an :term:`asynchronous generator` function::
+
+ async def on_connection(self, client):
+ # Do some stuff
+ ...
+
+ initial_info = yield
+
+ # Finish initialization
+ ...
+
+ In the latter case, as for :meth:`handle`, :meth:`on_connection` can :keyword:`yield` whenever a request from
+ the `client` is needed.
+
+ Parameters:
+ client: An interface to communicate with the remote endpoint.
+ """
+
async def _pass() -> None:
pass
return _pass()
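A hedged sketch of the asynchronous-generator form, treating the first request as a handshake (the credential check is invented for illustration):

class GreetingHandler(AsyncStreamRequestHandler[str, str]):
    async def on_connection(self, client):
        await client.send_packet("welcome")  # plain coroutine part
        credentials = yield                  # wait for the first request
        if credentials != "secret":          # toy check, not a real auth scheme
            await client.aclose()

    async def handle(self, client):
        request = yield
        await client.send_packet(request)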
async def on_disconnection(self, client: AsyncStreamClient[_ResponseT], /) -> None:
- pass
+ """
+ Called once the client is disconnected to perform any clean-up actions required. The default implementation does nothing.
+
+ This function will not be called under the following conditions:
- def set_stop_listening_callback(self, stop_listening_callback: Callable[[], None], /) -> None:
+ * If :meth:`on_connection` is a simple :term:`coroutine function` and raises an exception.
+
+ * If :meth:`on_connection` is an :term:`asynchronous generator` function and raises an exception
+ before the first :keyword:`yield`.
+
+ Important:
+ :meth:`AsyncStreamClient.is_closing` should return :data:`True` when this function is called.
+ However, if :meth:`handle` raises an exception, the client task is shut down and the connection is forcibly closed
+ *after* :meth:`on_disconnection` is called.
+
+ This behavior allows you to notify the client that something unusual has occurred.
+
+ Parameters:
+ client: An interface to communicate with the remote endpoint.
+ """
pass
-class AsyncDatagramRequestHandler(AsyncBaseRequestHandler, Generic[_RequestT, _ResponseT]):
- __slots__ = ()
+class AsyncDatagramRequestHandler(Generic[_RequestT, _ResponseT], metaclass=ABCMeta):
+ """
+ The base class for a datagram request handler, used by UDP network servers.
+ """
+
+ __slots__ = ("__weakref__",)
+
+ async def service_init(
+ self,
+ exit_stack: contextlib.AsyncExitStack,
+ server: AsyncUDPNetworkServer[_RequestT, _ResponseT],
+ /,
+ ) -> None:
+ """
+ Called at server startup. The default implementation does nothing.
+
+ Parameters:
+ exit_stack: An :class:`~contextlib.AsyncExitStack` that can be used to register actions for the server's teardown.
+ server: A :func:`weakref.proxy` to the server instance.
+ """
+ pass
@abstractmethod
def handle(self, client: AsyncDatagramClient[_ResponseT], /) -> AsyncGenerator[None, _RequestT]:
- raise NotImplementedError
+ """
+ This function must do all the work required to service a request.
- @abstractmethod
- async def bad_request(self, client: AsyncDatagramClient[_ResponseT], exc: DatagramProtocolParseError, /) -> bool | None:
+ It is an :term:`asynchronous generator` function::
+
+ async def handle(self, client):
+ request = yield
+
+ # Do some stuff
+ ...
+
+ await client.send_packet(response)
+
+ :meth:`handle` can :keyword:`yield` whenever a request from the `client` is needed.
+
+ Warning:
+ UDP does not guarantee ordered delivery. Packets are typically "sent" in order, but they may be received out of order.
+ In large networks, it is reasonably common for some packets to arrive out of sequence (or not at all).
+
+ Since there is no connection management, the generator is started when the datagram is received.
+ When the generator returns, a new generator is created and started when a new datagram is received.
+
+ Important:
+ There will always be only one active generator per client.
+ All the pending datagrams received while the generator is running are queued.
+
+ This behavior is designed to act like a stream request handler.
+
+ Note:
+ If the generator returns before the first :keyword:`yield` statement, the received datagram is discarded.
+
+ This is useful when a client you do not expect sends something: the datagrams are only parsed when
+ the generator reaches a :keyword:`yield` statement.
+
+ Parameters:
+ client: An interface to communicate with the remote endpoint.
+ """
raise NotImplementedError
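A hedged sketch of the datagram side; thanks to the per-client queueing described above, two consecutive ``yield`` statements in one generator read two successive datagrams from the same client:

class TwoStepHandler(AsyncDatagramRequestHandler[str, str]):
    async def handle(self, client):
        first = yield   # started when the first datagram arrives
        second = yield  # next queued datagram from this client
        await client.send_packet(first + second)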
diff --git a/src/easynetwork/api_async/server/tcp.py b/src/easynetwork/api_async/server/tcp.py
index eb055ca2..ef2341c8 100644
--- a/src/easynetwork/api_async/server/tcp.py
+++ b/src/easynetwork/api_async/server/tcp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -10,21 +21,20 @@
import contextlib as _contextlib
import errno as _errno
import inspect
-import logging as _logging
-import math
+import logging
import os
import weakref
from collections import deque
from collections.abc import AsyncGenerator, AsyncIterator, Callable, Coroutine, Iterator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, assert_never, final
+from typing import TYPE_CHECKING, Any, Generic, final
-from ...exceptions import ClientClosedError, ServerAlreadyRunning, ServerClosedError, StreamProtocolParseError
+from ..._typevars import _RequestT, _ResponseT
+from ...exceptions import ClientClosedError, ServerAlreadyRunning, ServerClosedError
from ...protocol import StreamProtocol
from ...tools._stream import StreamDataConsumer, StreamDataProducer
from ...tools._utils import (
check_real_socket_state as _check_real_socket_state,
make_callback as _make_callback,
- recursively_clear_exception_traceback_frames as _recursively_clear_exception_traceback_frames,
remove_traceback_frames_in_place as _remove_traceback_frames_in_place,
)
from ...tools.constants import (
@@ -43,7 +53,7 @@
set_tcp_keepalive,
set_tcp_nodelay,
)
-from ..backend.abc import AbstractAsyncHalfCloseableStreamSocketAdapter
+from ..backend.abc import AsyncHalfCloseableStreamSocketAdapter
from ..backend.factory import AsyncBackendFactory
from ..backend.tasks import SingleTaskRunner
from ._tools.actions import ErrorAction as _ErrorAction, RequestAction as _RequestAction
@@ -54,21 +64,21 @@
from ssl import SSLContext as _SSLContext
from ..backend.abc import (
- AbstractAcceptedSocket,
- AbstractAsyncBackend,
- AbstractAsyncListenerSocketAdapter,
- AbstractAsyncStreamSocketAdapter,
- AbstractTask,
- AbstractTaskGroup,
+ AcceptedSocket,
+ AsyncBackend,
+ AsyncListenerSocketAdapter,
+ AsyncStreamSocketAdapter,
IEvent,
+ Task,
+ TaskGroup,
)
-_RequestT = TypeVar("_RequestT")
-_ResponseT = TypeVar("_ResponseT")
-
-
class AsyncTCPNetworkServer(AbstractAsyncNetworkServer, Generic[_RequestT, _ResponseT]):
+ """
+ An asynchronous network server for TCP connections.
+ """
+
__slots__ = (
"__backend",
"__listeners",
@@ -81,7 +91,6 @@ class AsyncTCPNetworkServer(AbstractAsyncNetworkServer, Generic[_RequestT, _Resp
"__max_recv_size",
"__listener_tasks",
"__mainloop_task",
- "__service_actions_interval",
"__client_connection_log_level",
"__logger",
)
@@ -99,12 +108,50 @@ def __init__(
backlog: int | None = None,
reuse_port: bool = False,
max_recv_size: int | None = None,
- service_actions_interval: float | None = None,
- backend: str | AbstractAsyncBackend | None = None,
- backend_kwargs: Mapping[str, Any] | None = None,
log_client_connection: bool | None = None,
- logger: _logging.Logger | None = None,
+ logger: logging.Logger | None = None,
+ backend: str | AsyncBackend | None = None,
+ backend_kwargs: Mapping[str, Any] | None = None,
) -> None:
+ """
+ Parameters:
+ host: Can be set to several types, which determine where the server will be listening:
+
+ * If `host` is a string, the TCP server is bound to a single network interface specified by `host`.
+
+ * If `host` is a sequence of strings, the TCP server is bound to all network interfaces specified by the sequence.
+
+ * If `host` is :data:`None`, all interfaces are assumed and the server will listen on multiple sockets
+ (most likely one for IPv4 and another one for IPv6).
+ port: specify which port the server should listen on. If the value is ``0``, a random unused port will be selected
+ (note that if `host` resolves to multiple network interfaces, a different random port will be selected
+ for each interface).
+ protocol: The :term:`protocol object` to use.
+ request_handler: The request handler to use.
+
+ Keyword Arguments:
+ ssl: can be set to an :class:`ssl.SSLContext` instance to enable TLS over the accepted connections.
+ ssl_handshake_timeout: (for a TLS connection) the time in seconds to wait for the TLS handshake to complete
+ before aborting the connection. ``60.0`` seconds if :data:`None` (default).
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+ ``30.0`` seconds if :data:`None` (default).
+ backlog: the maximum number of queued connections passed to :meth:`~socket.socket.listen` (defaults to ``100``).
+ reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints
+ are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows.
+ max_recv_size: Read buffer size. If not given, a reasonable default value is used.
+ log_client_connection: If :data:`True`, log client connections/disconnections at :data:`logging.INFO` level.
+ (This log will always be available at :data:`logging.DEBUG` level.)
+ logger: If given, the logger instance to use.
+
+ Backend Parameters:
+ backend: the backend to use. Automatically determined otherwise.
+ backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+
+ See Also:
+ :ref:`SSL/TLS security considerations `
+ """
super().__init__()
if not isinstance(protocol, StreamProtocol):
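Putting the new signature together, a hedged construction sketch; ``protocol`` (a :class:`StreamProtocol` instance) is assumed to be built elsewhere, and the handler reuses the ``EchoRequestHandler`` sketched above:

server = AsyncTCPNetworkServer(
    host=None,  # all interfaces, most likely one socket for IPv4 and one for IPv6
    port=9000,
    protocol=protocol,
    request_handler=EchoRequestHandler(),
    log_client_connection=True,
)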
@@ -134,7 +181,7 @@ def __init__(
def _value_or_default(value: float | None, default: float) -> float:
return value if value is not None else default
- self.__listeners_factory: Callable[[], Coroutine[Any, Any, Sequence[AbstractAsyncListenerSocketAdapter]]] | None
+ self.__listeners_factory: Callable[[], Coroutine[Any, Any, Sequence[AsyncListenerSocketAdapter]]] | None
if ssl:
self.__listeners_factory = _make_callback(
backend.create_ssl_over_tcp_listeners,
@@ -154,33 +201,39 @@ def _value_or_default(value: float | None, default: float) -> float:
backlog=backlog,
reuse_port=reuse_port,
)
- self.__listeners_factory_runner: SingleTaskRunner[Sequence[AbstractAsyncListenerSocketAdapter]] | None = None
-
- if service_actions_interval is None:
- service_actions_interval = 1.0
+ self.__listeners_factory_runner: SingleTaskRunner[Sequence[AsyncListenerSocketAdapter]] | None = None
- self.__service_actions_interval: float = max(service_actions_interval, 0)
- self.__backend: AbstractAsyncBackend = backend
- self.__listeners: tuple[AbstractAsyncListenerSocketAdapter, ...] | None = None
+ self.__backend: AsyncBackend = backend
+ self.__listeners: tuple[AsyncListenerSocketAdapter, ...] | None = None
self.__protocol: StreamProtocol[_ResponseT, _RequestT] = protocol
self.__request_handler: AsyncStreamRequestHandler[_RequestT, _ResponseT] = request_handler
self.__is_shutdown: IEvent = self.__backend.create_event()
self.__is_shutdown.set()
self.__shutdown_asked: bool = False
self.__max_recv_size: int = max_recv_size
- self.__listener_tasks: deque[AbstractTask[None]] = deque()
- self.__mainloop_task: AbstractTask[None] | None = None
- self.__logger: _logging.Logger = logger or _logging.getLogger(__name__)
+ self.__listener_tasks: deque[Task[None]] = deque()
+ self.__mainloop_task: Task[None] | None = None
+ self.__logger: logging.Logger = logger or logging.getLogger(__name__)
self.__client_connection_log_level: int
if log_client_connection:
- self.__client_connection_log_level = _logging.INFO
+ self.__client_connection_log_level = logging.INFO
else:
- self.__client_connection_log_level = _logging.DEBUG
+ self.__client_connection_log_level = logging.DEBUG
def is_serving(self) -> bool:
return self.__listeners is not None and all(not listener.is_closing() for listener in self.__listeners)
+ is_serving.__doc__ = AbstractAsyncNetworkServer.is_serving.__doc__
+
def stop_listening(self) -> None:
+ """
+ Schedules the shutdown of all listener sockets.
+
+ After that, all new connections will be refused, but the server will continue to run and handle
+ previously accepted connections.
+
+ Further calls to :meth:`is_serving` will return :data:`False`.
+ """
with _contextlib.ExitStack() as exit_stack:
for listener_task in self.__listener_tasks:
exit_stack.callback(listener_task.cancel)
@@ -191,12 +244,14 @@ async def server_close(self) -> None:
self.__listeners_factory = None
await self.__close_listeners()
+ server_close.__doc__ = AbstractAsyncNetworkServer.server_close.__doc__
+
async def __close_listeners(self) -> None:
async with _contextlib.AsyncExitStack() as exit_stack:
listeners, self.__listeners = self.__listeners, None
if listeners is not None:
- async def close_listener(listener: AbstractAsyncListenerSocketAdapter) -> None:
+ async def close_listener(listener: AsyncListenerSocketAdapter) -> None:
with _contextlib.suppress(OSError):
await listener.aclose()
@@ -224,15 +279,18 @@ async def shutdown(self) -> None:
finally:
self.__shutdown_asked = False
+ shutdown.__doc__ = AbstractAsyncNetworkServer.shutdown.__doc__
+
def __kill_listener_factory_runner(self) -> None:
if self.__listeners_factory_runner is not None:
self.__listeners_factory_runner.cancel()
async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) -> None:
async with _contextlib.AsyncExitStack() as server_exit_stack:
+ is_up_callback = server_exit_stack.enter_context(_contextlib.ExitStack())
if is_up_event is not None:
# Force is_up_event to be set, so that the waiting task does not get stuck
- server_exit_stack.callback(is_up_event.set)
+ is_up_callback.callback(is_up_event.set)
# Wake up server
if not self.__is_shutdown.is_set():
@@ -262,10 +320,10 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
################
# Initialize request handler
- self.__request_handler.set_async_backend(self.__backend)
- await self.__request_handler.service_init()
- server_exit_stack.push_async_callback(self.__request_handler.service_quit)
- self.__request_handler.set_stop_listening_callback(self.__make_stop_listening_callback())
+ await self.__request_handler.service_init(
+ await server_exit_stack.enter_async_context(_contextlib.AsyncExitStack()),
+ weakref.proxy(self),
+ )
############################
# Setup task group
@@ -282,9 +340,8 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
#################
# Server is up
- if is_up_event is not None:
- is_up_event.set()
- task_group.start_soon(self.__service_actions_task)
+ is_up_callback.close()
+ del is_up_callback
##############
# Main loop
@@ -296,35 +353,15 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
finally:
self.__mainloop_task = None
- def __make_stop_listening_callback(self) -> Callable[[], None]:
- selfref = weakref.ref(self)
+ serve_forever.__doc__ = AbstractAsyncNetworkServer.serve_forever.__doc__
- def stop_listening() -> None:
- self = selfref()
- return self.stop_listening() if self is not None else None
-
- return stop_listening
-
- async def __service_actions_task(self) -> None:
- request_handler = self.__request_handler
- backend = self.__backend
- service_actions_interval = self.__service_actions_interval
- if math.isinf(service_actions_interval):
- return
- while True:
- await backend.sleep(service_actions_interval)
- try:
- await request_handler.service_actions()
- except Exception:
- self.__logger.exception("Error occurred in request_handler.service_actions()")
-
- async def __listener_accept(self, listener: AbstractAsyncListenerSocketAdapter, task_group: AbstractTaskGroup) -> None:
+ async def __listener_accept(self, listener: AsyncListenerSocketAdapter, task_group: TaskGroup) -> None:
backend = self.__backend
client_task = self.__client_coroutine
async with listener:
while True:
try:
- client_socket: AbstractAcceptedSocket = await listener.accept()
+ client_socket: AcceptedSocket = await listener.accept()
except OSError as exc: # pragma: no cover # Not testable
if exc.errno in ACCEPT_CAPACITY_ERRNOS:
self.__logger.error(
@@ -342,12 +379,12 @@ async def __listener_accept(self, listener: AbstractAsyncListenerSocketAdapter,
del client_socket
await backend.coro_yield()
- async def __client_coroutine(self, accepted_socket: AbstractAcceptedSocket) -> None:
+ async def __client_coroutine(self, accepted_socket: AcceptedSocket) -> None:
async with _contextlib.AsyncExitStack() as client_exit_stack:
client_exit_stack.enter_context(self.__suppress_and_log_remaining_exception())
try:
- socket: AbstractAsyncStreamSocketAdapter = await accepted_socket.connect()
+ socket: AsyncStreamSocketAdapter = await accepted_socket.connect()
finally:
del accepted_socket
@@ -357,7 +394,7 @@ async def __client_coroutine(self, accepted_socket: AbstractAcceptedSocket) -> N
# tell the OS to immediately abort the connection when calling socket.socket.close()
client_exit_stack.callback(self.__set_socket_linger_if_not_closed, socket.socket())
- logger: _logging.Logger = self.__logger
+ logger: logging.Logger = self.__logger
backend = self.__backend
producer = StreamDataProducer(self.__protocol)
consumer = StreamDataConsumer(self.__protocol)
@@ -409,39 +446,9 @@ async def __client_coroutine(self, accepted_socket: AbstractAcceptedSocket) -> N
return
async for action in request_receiver:
try:
- match action:
- case _RequestAction(request):
- logger.debug("Processing request sent by %s", client.address)
- try:
- await request_handler_generator.asend(request)
- except StopAsyncIteration:
- request_handler_generator = None
- finally:
- del request
- case _ErrorAction(StreamProtocolParseError() as exception):
- logger.debug("Malformed request sent by %s", client.address)
- try:
- try:
- _recursively_clear_exception_traceback_frames(exception)
- except RecursionError:
- logger.warning("Recursion depth reached when clearing exception's traceback frames")
- should_close_handle = not (await self.__request_handler.bad_request(client, exception))
- if should_close_handle:
- try:
- await request_handler_generator.aclose()
- finally:
- request_handler_generator = None
- finally:
- del exception
- case _ErrorAction(exception):
- try:
- await request_handler_generator.athrow(exception)
- except StopAsyncIteration:
- request_handler_generator = None
- finally:
- del exception
- case _: # pragma: no cover
- assert_never(action)
+ await action.asend(request_handler_generator)
+ except StopAsyncIteration:
+ request_handler_generator = None
finally:
del action
await backend.cancel_shielded_coro_yield()
@@ -463,7 +470,7 @@ async def __new_request_handler(self, client: _ConnectedClientAPI[_ResponseT]) -
return None
return request_handler_generator
- async def __force_close_stream_socket(self, socket: AbstractAsyncStreamSocketAdapter) -> None:
+ async def __force_close_stream_socket(self, socket: AsyncStreamSocketAdapter) -> None:
with _contextlib.suppress(OSError):
await self.__backend.ignore_cancellation(socket.aclose())
@@ -488,7 +495,7 @@ def __suppress_and_log_remaining_exception(self, client_address: SocketAddress |
except* OSError as excgrp:
if self.__have_errno(excgrp, {_errno.ENOTCONN, _errno.EINVAL}):
# The remote host closed the connection before starting the task.
- # See this test for more information:
+ # See this test for details:
# test____serve_forever____accept_client____client_sent_RST_packet_right_after_accept
self.__logger.warning("A client connection was interrupted just after listener.accept()")
@@ -519,6 +526,13 @@ def __suppress_and_log_remaining_exception(self, client_address: SocketAddress |
self.__logger.error("-" * 40)
def get_addresses(self) -> Sequence[SocketAddress]:
+ """
+ Returns all interfaces to which the listeners are bound.
+
+ Returns:
+ A sequence of network socket addresses.
+ If the server is not serving (:meth:`is_serving` returns :data:`False`), an empty sequence is returned.
+ """
if (listeners := self.__listeners) is None:
return ()
return tuple(
@@ -527,17 +541,21 @@ def get_addresses(self) -> Sequence[SocketAddress]:
if not listener.is_closing()
)
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
return self.__backend
+ get_backend.__doc__ = AbstractAsyncNetworkServer.get_backend.__doc__
+
@property
def sockets(self) -> Sequence[SocketProxy]:
+ """The listeners sockets. Read-only attribute."""
if (listeners := self.__listeners) is None:
return ()
return tuple(SocketProxy(listener.socket()) for listener in listeners)
@property
- def logger(self) -> _logging.Logger:
+ def logger(self) -> logging.Logger:
+ """The server's logger."""
return self.__logger
@@ -547,26 +565,26 @@ class _RequestReceiver(Generic[_RequestT]):
def __init__(
self,
consumer: StreamDataConsumer[_RequestT],
- socket: AbstractAsyncStreamSocketAdapter,
+ socket: AsyncStreamSocketAdapter,
max_recv_size: int,
api: _ConnectedClientAPI[Any],
- logger: _logging.Logger,
+ logger: logging.Logger,
) -> None:
assert max_recv_size > 0, f"{max_recv_size=}" # nosec assert_used
self.__consumer: StreamDataConsumer[_RequestT] = consumer
- self.__socket: AbstractAsyncStreamSocketAdapter = socket
+ self.__socket: AsyncStreamSocketAdapter = socket
self.__max_recv_size: int = max_recv_size
self.__api: _ConnectedClientAPI[Any] = api
- self.__logger: _logging.Logger = logger
+ self.__logger: logging.Logger = logger
def __aiter__(self) -> AsyncIterator[_RequestAction[_RequestT] | _ErrorAction]:
return self
async def __anext__(self) -> _RequestAction[_RequestT] | _ErrorAction:
consumer: StreamDataConsumer[_RequestT] = self.__consumer
- socket: AbstractAsyncStreamSocketAdapter = self.__socket
+ socket: AsyncStreamSocketAdapter = self.__socket
client: _ConnectedClientAPI[Any] = self.__api
- logger: _logging.Logger = self.__logger
+ logger: logging.Logger = self.__logger
bufsize: int = self.__max_recv_size
try:
while not socket.is_closing():
@@ -603,19 +621,19 @@ class _ConnectedClientAPI(AsyncStreamClient[_ResponseT]):
def __init__(
self,
- backend: AbstractAsyncBackend,
- socket: AbstractAsyncStreamSocketAdapter,
+ backend: AsyncBackend,
+ socket: AsyncStreamSocketAdapter,
producer: StreamDataProducer[_ResponseT],
- logger: _logging.Logger,
+ logger: logging.Logger,
) -> None:
super().__init__(new_socket_address(socket.get_remote_address(), socket.socket().family))
- self.__socket: AbstractAsyncStreamSocketAdapter = socket
+ self.__socket: AsyncStreamSocketAdapter = socket
self.__closed: bool = False
self.__producer: StreamDataProducer[_ResponseT] = producer
self.__send_lock = backend.create_lock()
self.__proxy: SocketProxy = SocketProxy(socket.socket())
- self.__logger: _logging.Logger = logger
+ self.__logger: logging.Logger = logger
def is_closing(self) -> bool:
return self.__closed or self.__socket.is_closing()
@@ -651,15 +669,15 @@ async def send_packet(self, packet: _ResponseT, /) -> None:
_check_real_socket_state(self.socket)
self.__logger.debug("Data sent to %s", self.address)
- def __check_closed(self) -> AbstractAsyncStreamSocketAdapter:
+ def __check_closed(self) -> AsyncStreamSocketAdapter:
socket = self.__socket
if self.__closed:
raise ClientClosedError("Closed client")
return socket
@staticmethod
- async def __shutdown_socket(socket: AbstractAsyncStreamSocketAdapter) -> None:
- if not isinstance(socket, AbstractAsyncHalfCloseableStreamSocketAdapter):
+ async def __shutdown_socket(socket: AsyncStreamSocketAdapter) -> None:
+ if not isinstance(socket, AsyncHalfCloseableStreamSocketAdapter):
return
with _contextlib.suppress(OSError):
if not socket.is_closing():
diff --git a/src/easynetwork/api_async/server/udp.py b/src/easynetwork/api_async/server/udp.py
index ea996025..2712e961 100644
--- a/src/easynetwork/api_async/server/udp.py
+++ b/src/easynetwork/api_async/server/udp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -9,21 +20,20 @@
import contextlib as _contextlib
import contextvars
-import logging as _logging
-import math
+import logging
import operator
import weakref
from collections import Counter, deque
from collections.abc import AsyncGenerator, AsyncIterator, Callable, Coroutine, Iterator, Mapping
-from typing import TYPE_CHECKING, Any, Generic, TypeVar, assert_never, final
+from typing import TYPE_CHECKING, Any, Generic, TypeVar, final
from weakref import WeakValueDictionary
+from ..._typevars import _RequestT, _ResponseT
from ...exceptions import ClientClosedError, DatagramProtocolParseError, ServerAlreadyRunning, ServerClosedError
from ...protocol import DatagramProtocol
from ...tools._utils import (
check_real_socket_state as _check_real_socket_state,
make_callback as _make_callback,
- recursively_clear_exception_traceback_frames as _recursively_clear_exception_traceback_frames,
remove_traceback_frames_in_place as _remove_traceback_frames_in_place,
)
from ...tools.constants import MAX_DATAGRAM_BUFSIZE
@@ -35,25 +45,17 @@
from .handler import AsyncDatagramClient, AsyncDatagramRequestHandler
if TYPE_CHECKING:
- from ..backend.abc import (
- AbstractAsyncBackend,
- AbstractAsyncDatagramSocketAdapter,
- AbstractTask,
- AbstractTaskGroup,
- ICondition,
- IEvent,
- ILock,
- )
-
-
-_RequestT = TypeVar("_RequestT")
-_ResponseT = TypeVar("_ResponseT")
+ from ..backend.abc import AsyncBackend, AsyncDatagramSocketAdapter, ICondition, IEvent, ILock, Task, TaskGroup
_KT = TypeVar("_KT")
_VT = TypeVar("_VT")
class AsyncUDPNetworkServer(AbstractAsyncNetworkServer, Generic[_RequestT, _ResponseT]):
+ """
+ An asynchronous network server for UDP communication.
+ """
+
__slots__ = (
"__backend",
"__socket",
@@ -68,7 +70,6 @@ class AsyncUDPNetworkServer(AbstractAsyncNetworkServer, Generic[_RequestT, _Resp
"__clients_waiting_for_new_datagrams",
"__client_task_running",
"__mainloop_task",
- "__service_actions_interval",
"__logger",
)
@@ -80,11 +81,30 @@ def __init__(
request_handler: AsyncDatagramRequestHandler[_RequestT, _ResponseT],
*,
reuse_port: bool = False,
- backend: str | AbstractAsyncBackend | None = None,
+ logger: logging.Logger | None = None,
+ backend: str | AsyncBackend | None = None,
backend_kwargs: Mapping[str, Any] | None = None,
- service_actions_interval: float | None = None,
- logger: _logging.Logger | None = None,
) -> None:
+ """
+ Parameters:
+ host: specify the network interface to which the server should bind.
+ port: specify which port the server should listen on. If the value is ``0``, a random unused port will be selected
+ (note that if `host` resolves to multiple network interfaces, a different random port will be selected
+ for each interface).
+ protocol: The :term:`protocol object` to use.
+ request_handler: The request handler to use.
+
+ Keyword Arguments:
+ reuse_port: tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints
+ are bound to, so long as they all set this flag when being created.
+ This option is not supported on Windows.
+ logger: If given, the logger instance to use.
+
+ Backend Parameters:
+ backend: the backend to use. Automatically determined otherwise.
+ backend_kwargs: Keyword arguments for backend instantiation.
+ Ignored if `backend` is already an :class:`.AsyncBackend` instance.
+ """
super().__init__()
if not isinstance(protocol, DatagramProtocol):
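The UDP constructor mirrors the TCP one minus the listener-specific options; a hedged sketch under the same assumptions (``protocol`` is a :class:`DatagramProtocol` instance built elsewhere):

server = AsyncUDPNetworkServer(
    host="0.0.0.0",
    port=9000,
    protocol=protocol,
    request_handler=TwoStepHandler(),
)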
@@ -94,29 +114,25 @@ def __init__(
backend = AsyncBackendFactory.ensure(backend, backend_kwargs)
- self.__socket_factory: Callable[[], Coroutine[Any, Any, AbstractAsyncDatagramSocketAdapter]] | None
+ self.__socket_factory: Callable[[], Coroutine[Any, Any, AsyncDatagramSocketAdapter]] | None
self.__socket_factory = _make_callback(
backend.create_udp_endpoint,
local_address=(host, port),
remote_address=None,
reuse_port=reuse_port,
)
- self.__socket_factory_runner: SingleTaskRunner[AbstractAsyncDatagramSocketAdapter] | None = None
+ self.__socket_factory_runner: SingleTaskRunner[AsyncDatagramSocketAdapter] | None = None
- if service_actions_interval is None:
- service_actions_interval = 1.0
-
- self.__service_actions_interval: float = max(service_actions_interval, 0)
- self.__backend: AbstractAsyncBackend = backend
- self.__socket: AbstractAsyncDatagramSocketAdapter | None = None
+ self.__backend: AsyncBackend = backend
+ self.__socket: AsyncDatagramSocketAdapter | None = None
self.__protocol: DatagramProtocol[_ResponseT, _RequestT] = protocol
self.__request_handler: AsyncDatagramRequestHandler[_RequestT, _ResponseT] = request_handler
self.__is_shutdown: IEvent = self.__backend.create_event()
self.__is_shutdown.set()
self.__shutdown_asked: bool = False
self.__sendto_lock: ILock = backend.create_lock()
- self.__mainloop_task: AbstractTask[None] | None = None
- self.__logger: _logging.Logger = logger or _logging.getLogger(__name__)
+ self.__mainloop_task: Task[None] | None = None
+ self.__logger: logging.Logger = logger or logging.getLogger(__name__)
self.__client_manager: _ClientAPIManager[_ResponseT] = _ClientAPIManager(
self.__backend,
self.__protocol,
@@ -129,17 +145,21 @@ def __init__(
def is_serving(self) -> bool:
return (socket := self.__socket) is not None and not socket.is_closing()
+ is_serving.__doc__ = AbstractAsyncNetworkServer.is_serving.__doc__
+
async def server_close(self) -> None:
self.__kill_socket_factory_runner()
self.__socket_factory = None
await self.__close_socket()
+ server_close.__doc__ = AbstractAsyncNetworkServer.server_close.__doc__
+
async def __close_socket(self) -> None:
async with _contextlib.AsyncExitStack() as exit_stack:
socket, self.__socket = self.__socket, None
if socket is not None:
- async def close_socket(socket: AbstractAsyncDatagramSocketAdapter) -> None:
+ async def close_socket(socket: AsyncDatagramSocketAdapter) -> None:
with _contextlib.suppress(OSError):
await socket.aclose()
@@ -162,15 +182,18 @@ async def shutdown(self) -> None:
finally:
self.__shutdown_asked = False
+ shutdown.__doc__ = AbstractAsyncNetworkServer.shutdown.__doc__
+
def __kill_socket_factory_runner(self) -> None:
if self.__socket_factory_runner is not None:
self.__socket_factory_runner.cancel()
async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) -> None:
async with _contextlib.AsyncExitStack() as server_exit_stack:
+ is_up_callback = server_exit_stack.enter_context(_contextlib.ExitStack())
if is_up_event is not None:
# Force is_up_event to be set, so that the waiting task does not get stuck
- server_exit_stack.callback(is_up_event.set)
+ is_up_callback.callback(is_up_event.set)
# Wake up server
if not self.__is_shutdown.is_set():
@@ -197,15 +220,16 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
################
# Initialize request handler
- self.__request_handler.set_async_backend(self.__backend)
- await self.__request_handler.service_init()
server_exit_stack.callback(self.__client_manager.clear)
- server_exit_stack.push_async_callback(self.__request_handler.service_quit)
+ await self.__request_handler.service_init(
+ await server_exit_stack.enter_async_context(_contextlib.AsyncExitStack()),
+ weakref.proxy(self),
+ )
server_exit_stack.push_async_callback(self.__close_socket)
############################
# Setup task group
- task_group: AbstractTaskGroup = await server_exit_stack.enter_async_context(self.__backend.create_task_group())
+ task_group: TaskGroup = await server_exit_stack.enter_async_context(self.__backend.create_task_group())
server_exit_stack.callback(self.__logger.info, "Server loop break, waiting for remaining tasks...")
##################
@@ -214,9 +238,8 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
#################
# Server is up
- if is_up_event is not None:
- is_up_event.set()
- task_group.start_soon(self.__service_actions_task)
+ is_up_callback.close()
+ del is_up_callback
##############
# Main loop
@@ -228,15 +251,17 @@ async def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) ->
finally:
self.__mainloop_task = None
+ serve_forever.__doc__ = AbstractAsyncNetworkServer.serve_forever.__doc__
+
async def __receive_datagrams(
self,
- socket: AbstractAsyncDatagramSocketAdapter,
- task_group: AbstractTaskGroup,
+ socket: AsyncDatagramSocketAdapter,
+ task_group: TaskGroup,
) -> None:
backend = self.__backend
socket_family: int = socket.socket().family
datagram_received_task_method = self.__datagram_received_coroutine
- logger: _logging.Logger = self.__logger
+ logger: logging.Logger = self.__logger
bufsize: int = MAX_DATAGRAM_BUFSIZE
client_manager: _ClientAPIManager[_ResponseT] = self.__client_manager
client_task_running_set: set[_ClientAPI[_ResponseT]] = self.__client_task_running
@@ -264,23 +289,10 @@ async def __receive_datagrams(
await backend.coro_yield()
- async def __service_actions_task(self) -> None:
- request_handler = self.__request_handler
- backend = self.__backend
- service_actions_interval = self.__service_actions_interval
- if math.isinf(service_actions_interval):
- return
- while True:
- await backend.sleep(service_actions_interval)
- try:
- await request_handler.service_actions()
- except Exception:
- self.__logger.exception("Error occurred in request_handler.service_actions()")
-
async def __datagram_received_coroutine(
self,
client: _ClientAPI[_ResponseT],
- task_group: AbstractTaskGroup,
+ task_group: TaskGroup,
) -> None:
backend = self.__backend
@@ -289,7 +301,6 @@ async def __datagram_received_coroutine(
condition.notify()
return
request_handler_generator: AsyncGenerator[None, _RequestT] | None = None
- logger: _logging.Logger = self.__logger
with (
self.__suppress_and_log_remaining_exception(client.address),
self.__client_manager.datagram_queue(client) as datagram_queue,
@@ -320,56 +331,24 @@ async def __datagram_received_coroutine(
datagram = datagram_queue.popleft()
try:
action = _RequestAction(self.__protocol.build_packet_from_datagram(datagram))
+ except DatagramProtocolParseError as exc:
+ exc.sender_address = client.address
+ raise
finally:
del datagram
except BaseException as exc:
action = _ErrorAction(exc)
try:
- match action:
- case _RequestAction(request):
- logger.debug("Processing request sent by %s", client.address)
- try:
- await request_handler_generator.asend(request)
- except StopAsyncIteration:
- request_handler_generator = None
- return
- finally:
- del request
- case _ErrorAction(DatagramProtocolParseError() as exception):
- exception.sender_address = client.address
- logger.debug("Malformed request sent by %s", client.address)
- try:
- try:
- _recursively_clear_exception_traceback_frames(exception)
- except RecursionError:
- logger.warning("Recursion depth reached when clearing exception's traceback frames")
- should_close_handle = not (await self.__request_handler.bad_request(client, exception))
- if should_close_handle:
- try:
- await request_handler_generator.aclose()
- finally:
- request_handler_generator = None
- return
- finally:
- del exception
- case _ErrorAction(exception):
- try:
- await request_handler_generator.athrow(exception)
- except StopAsyncIteration:
- request_handler_generator = None
- return
- finally:
- del exception
- case _: # pragma: no cover
- assert_never(action)
+ await action.asend(request_handler_generator)
+ except StopAsyncIteration:
+ return
finally:
del action
await backend.cancel_shielded_coro_yield()
finally:
- if request_handler_generator is not None:
- await request_handler_generator.aclose()
+ await request_handler_generator.aclose()
async def __new_request_handler(self, client: _ClientAPI[_ResponseT]) -> AsyncGenerator[None, _RequestT] | None:
request_handler_generator = self.__request_handler.handle(client)
@@ -412,17 +391,19 @@ def __enqueue_task_at_end(
self,
client: _ClientAPI[_ResponseT],
datagram_queue: deque[bytes],
- task_group: AbstractTaskGroup,
+ task_group: TaskGroup,
) -> Iterator[None]:
default_context: contextvars.Context = contextvars.copy_context()
try:
yield
finally:
if datagram_queue:
- try:
- task_group.start_soon_with_context(default_context, self.__datagram_received_coroutine, client, task_group)
- except NotImplementedError:
- default_context.run(task_group.start_soon, self.__datagram_received_coroutine, client, task_group) # type: ignore[arg-type]
+ task_group.start_soon(
+ self.__datagram_received_coroutine,
+ client,
+ task_group,
+ context=default_context,
+ )
@_contextlib.contextmanager
def __client_task_running_context(self, client: _ClientAPI[_ResponseT]) -> Iterator[None]:
@@ -435,21 +416,32 @@ def __client_task_running_context(self, client: _ClientAPI[_ResponseT]) -> Itera
del client
def get_address(self) -> SocketAddress | None:
+ """
+ Returns the interface to which the datagram socket is bound.
+
+ Returns:
+ A network socket address.
+ If the server is not serving (:meth:`is_serving` returns :data:`False`), :data:`None` is returned.
+ """
if (socket := self.__socket) is None or socket.is_closing():
return None
return new_socket_address(socket.get_local_address(), socket.socket().family)
- def get_backend(self) -> AbstractAsyncBackend:
+ def get_backend(self) -> AsyncBackend:
return self.__backend
+ get_backend.__doc__ = AbstractAsyncNetworkServer.get_backend.__doc__
+
@property
def socket(self) -> SocketProxy | None:
+ """The server socket. Read-only attribute."""
if (socket := self.__socket) is None:
return None
return SocketProxy(socket.socket())
@property
- def logger(self) -> _logging.Logger:
+ def logger(self) -> logging.Logger:
+ """The server's logger."""
return self.__logger
@@ -466,18 +458,18 @@ class _ClientAPIManager(Generic[_ResponseT]):
def __init__(
self,
- backend: AbstractAsyncBackend,
+ backend: AsyncBackend,
protocol: DatagramProtocol[_ResponseT, Any],
send_lock: ILock,
- logger: _logging.Logger,
+ logger: logging.Logger,
) -> None:
super().__init__()
- self.__clients: WeakValueDictionary[tuple[AbstractAsyncDatagramSocketAdapter, SocketAddress], _ClientAPI[_ResponseT]]
+ self.__clients: WeakValueDictionary[tuple[AsyncDatagramSocketAdapter, SocketAddress], _ClientAPI[_ResponseT]]
self.__clients = WeakValueDictionary()
self.__protocol: DatagramProtocol[_ResponseT, Any] = protocol
self.__send_lock: ILock = send_lock
- self.__logger: _logging.Logger = logger
+ self.__logger: logging.Logger = logger
self.__client_lock: _TemporaryValue[_ClientAPI[_ResponseT], ICondition] = _TemporaryValue(backend.create_condition_var)
self.__client_queue: _TemporaryValue[_ClientAPI[_ResponseT], deque[bytes]] = _TemporaryValue(
deque,
@@ -487,7 +479,7 @@ def __init__(
def clear(self) -> None:
self.__clients.clear()
- def get(self, socket: AbstractAsyncDatagramSocketAdapter, address: SocketAddress) -> _ClientAPI[_ResponseT]:
+ def get(self, socket: AsyncDatagramSocketAdapter, address: SocketAddress) -> _ClientAPI[_ResponseT]:
key = (socket, address)
try:
return self.__clients[key]
@@ -555,19 +547,19 @@ class _ClientAPI(AsyncDatagramClient[_ResponseT]):
def __init__(
self,
address: SocketAddress,
- socket: AbstractAsyncDatagramSocketAdapter,
+ socket: AsyncDatagramSocketAdapter,
protocol: DatagramProtocol[_ResponseT, Any],
send_lock: ILock,
- logger: _logging.Logger,
+ logger: logging.Logger,
) -> None:
super().__init__(address)
- self.__socket_ref: weakref.ref[AbstractAsyncDatagramSocketAdapter] = weakref.ref(socket)
+ self.__socket_ref: weakref.ref[AsyncDatagramSocketAdapter] = weakref.ref(socket)
self.__socket_proxy: SocketProxy = SocketProxy(socket.socket())
self.__h: int | None = None
self.__protocol: DatagramProtocol[_ResponseT, Any] = protocol
self.__send_lock: ILock = send_lock
- self.__logger: _logging.Logger = logger
+ self.__logger: logging.Logger = logger
def __hash__(self) -> int:
if (h := self.__h) is None:
@@ -597,7 +589,7 @@ async def send_packet(self, packet: _ResponseT, /) -> None:
finally:
del datagram
- def __check_closed(self) -> AbstractAsyncDatagramSocketAdapter:
+ def __check_closed(self) -> AsyncDatagramSocketAdapter:
socket = self.__socket_ref()
if socket is None or socket.is_closing():
raise ClientClosedError("Closed client")
diff --git a/src/easynetwork/api_sync/__init__.py b/src/easynetwork/api_sync/__init__.py
index c8de5810..6c618271 100644
--- a/src/easynetwork/api_sync/__init__.py
+++ b/src/easynetwork/api_sync/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""
diff --git a/src/easynetwork/api_sync/client/__init__.py b/src/easynetwork/api_sync/client/__init__.py
index a59ec0bf..cfcbea9f 100644
--- a/src/easynetwork/api_sync/client/__init__.py
+++ b/src/easynetwork/api_sync/client/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Network client module"""
diff --git a/src/easynetwork/api_sync/client/abc.py b/src/easynetwork/api_sync/client/abc.py
index 1b4f4fc2..3e107b6b 100644
--- a/src/easynetwork/api_sync/client/abc.py
+++ b/src/easynetwork/api_sync/client/abc.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Network client module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Network client interfaces definition module"""
from __future__ import annotations
@@ -10,24 +21,29 @@
import time
from abc import ABCMeta, abstractmethod
from collections.abc import Iterator
-from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar
+from typing import TYPE_CHECKING, Any, Generic, Self
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...tools.socket import SocketAddress
if TYPE_CHECKING:
from types import TracebackType
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
-
class AbstractNetworkClient(Generic[_SentPacketT, _ReceivedPacketT], metaclass=ABCMeta):
+ """
+ The base class for a network client interface.
+ """
+
__slots__ = ("__weakref__",)
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> None:
+ """
+ Calls :meth:`close`.
+ """
self.close()
def __getstate__(self) -> Any: # pragma: no cover
@@ -35,29 +51,134 @@ def __getstate__(self) -> Any: # pragma: no cover
@abstractmethod
def is_closed(self) -> bool:
+ """
+ Checks if the client is in a closed state.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ See Also:
+ :meth:`close` method.
+
+ Returns:
+ the client state.
+ """
raise NotImplementedError
@abstractmethod
def close(self) -> None:
+ """
+ Close the client.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Can be safely called multiple times.
+ """
raise NotImplementedError
@abstractmethod
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
raise NotImplementedError
@abstractmethod
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
raise NotImplementedError
@abstractmethod
def send_packet(self, packet: _SentPacketT, *, timeout: float | None = ...) -> None:
+ """
+ Sends `packet` to the remote endpoint.
+
+ If `timeout` is not :data:`None`, the entire send operation will take at most `timeout` seconds.
+
+ Warning:
+ A timeout on a send operation is unusual unless the implementation is using a lower-level
+ communication protocol (such as SSL/TLS).
+
+ In the case of a timeout, it is impossible to know if all the packet data has been sent.
+ This would leave the connection in an inconsistent state.
+
+ Parameters:
+ packet: the Python object to send.
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+ You should not attempt any further operation and close the client object.
+ TimeoutError: the send operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
raise NotImplementedError
@abstractmethod
def recv_packet(self, *, timeout: float | None = ...) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+ You should not attempt any further operation and close the client object.
+ TimeoutError: the receive operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ BaseProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
raise NotImplementedError
def iter_received_packets(self, *, timeout: float | None = 0) -> Iterator[_ReceivedPacketT]:
+ """
+ Returns an :term:`iterator` that waits for a new packet to arrive from the remote endpoint.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds; it defaults to zero.
+
+ Important:
+ The `timeout` is for the entire iterator::
+
+ iterator = client.iter_received_packets(timeout=10)
+
+ # Let's say that this call took 6 seconds...
+ first_packet = next(iterator)
+
+ # ...then this call has a maximum of 4 seconds, not 10.
+ second_packet = next(iterator)
+
+ The time spent outside the iterator object is not deducted from the `timeout` parameter.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for all the receive operations.
+
+ Yields:
+ the received packet.
+ """
perf_counter = time.perf_counter
while True:
@@ -70,7 +191,14 @@ def iter_received_packets(self, *, timeout: float | None = 0) -> Iterator[_Recei
yield packet
if timeout is not None:
timeout -= _end - _start
+ timeout = max(timeout, 0)
@abstractmethod
def fileno(self) -> int:
+ """
+ Returns the socket's file descriptor, or ``-1`` if the client (or the socket) is closed.
+
+ Returns:
+ the opened file descriptor.
+ """
raise NotImplementedError
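The default ``iter_received_packets`` body above budgets a single `timeout` across all iterations; a minimal sketch of the same accounting, assuming a blocking ``recv(timeout=...)`` callable and that a receive failure simply ends the iteration::

    import time

    def iter_packets(recv, timeout=0):
        while True:
            start = time.perf_counter()
            try:
                packet = recv(timeout=timeout)
            except (OSError, TimeoutError):
                return
            end = time.perf_counter()
            yield packet
            if timeout is not None:
                # Deduct the time spent receiving; clamp at zero so the
                # remaining budget never goes negative.
                timeout = max(timeout - (end - start), 0)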
diff --git a/src/easynetwork/api_sync/client/tcp.py b/src/easynetwork/api_sync/client/tcp.py
index 5c92a5ef..ab4b0fb9 100644
--- a/src/easynetwork/api_sync/client/tcp.py
+++ b/src/easynetwork/api_sync/client/tcp.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Network client module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""TCP Network client implementation module"""
from __future__ import annotations
@@ -14,7 +25,7 @@
import threading
import time
from collections.abc import Callable, Iterator
-from typing import TYPE_CHECKING, Any, Generic, NoReturn, TypeGuard, TypeVar, cast, final, overload
+from typing import TYPE_CHECKING, Any, NoReturn, TypeGuard, final, overload
try:
import ssl
@@ -24,6 +35,7 @@
_ssl_module = ssl
del ssl
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...exceptions import ClientClosedError
from ...protocol import StreamProtocol
from ...tools._lock import ForkSafeLock
@@ -45,13 +57,14 @@
from .abc import AbstractNetworkClient
if TYPE_CHECKING:
- from ssl import SSLContext as _SSLContext, SSLSocket as _SSLSocket
+ import ssl as _typing_ssl
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
+class TCPNetworkClient(AbstractNetworkClient[_SentPacketT, _ReceivedPacketT]):
+ """
+ A network client interface for TCP connections.
+ """
-class TCPNetworkClient(AbstractNetworkClient[_SentPacketT, _ReceivedPacketT], Generic[_SentPacketT, _ReceivedPacketT]):
__slots__ = (
"__socket",
"__over_ssl",
@@ -80,7 +93,7 @@ def __init__(
*,
connect_timeout: float | None = ...,
local_address: tuple[str, int] | None = ...,
- ssl: _SSLContext | bool | None = ...,
+ ssl: _typing_ssl.SSLContext | bool | None = ...,
server_hostname: str | None = ...,
ssl_handshake_timeout: float | None = ...,
ssl_shutdown_timeout: float | None = ...,
@@ -97,7 +110,7 @@ def __init__(
/,
protocol: StreamProtocol[_SentPacketT, _ReceivedPacketT],
*,
- ssl: _SSLContext | bool | None = ...,
+ ssl: _typing_ssl.SSLContext | bool | None = ...,
server_hostname: str | None = ...,
ssl_handshake_timeout: float | None = ...,
ssl_shutdown_timeout: float | None = ...,
@@ -113,7 +126,7 @@ def __init__(
/,
protocol: StreamProtocol[_SentPacketT, _ReceivedPacketT],
*,
- ssl: _SSLContext | bool | None = None,
+ ssl: _typing_ssl.SSLContext | bool | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
@@ -122,6 +135,41 @@ def __init__(
retry_interval: float = 1.0,
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ address: A pair of ``(host, port)`` for connection.
+ connect_timeout: The connection timeout (in seconds).
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+
+ Socket Parameters:
+ socket: An already connected TCP :class:`socket.socket`. If `socket` is given,
+ neither `connect_timeout` nor `local_address` should be specified.
+
+ Keyword Arguments:
+ ssl: If given and not false, an SSL/TLS transport is created (by default a plain TCP transport is created).
+ If ssl is a :class:`ssl.SSLContext` object, this context is used to create the transport;
+ if ssl is :data:`True`, a default context returned from :func:`ssl.create_default_context` is used.
+ server_hostname: sets or overrides the hostname that the target server's certificate will be matched against.
+ Should only be passed if `ssl` is not :data:`None`. By default, the value of the host in the `address`
+ argument is used. If `socket` is provided instead, there is no default and you must pass a value
+ for `server_hostname`. If `server_hostname` is an empty string, hostname matching is disabled
+ (which is a serious security risk, allowing for potential man-in-the-middle attacks).
+ ssl_handshake_timeout: (for a TLS connection) the time in seconds to wait for the TLS handshake to complete
+ before aborting the connection. ``60.0`` seconds if :data:`None` (default).
+ ssl_shutdown_timeout: the time in seconds to wait for the SSL shutdown to complete before aborting the connection.
+ ``30.0`` seconds if :data:`None` (default).
+ ssl_shared_lock: If :data:`True` (the default), :meth:`send_packet` and :meth:`recv_packet` use
+ the same lock instance.
+ max_recv_size: Read buffer size. If not given, a reasonable default value is used.
+ retry_interval: The maximum time to wait before retrying a blocking operation.
+ Set it to :data:`math.inf` to disable this feature.
+
+ See Also:
+ :ref:`SSL/TLS security considerations `
+ """
self.__socket: _socket.socket | None = None # If any exception occurs, the client will already be in a closed state
super().__init__()
@@ -206,9 +254,9 @@ def __init__(
if _ssl_module is None:
raise RuntimeError("stdlib ssl module not available")
- ssl_context: _SSLContext
+ ssl_context: _typing_ssl.SSLContext
if isinstance(ssl, bool):
- ssl_context = cast("_SSLContext", _ssl_module.create_default_context())
+ ssl_context = _ssl_module.create_default_context()
if not server_hostname:
ssl_context.check_hostname = False
if hasattr(_ssl_module, "OP_IGNORE_UNEXPECTED_EOF"):
@@ -273,12 +321,30 @@ def __repr__(self) -> str:
return f"<{type(self).__name__} closed>"
return f"<{type(self).__name__} socket={socket!r}>"
- @final
def is_closed(self) -> bool:
+ """
+ Checks if the client is in a closed state. Thread-safe.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ Returns:
+ the client state.
+ """
with self.__socket_lock.get():
return self.__socket is None
def close(self) -> None:
+ """
+ Close the client. Thread-safe.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Can be safely called multiple times.
+
+ Raises:
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
with self.__send_lock.get(), self.__socket_lock.get():
self.__last_ssl_eof_error = None
if (socket := self.__socket) is None:
@@ -303,6 +369,34 @@ def close(self) -> None:
socket.close()
def send_packet(self, packet: _SentPacketT, *, timeout: float | None = None) -> None:
+ """
+ Sends `packet` to the remote endpoint. Thread-safe.
+
+ If `timeout` is not :data:`None`, the entire send operation will take at most `timeout` seconds.
+
+ Warning:
+ A timeout on a send operation is unusual unless you have an SSL/TLS context.
+
+ In the case of a timeout, it is impossible to know if all the packet data has been sent.
+ This would leave the connection in an inconsistent state.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ packet: the Python object to send.
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+ You should not attempt any further operation and close the client object.
+ TimeoutError: the send operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ RuntimeError: :meth:`send_eof` has been called earlier.
+ """
with _lock_with_timeout(self.__send_lock.get(), timeout, error_message="send_packet() timed out") as timeout:
socket = self.__ensure_connected()
if self.__eof_sent:
@@ -337,9 +431,18 @@ def send_packet(self, packet: _SentPacketT, *, timeout: float | None = None) ->
del buffer, data
def send_eof(self) -> None:
+ """
+ Close the write end of the stream after the buffered write data is flushed. Thread-safe.
+
+ Can be safely called multiple times.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
if self.__over_ssl:
# ssl.SSLSocket.shutdown() would shutdown both read and write streams
- raise NotImplementedError
+ raise NotImplementedError("SSL/TLS API does not support sending EOF.")
with self.__send_lock.get(), self.__socket_lock.get():
if self.__eof_sent:
@@ -349,6 +452,30 @@ def send_eof(self) -> None:
socket.shutdown(_socket.SHUT_WR)
def recv_packet(self, *, timeout: float | None = None) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint. Thread-safe.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ ConnectionError: connection unexpectedly closed during operation.
+ You should not attempt any further operation and close the client object.
+ TimeoutError: the receive operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ StreamProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
with _lock_with_timeout(self.__receive_lock.get(), timeout, error_message="recv_packet() timed out") as timeout:
consumer = self.__consumer
try:
@@ -442,18 +569,44 @@ def __abort_eof(self) -> NoReturn:
self.__abort(None)
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
return self.__addr
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
return self.__peer
def fileno(self) -> int:
+ """
+ Returns the socket's file descriptor, or ``-1`` if the client (or the socket) is closed. Thread-safe.
+
+ Returns:
+ the opened file descriptor.
+ """
with self.__socket_lock.get():
if (socket := self.__socket) is None:
return -1
return socket.fileno()
- def __is_ssl_socket(self, socket: _socket.socket) -> TypeGuard[_SSLSocket]:
+ def __is_ssl_socket(self, socket: _socket.socket) -> TypeGuard[_typing_ssl.SSLSocket]:
# Optimization: Instead of always do a isinstance(), do it once then use the TypeGuard to cast the socket type
# for static type checkers
return self.__over_ssl
@@ -461,9 +614,11 @@ def __is_ssl_socket(self, socket: _socket.socket) -> TypeGuard[_SSLSocket]:
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute."""
return self.__socket_proxy
@property
@final
def max_recv_size(self) -> int:
+ """Read buffer size. Read-only attribute."""
return self.__max_recv_size
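Putting the documented methods together, a usage sketch; the ``protocol`` object and the server address are assumptions, and the import path follows the package layout shown in these diffs::

    from easynetwork.api_sync.client import TCPNetworkClient

    # `protocol` is assumed: a StreamProtocol instance built elsewhere.
    with TCPNetworkClient(("localhost", 9000), protocol, connect_timeout=10) as client:
        client.send_packet({"action": "ping"})
        response = client.recv_packet(timeout=30)  # raises TimeoutError after 30s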
diff --git a/src/easynetwork/api_sync/client/udp.py b/src/easynetwork/api_sync/client/udp.py
index 8cd30d1d..73d3a71e 100644
--- a/src/easynetwork/api_sync/client/udp.py
+++ b/src/easynetwork/api_sync/client/udp.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Network client module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""UDP Network client implementation module"""
from __future__ import annotations
@@ -15,8 +26,9 @@
import time
from collections.abc import Iterator
from operator import itemgetter as _itemgetter
-from typing import TYPE_CHECKING, Any, Generic, Self, TypeVar, final, overload
+from typing import TYPE_CHECKING, Any, Generic, Self, final, overload
+from ..._typevars import _ReceivedPacketT, _SentPacketT
from ...exceptions import ClientClosedError, DatagramProtocolParseError
from ...protocol import DatagramProtocol
from ...tools._lock import ForkSafeLock
@@ -38,11 +50,10 @@
if TYPE_CHECKING:
from types import TracebackType
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
-
class UDPNetworkEndpoint(Generic[_SentPacketT, _ReceivedPacketT]):
+ """Generic UDP endpoint interface."""
+
__slots__ = (
"__socket",
"__socket_proxy",
@@ -59,7 +70,6 @@ class UDPNetworkEndpoint(Generic[_SentPacketT, _ReceivedPacketT]):
@overload
def __init__(
self,
- /,
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
*,
local_address: tuple[str, int] | None = ...,
@@ -72,7 +82,6 @@ def __init__(
@overload
def __init__(
self,
- /,
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
*,
socket: _socket.socket,
@@ -82,11 +91,31 @@ def __init__(
def __init__(
self,
- /,
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
+ *,
retry_interval: float = 1.0,
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ remote_address: If given, is a ``(host, port)`` tuple used to connect the socket.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ reuse_port: Tells the kernel to allow this endpoint to be bound to the same port as other existing
+ endpoints, so long as they all set this flag when being created.
+ This option is not supported on Windows and some Unixes.
+ If the ``SO_REUSEPORT`` constant is not defined, this capability is unsupported.
+
+ Socket Parameters:
+ socket: An already connected UDP :class:`socket.socket`. If `socket` is given,
+ none of `local_address`, `remote_address` and `reuse_port` should be specified.
+
+ Keyword Arguments:
+ retry_interval: The maximum time to wait before retrying a blocking operation.
+ Set it to :data:`math.inf` to disable this feature.
+ """
self.__socket: _socket.socket | None = None # If any exception occurs, the client will already be in a closed state
super().__init__()
@@ -161,17 +190,37 @@ def __enter__(self) -> Self:
return self
def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None) -> None:
+ """
+ Calls :meth:`close`.
+ """
self.close()
def __getstate__(self) -> Any: # pragma: no cover
raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
- @final
def is_closed(self) -> bool:
+ """
+ Checks if the endpoint is in a closed state. Thread-safe.
+
+ If :data:`True`, all future operations on the endpoint object will raise a :exc:`.ClientClosedError`.
+
+ Returns:
+ the endpoint state.
+ """
with self.__socket_lock.get():
return self.__socket is None
def close(self) -> None:
+ """
+ Close the endpoint. Thread-safe.
+
+ Once that happens, all future operations on the endpoint object will raise a :exc:`.ClientClosedError`.
+
+ Can be safely called multiple times.
+
+ Raises:
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
with self.__send_lock.get(), self.__socket_lock.get():
if (socket := self.__socket) is None:
return
@@ -185,6 +234,34 @@ def send_packet_to(
*,
timeout: float | None = None,
) -> None:
+ """
+ Sends `packet` to the remote endpoint `address`. Thread-safe.
+
+ If a remote address is configured, `address` must be :data:`None` or the same as the remote address,
+ otherwise `address` must not be :data:`None`.
+
+ If `timeout` is not :data:`None`, the entire send operation will take at most `timeout` seconds.
+
+ Warning:
+ A timeout on a send operation is unusual.
+
+ In the case of a timeout, it is impossible to know if all the packet data has been sent.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ packet: the Python object to send.
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ TimeoutError: the send operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ ValueError: Invalid `address` value.
+ """
with (
_lock_with_timeout(self.__send_lock.get(), timeout, error_message="send_packet() timed out") as timeout,
self.__convert_socket_error(),
@@ -212,6 +289,28 @@ def send_packet_to(
del data
def recv_packet_from(self, *, timeout: float | None = None) -> tuple[_ReceivedPacketT, SocketAddress]:
+ """
+ Waits for a new packet to arrive from another endpoint. Thread-safe.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ TimeoutError: the receive operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ DatagramProtocolParseError: invalid data received.
+
+ Returns:
+ A ``(packet, address)`` tuple, where `address` is the address of the endpoint that delivered this packet.
+ """
with (
_lock_with_timeout(self.__receive_lock.get(), timeout, error_message="recv_packet() timed out") as timeout,
self.__convert_socket_error(),
@@ -234,6 +333,30 @@ def recv_packet_from(self, *, timeout: float | None = None) -> tuple[_ReceivedPa
del data
def iter_received_packets_from(self, *, timeout: float | None = 0) -> Iterator[tuple[_ReceivedPacketT, SocketAddress]]:
+ """
+ Returns an :term:`iterator` that waits for a new packet to arrive from another endpoint.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds; it defaults to zero.
+
+ Important:
+ The `timeout` is for the entire iterator::
+
+ iterator = endpoint.iter_received_packets_from(timeout=10)
+
+ # Let's say that this call took 6 seconds...
+ first_packet = next(iterator)
+
+ # ...then this call has a maximum of 4 seconds, not 10.
+ second_packet = next(iterator)
+
+ The time spent outside the iterator object is not deducted from the `timeout` parameter.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for all the receive operations.
+
+ Yields:
+ A ``(packet, address)`` tuple, where `address` is the address of the endpoint that delivered this packet.
+ """
perf_counter = time.perf_counter
while True:
try:
@@ -245,6 +368,7 @@ def iter_received_packets_from(self, *, timeout: float | None = 0) -> Iterator[t
yield packet_tuple
if timeout is not None:
timeout -= _end - _start
+ timeout = max(timeout, 0)
@_contextlib.contextmanager
def __convert_socket_error(self) -> Iterator[None]:
@@ -256,12 +380,38 @@ def __convert_socket_error(self) -> Iterator[None]:
raise
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the endpoint's local address.
+ """
return self.__addr
def get_remote_address(self) -> SocketAddress | None:
+ """
+ Returns the remote socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the endpoint object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the endpoint's remote address if configured, :data:`None` otherwise.
+ """
return self.__peer
def fileno(self) -> int:
+ """
+ Returns the socket's file descriptor, or ``-1`` if the endpoint (or the socket) is closed. Thread-safe.
+
+ Returns:
+ the opened file descriptor.
+ """
with self.__socket_lock.get():
if (socket := self.__socket) is None:
return -1
@@ -270,10 +420,15 @@ def fileno(self) -> int:
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute."""
return self.__socket_proxy
-class UDPNetworkClient(AbstractNetworkClient[_SentPacketT, _ReceivedPacketT], Generic[_SentPacketT, _ReceivedPacketT]):
+class UDPNetworkClient(AbstractNetworkClient[_SentPacketT, _ReceivedPacketT]):
+ """
+ A network client interface for UDP communication.
+ """
+
__slots__ = ("__endpoint", "__peer")
@overload
@@ -307,6 +462,26 @@ def __init__(
protocol: DatagramProtocol[_SentPacketT, _ReceivedPacketT],
**kwargs: Any,
) -> None:
+ """
+ Common Parameters:
+ protocol: The :term:`protocol object` to use.
+
+ Connection Parameters:
+ address: A pair of ``(host, port)`` for connection.
+ local_address: If given, is a ``(local_host, local_port)`` tuple used to bind the socket locally.
+ reuse_port: Tells the kernel to allow this endpoint to be bound to the same port as other existing
+ endpoints, so long as they all set this flag when being created.
+ This option is not supported on Windows and some Unixes.
+ If the ``SO_REUSEPORT`` constant is not defined, this capability is unsupported.
+
+ Socket Parameters:
+ socket: An already connected UDP :class:`socket.socket`. If `socket` is given,
+ neither `local_address` nor `reuse_port` should be specified.
+
+ Keyword Arguments:
+ retry_interval: The maximum time to wait before retrying a blocking operation.
+ Set it to :data:`math.inf` to disable this feature.
+ """
super().__init__()
endpoint: UDPNetworkEndpoint[_SentPacketT, _ReceivedPacketT]
@@ -334,8 +509,15 @@ def __repr__(self) -> str:
except AttributeError:
return f"<{type(self).__name__} (partially initialized)>"
- @final
def is_closed(self) -> bool:
+ """
+ Checks if the client is in a closed state. Thread-safe.
+
+ If :data:`True`, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+
+ Returns:
+ the client state.
+ """
try:
endpoint = self.__endpoint
except AttributeError: # pragma: no cover
@@ -343,6 +525,17 @@ def is_closed(self) -> bool:
return endpoint.is_closed()
def close(self) -> None:
+ """
+ Close the client. Thread-safe.
+
+ Once that happens, all future operations on the client object will raise a :exc:`.ClientClosedError`.
+ The remote end will receive no more data (after queued data is flushed).
+
+ Can be safely called multiple times.
+
+ Raises:
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
try:
endpoint = self.__endpoint
except AttributeError: # pragma: no cover
@@ -350,27 +543,102 @@ def close(self) -> None:
return endpoint.close()
def get_local_address(self) -> SocketAddress:
+ """
+ Returns the local socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's local address.
+ """
return self.__endpoint.get_local_address()
def get_remote_address(self) -> SocketAddress:
+ """
+ Returns the remote socket IP address. Thread-safe.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+
+ Returns:
+ the client's remote address.
+ """
return self.__peer
def send_packet(self, packet: _SentPacketT, *, timeout: float | None = None) -> None:
+ """
+ Sends `packet` to the remote endpoint. Thread-safe.
+
+ If `timeout` is not :data:`None`, the entire send operation will take at most `timeout` seconds.
+
+ Warning:
+ A timeout on a send operation is unusual.
+
+ In the case of a timeout, it is impossible to know if all the packet data has been sent.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ packet: the Python object to send.
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ TimeoutError: the send operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ """
return self.__endpoint.send_packet_to(packet, None, timeout=timeout)
def recv_packet(self, *, timeout: float | None = None) -> _ReceivedPacketT:
+ """
+ Waits for a new packet to arrive from the remote endpoint. Thread-safe.
+
+ If `timeout` is not :data:`None`, the entire receive operation will take at most `timeout` seconds.
+
+ Important:
+ The lock acquisition time is included in the `timeout`.
+
+ This means that you may get a :exc:`TimeoutError` because it took too long to get the lock.
+
+ Parameters:
+ timeout: the allowed time (in seconds) for blocking operations.
+
+ Raises:
+ ClientClosedError: the client object is closed.
+ TimeoutError: the receive operation did not complete within `timeout` seconds.
+ OSError: unrelated OS error occurred. You should check :attr:`OSError.errno`.
+ DatagramProtocolParseError: invalid data received.
+
+ Returns:
+ the received packet.
+ """
packet, _ = self.__endpoint.recv_packet_from(timeout=timeout)
return packet
def iter_received_packets(self, *, timeout: float | None = 0) -> Iterator[_ReceivedPacketT]:
return map(_itemgetter(0), self.__endpoint.iter_received_packets_from(timeout=timeout))
+ iter_received_packets.__doc__ = AbstractNetworkClient.iter_received_packets.__doc__
+
def fileno(self) -> int:
+ """
+ Returns the socket's file descriptor, or ``-1`` if the client (or the socket) is closed. Thread-safe.
+
+ Returns:
+ the opened file descriptor.
+ """
return self.__endpoint.fileno()
@property
@final
def socket(self) -> SocketProxy:
+ """A view to the underlying socket instance. Read-only attribute."""
return self.__endpoint.socket
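A comparable sketch for the datagram side, assuming a ``DatagramProtocol`` instance named ``protocol``::

    from easynetwork.api_sync.client import UDPNetworkEndpoint

    # `protocol` is assumed: a DatagramProtocol instance built elsewhere.
    with UDPNetworkEndpoint(protocol, local_address=("0.0.0.0", 9000)) as endpoint:
        packet, address = endpoint.recv_packet_from(timeout=60)
        # Reply to whichever peer delivered the packet.
        endpoint.send_packet_to({"status": "ok"}, address)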
diff --git a/src/easynetwork/api_sync/server/__init__.py b/src/easynetwork/api_sync/server/__init__.py
index c2f9d87d..5e5af546 100644
--- a/src/easynetwork/api_sync/server/__init__.py
+++ b/src/easynetwork/api_sync/server/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Network server module"""
@@ -6,11 +17,10 @@
from __future__ import annotations
__all__ = [
- "AbstractStandaloneNetworkServer",
- "StandaloneNetworkServerThread",
+ "AbstractNetworkServer",
+ "NetworkServerThread",
"StandaloneTCPNetworkServer",
"StandaloneUDPNetworkServer",
- "SupportsEventSet",
]
from .abc import *
diff --git a/src/easynetwork/api_sync/server/_base.py b/src/easynetwork/api_sync/server/_base.py
index 1d30a6c5..a34e1618 100644
--- a/src/easynetwork/api_sync/server/_base.py
+++ b/src/easynetwork/api_sync/server/_base.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -13,13 +24,14 @@
from collections.abc import Callable, Coroutine, Iterator
from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar, final
-from ...api_async.backend.abc import AbstractThreadsPortal
+from ...api_async.backend.abc import ThreadsPortal
+from ...api_async.server.abc import SupportsEventSet
from ...exceptions import ServerAlreadyRunning, ServerClosedError
from ...tools._lock import ForkSafeLock
-from .abc import AbstractStandaloneNetworkServer, SupportsEventSet
+from .abc import AbstractNetworkServer
if TYPE_CHECKING:
- from ...api_async.backend.abc import AbstractAsyncBackend, AbstractRunner
+ from ...api_async.backend.abc import AsyncBackend, Runner
from ...api_async.server.abc import AbstractAsyncNetworkServer
@@ -27,7 +39,7 @@
_T = TypeVar("_T")
-class BaseStandaloneNetworkServerImpl(AbstractStandaloneNetworkServer):
+class BaseStandaloneNetworkServerImpl(AbstractNetworkServer):
__slots__ = (
"__server",
"__runner",
@@ -43,7 +55,7 @@ def __init__(self, server: AbstractAsyncNetworkServer) -> None:
self.__threads_portal: _ServerThreadsPortal | None = None
self.__is_shutdown = _threading.Event()
self.__is_shutdown.set()
- self.__runner: AbstractRunner | None = self.__server.get_backend().new_runner()
+ self.__runner: Runner | None = self.__server.get_backend().new_runner()
self.__close_lock = ForkSafeLock()
self.__bootstrap_lock = ForkSafeLock()
@@ -58,6 +70,8 @@ def is_serving(self) -> bool:
return portal.run_sync(self.__server.is_serving)
return False
+ is_serving.__doc__ = AbstractNetworkServer.is_serving.__doc__
+
def server_close(self) -> None:
with self.__close_lock.get(), _contextlib.ExitStack() as stack, _contextlib.suppress(RuntimeError):
if (portal := self._portal) is not None:
@@ -72,6 +86,8 @@ def server_close(self) -> None:
self.__is_shutdown.wait() # Ensure we are not in the interval between the server shutdown and the scheduler shutdown
runner.run(self.__server.server_close)
+ server_close.__doc__ = AbstractNetworkServer.server_close.__doc__
+
def shutdown(self, timeout: float | None = None) -> None:
if (portal := self._portal) is not None:
CancelledError = self.__server.get_backend().get_cancelled_exc_class()
@@ -87,6 +103,8 @@ def shutdown(self, timeout: float | None = None) -> None:
timeout -= time.perf_counter() - _start
self.__is_shutdown.wait(timeout)
+ shutdown.__doc__ = AbstractNetworkServer.shutdown.__doc__
+
async def __do_shutdown_with_timeout(self, timeout_delay: float) -> None:
backend = self.__server.get_backend()
async with backend.move_on_after(timeout_delay):
@@ -115,7 +133,7 @@ def serve_forever(self, *, is_up_event: SupportsEventSet | None = None) -> None:
self.__is_shutdown.clear()
server_exit_stack.callback(self.__is_shutdown.set)
- async def serve_forever(runner: AbstractRunner) -> None:
+ async def serve_forever(runner: Runner) -> None:
try:
self.__threads_portal = _ServerThreadsPortal(backend, runner)
server_exit_stack.callback(self.__threads_portal._wait_for_all_requests)
@@ -133,25 +151,27 @@ async def serve_forever(runner: AbstractRunner) -> None:
# Acquire the bootstrap lock at teardown, before calling is_shutdown.set().
locks_stack.enter_context(self.__bootstrap_lock.get())
+ serve_forever.__doc__ = AbstractNetworkServer.serve_forever.__doc__
+
@property
def _server(self) -> AbstractAsyncNetworkServer:
return self.__server
@property
- def _portal(self) -> AbstractThreadsPortal | None:
+ def _portal(self) -> ThreadsPortal | None:
with self.__bootstrap_lock.get():
return self.__threads_portal
@final
-class _ServerThreadsPortal(AbstractThreadsPortal):
+class _ServerThreadsPortal(ThreadsPortal):
__slots__ = ("__backend", "__runner", "__portal", "__request_count", "__request_count_lock")
- def __init__(self, backend: AbstractAsyncBackend, runner: AbstractRunner) -> None:
+ def __init__(self, backend: AsyncBackend, runner: Runner) -> None:
super().__init__()
- self.__backend: AbstractAsyncBackend = backend
- self.__runner: AbstractRunner = runner
- self.__portal: AbstractThreadsPortal = backend.create_threads_portal()
+ self.__backend: AsyncBackend = backend
+ self.__runner: Runner = runner
+ self.__portal: ThreadsPortal = backend.create_threads_portal()
self.__request_count: int = 0
self.__request_count_lock = ForkSafeLock()
diff --git a/src/easynetwork/api_sync/server/abc.py b/src/easynetwork/api_sync/server/abc.py
index 6c0d886f..90b95b68 100644
--- a/src/easynetwork/api_sync/server/abc.py
+++ b/src/easynetwork/api_sync/server/abc.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -6,8 +17,7 @@
from __future__ import annotations
__all__ = [
- "AbstractStandaloneNetworkServer",
- "SupportsEventSet",
+ "AbstractNetworkServer",
]
from abc import ABCMeta, abstractmethod
@@ -19,7 +29,11 @@
from types import TracebackType
-class AbstractStandaloneNetworkServer(metaclass=ABCMeta):
+class AbstractNetworkServer(metaclass=ABCMeta):
+ """
+ The base class for a network server.
+ """
+
__slots__ = ("__weakref__",)
def __enter__(self) -> Self:
@@ -31,6 +45,7 @@ def __exit__(
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
+ """Calls :meth:`server_close`."""
self.server_close()
def __getstate__(self) -> Any: # pragma: no cover
@@ -38,16 +53,43 @@ def __getstate__(self) -> Any: # pragma: no cover
@abstractmethod
def is_serving(self) -> bool:
+ """
+ Checks whether the server is up and accepting new clients. Thread-safe.
+ """
raise NotImplementedError
@abstractmethod
def serve_forever(self, *, is_up_event: SupportsEventSet | None = ...) -> None:
+ """
+ Starts the server's main loop.
+
+ Parameters:
+ is_up_event: If given, will be triggered when the server is ready to accept new clients.
+
+ Raises:
+ ServerClosedError: The server is closed.
+ ServerAlreadyRunning: Another task already called :meth:`serve_forever`.
+ """
raise NotImplementedError
@abstractmethod
def server_close(self) -> None:
+ """
+ Closes the server. Thread-safe.
+ """
raise NotImplementedError
@abstractmethod
def shutdown(self, timeout: float | None = ...) -> None:
+ """
+ Asks for the server to stop. Thread-safe.
+
+ All active client tasks will be cancelled.
+
+ Warning:
+ Do not call this method in the :meth:`serve_forever` thread; it will cause a deadlock.
+
+ Parameters:
+ timeout: The maximum number of seconds to wait.
+ """
raise NotImplementedError
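``is_up_event`` only needs a ``set()`` method (``SupportsEventSet``), so a plain :class:`threading.Event` fits; a hedged sketch, assuming ``server`` is any ``AbstractNetworkServer`` implementation::

    import threading

    is_up = threading.Event()
    thread = threading.Thread(target=server.serve_forever, kwargs={"is_up_event": is_up})
    thread.start()
    is_up.wait()  # blocks until the server is ready to accept clients
    ...
    server.shutdown()  # from this thread, never from the serve_forever thread
    thread.join()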
diff --git a/src/easynetwork/api_sync/server/tcp.py b/src/easynetwork/api_sync/server/tcp.py
index 9b247723..477f0fb7 100644
--- a/src/easynetwork/api_sync/server/tcp.py
+++ b/src/easynetwork/api_sync/server/tcp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -11,25 +22,29 @@
import contextlib as _contextlib
from collections.abc import Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Generic, TypeVar
+from typing import TYPE_CHECKING, Any, Generic
+from ..._typevars import _RequestT, _ResponseT
from ...api_async.server.tcp import AsyncTCPNetworkServer
from ...tools.socket import SocketAddress, SocketProxy
from . import _base
if TYPE_CHECKING:
- import logging as _logging
+ import logging
from ssl import SSLContext as _SSLContext
- from ...api_async.backend.abc import AbstractAsyncBackend
+ from ...api_async.backend.abc import AsyncBackend
from ...api_async.server.handler import AsyncStreamRequestHandler
from ...protocol import StreamProtocol
-_RequestT = TypeVar("_RequestT")
-_ResponseT = TypeVar("_ResponseT")
-
class StandaloneTCPNetworkServer(_base.BaseStandaloneNetworkServerImpl, Generic[_RequestT, _ResponseT]):
+ """
+ A network server for TCP connections.
+
+ It embeds an :class:`.AsyncTCPNetworkServer` instance.
+ """
+
__slots__ = ()
def __init__(
@@ -38,7 +53,7 @@ def __init__(
port: int,
protocol: StreamProtocol[_ResponseT, _RequestT],
request_handler: AsyncStreamRequestHandler[_RequestT, _ResponseT],
- backend: str | AbstractAsyncBackend = "asyncio",
+ backend: str | AsyncBackend = "asyncio",
*,
ssl: _SSLContext | None = None,
ssl_handshake_timeout: float | None = None,
@@ -46,12 +61,19 @@ def __init__(
backlog: int | None = None,
reuse_port: bool = False,
max_recv_size: int | None = None,
- service_actions_interval: float | None = None,
- backend_kwargs: Mapping[str, Any] | None = None,
log_client_connection: bool | None = None,
- logger: _logging.Logger | None = None,
+ logger: logging.Logger | None = None,
+ backend_kwargs: Mapping[str, Any] | None = None,
**kwargs: Any,
) -> None:
+ """
+ For the arguments, see :class:`.AsyncTCPNetworkServer` documentation.
+
+ Note:
+ The backend interface cannot be :data:`None`; it defaults to ``asyncio``.
+
+ :exc:`ValueError` is raised if :data:`None` is given.
+ """
if backend is None:
raise ValueError("You must explicitly give a backend name or instance")
super().__init__(
@@ -66,21 +88,35 @@ def __init__(
backlog=backlog,
reuse_port=reuse_port,
max_recv_size=max_recv_size,
- service_actions_interval=service_actions_interval,
- backend=backend,
- backend_kwargs=backend_kwargs,
log_client_connection=log_client_connection,
logger=logger,
+ backend=backend,
+ backend_kwargs=backend_kwargs,
**kwargs,
)
)
def stop_listening(self) -> None:
+ """
+ Schedules the shutdown of all listener sockets. Thread-safe.
+
+ After that, all new connections will be refused, but the server will continue to run and handle
+ previously accepted connections.
+
+ Further calls to :meth:`is_serving` will return :data:`False`.
+ """
if (portal := self._portal) is not None:
with _contextlib.suppress(RuntimeError):
portal.run_sync(self._server.stop_listening)
def get_addresses(self) -> Sequence[SocketAddress]:
+ """
+ Returns all interfaces to which the listeners are bound. Thread-safe.
+
+ Returns:
+ A sequence of network socket addresses.
+ If the server is not serving (:meth:`is_serving` returns :data:`False`), an empty sequence is returned.
+ """
if (portal := self._portal) is not None:
with _contextlib.suppress(RuntimeError):
return portal.run_sync(self._server.get_addresses)
@@ -88,6 +124,7 @@ def get_addresses(self) -> Sequence[SocketAddress]:
@property
def sockets(self) -> Sequence[SocketProxy]:
+ """The listeners sockets. Read-only attribute."""
if (portal := self._portal) is not None:
with _contextlib.suppress(RuntimeError):
sockets = portal.run_sync(lambda: self._server.sockets)
@@ -95,7 +132,8 @@ def sockets(self) -> Sequence[SocketProxy]:
return ()
@property
- def logger(self) -> _logging.Logger:
+ def logger(self) -> logging.Logger:
+ """The server's logger."""
return self._server.logger
if TYPE_CHECKING:
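A construction sketch; ``protocol`` and ``handler`` (a ``StreamProtocol`` and an ``AsyncStreamRequestHandler``) are assumptions, and the leading ``host`` positional argument is inferred from context since the hunk above starts at ``port``::

    from easynetwork.api_sync.server import StandaloneTCPNetworkServer

    # `protocol` and `handler` are assumed to be built elsewhere.
    with StandaloneTCPNetworkServer("localhost", 9000, protocol, handler) as server:
        server.serve_forever()  # blocks until shutdown() is called elsewhere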
diff --git a/src/easynetwork/api_sync/server/thread.py b/src/easynetwork/api_sync/server/thread.py
index df057986..9b6caaf1 100644
--- a/src/easynetwork/api_sync/server/thread.py
+++ b/src/easynetwork/api_sync/server/thread.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -6,26 +17,26 @@
from __future__ import annotations
__all__ = [
- "StandaloneNetworkServerThread",
+ "NetworkServerThread",
]
import threading as _threading
import time
-from .abc import AbstractStandaloneNetworkServer
+from .abc import AbstractNetworkServer
-class StandaloneNetworkServerThread(_threading.Thread):
+class NetworkServerThread(_threading.Thread):
def __init__(
self,
- server: AbstractStandaloneNetworkServer,
+ server: AbstractNetworkServer,
group: None = None,
name: str | None = None,
*,
daemon: bool | None = None,
) -> None:
super().__init__(group=group, target=None, name=name, daemon=daemon)
- self.__server: AbstractStandaloneNetworkServer | None = server
+ self.__server: AbstractNetworkServer | None = server
self.__is_up_event: _threading.Event = _threading.Event()
def start(self) -> None:
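A sketch of the renamed thread helper, assuming ``server`` is an ``AbstractNetworkServer``; judging by the ``__is_up_event`` attribute above, ``start()`` waits until the server is up before returning::

    from easynetwork.api_sync.server import NetworkServerThread

    # `server` is assumed: any AbstractNetworkServer implementation.
    server_thread = NetworkServerThread(server, daemon=True)
    server_thread.start()
    try:
        ...  # interact with the running server
    finally:
        server.shutdown()
        server_thread.join()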
diff --git a/src/easynetwork/api_sync/server/udp.py b/src/easynetwork/api_sync/server/udp.py
index 8ff60753..b5a99797 100644
--- a/src/easynetwork/api_sync/server/udp.py
+++ b/src/easynetwork/api_sync/server/udp.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Asynchronous network server module"""
@@ -11,24 +22,28 @@
import contextlib as _contextlib
from collections.abc import Mapping
-from typing import TYPE_CHECKING, Any, Generic, TypeVar
+from typing import TYPE_CHECKING, Any, Generic
+from ..._typevars import _RequestT, _ResponseT
from ...api_async.server.udp import AsyncUDPNetworkServer
from ...tools.socket import SocketAddress, SocketProxy
from . import _base
if TYPE_CHECKING:
- import logging as _logging
+ import logging
- from ...api_async.backend.abc import AbstractAsyncBackend
+ from ...api_async.backend.abc import AsyncBackend
from ...api_async.server.handler import AsyncDatagramRequestHandler
from ...protocol import DatagramProtocol
-_RequestT = TypeVar("_RequestT")
-_ResponseT = TypeVar("_ResponseT")
-
class StandaloneUDPNetworkServer(_base.BaseStandaloneNetworkServerImpl, Generic[_RequestT, _ResponseT]):
+ """
+ A network server for UDP communication.
+
+ It embeds an :class:`.AsyncUDPNetworkServer` instance.
+ """
+
__slots__ = ()
def __init__(
@@ -37,14 +52,21 @@ def __init__(
port: int,
protocol: DatagramProtocol[_ResponseT, _RequestT],
request_handler: AsyncDatagramRequestHandler[_RequestT, _ResponseT],
- backend: str | AbstractAsyncBackend = "asyncio",
+ backend: str | AsyncBackend = "asyncio",
*,
reuse_port: bool = False,
+ logger: logging.Logger | None = None,
backend_kwargs: Mapping[str, Any] | None = None,
- service_actions_interval: float | None = None,
- logger: _logging.Logger | None = None,
**kwargs: Any,
) -> None:
+ """
+ For the arguments, see :class:`.AsyncUDPNetworkServer` documentation.
+
+ Note:
+ The backend interface cannot be :data:`None`; it defaults to ``asyncio``.
+
+ :exc:`ValueError` is raised if :data:`None` is given.
+ """
if backend is None:
raise ValueError("You must explicitly give a backend name or instance")
super().__init__(
@@ -54,15 +76,21 @@ def __init__(
protocol=protocol,
request_handler=request_handler,
reuse_port=reuse_port,
+ logger=logger,
backend=backend,
backend_kwargs=backend_kwargs,
- service_actions_interval=service_actions_interval,
- logger=logger,
**kwargs,
)
)
def get_address(self) -> SocketAddress | None:
+ """
+ Returns the interface to which the datagram socket is bound. Thread-safe.
+
+ Returns:
+ A network socket address.
+ If the server is not serving (:meth:`is_serving` returns :data:`False`), :data:`None` is returned.
+ """
if (portal := self._portal) is not None:
with _contextlib.suppress(RuntimeError):
return portal.run_sync(self._server.get_address)
@@ -70,6 +98,7 @@ def get_address(self) -> SocketAddress | None:
@property
def socket(self) -> SocketProxy | None:
+ """The server socket. Read-only attribute."""
if (portal := self._portal) is not None:
with _contextlib.suppress(RuntimeError):
socket = portal.run_sync(lambda: self._server.socket)
@@ -77,7 +106,8 @@ def socket(self) -> SocketProxy | None:
return None
@property
- def logger(self) -> _logging.Logger:
+ def logger(self) -> logging.Logger:
+ """The server's logger."""
return self._server.logger
if TYPE_CHECKING:
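The UDP counterpart follows the same shape; ``protocol`` and ``handler`` (a ``DatagramProtocol`` and an ``AsyncDatagramRequestHandler``) are assumptions::

    from easynetwork.api_sync.server import NetworkServerThread, StandaloneUDPNetworkServer

    # `protocol` and `handler` are assumed to be built elsewhere.
    with StandaloneUDPNetworkServer("localhost", 9000, protocol, handler) as server:
        thread = NetworkServerThread(server, daemon=True)
        thread.start()
        print(server.get_address())  # the bound address while serving, None otherwise
        server.shutdown()
        thread.join()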
diff --git a/src/easynetwork/converter.py b/src/easynetwork/converter.py
index 4cfa0adc..698f0a16 100644
--- a/src/easynetwork/converter.py
+++ b/src/easynetwork/converter.py
@@ -1,97 +1,133 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Network packet converter module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""EasyNetwork's packet converters module"""
from __future__ import annotations
-__all__ = ["AbstractPacketConverter", "AbstractPacketConverterComposite", "RequestResponseConverterBuilder"]
+__all__ = [
+ "AbstractPacketConverter",
+ "AbstractPacketConverterComposite",
+ "StapledPacketConverter",
+]
from abc import ABCMeta, abstractmethod
-from collections.abc import Callable
-from typing import Any, Generic, TypeVar, final
+from dataclasses import dataclass
+from typing import Any, Generic, final
+
+from ._typevars import _DTOPacketT, _PacketT, _ReceivedPacketT, _SentPacketT
-_SentDTOPacketT = TypeVar("_SentDTOPacketT")
-_ReceivedDTOPacketT = TypeVar("_ReceivedDTOPacketT")
-_DTOPacketT = TypeVar("_DTOPacketT")
-_SentPacketT = TypeVar("_SentPacketT")
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-_PacketT = TypeVar("_PacketT")
+class AbstractPacketConverterComposite(Generic[_SentPacketT, _ReceivedPacketT, _DTOPacketT], metaclass=ABCMeta):
+ """
+ The base class for implementing a :term:`composite converter`.
+ See Also:
+ The :class:`AbstractPacketConverter` class.
+ """
-class AbstractPacketConverterComposite(
- Generic[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT], metaclass=ABCMeta
-):
__slots__ = ("__weakref__",)
@abstractmethod
- def create_from_dto_packet(self, packet: _ReceivedDTOPacketT) -> _ReceivedPacketT:
+ def create_from_dto_packet(self, packet: _DTOPacketT, /) -> _ReceivedPacketT:
+ """
+ Constructs the business object from the :term:`DTO` `packet`.
+
+ Parameters:
+ packet: The :term:`data transfer object`.
+
+ Raises:
+ PacketConversionError: `packet` is invalid.
+
+ Returns:
+ the business object.
+ """
raise NotImplementedError
@abstractmethod
- def convert_to_dto_packet(self, obj: _SentPacketT) -> _SentDTOPacketT:
+ def convert_to_dto_packet(self, obj: _SentPacketT, /) -> _DTOPacketT:
+ """
+ Creates the :term:`DTO` packet from the business object `obj`.
+
+ Parameters:
+ obj: The business object.
+
+ Returns:
+ the :term:`data transfer object`.
+ """
raise NotImplementedError
-class PacketConverterComposite(
- AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT]
-):
- __slots__ = ("__create_from_dto", "__convert_to_dto")
+@dataclass(frozen=True, slots=True)
+class StapledPacketConverter(AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, _DTOPacketT]):
+ """
+ A :term:`composite converter` that merges two converters.
+ """
+
+ sent_packet_converter: AbstractPacketConverterComposite[_SentPacketT, Any, _DTOPacketT]
+ """Sent packet converter."""
- def __init__(
- self,
- convert_to_dto: Callable[[_SentPacketT], _SentDTOPacketT],
- create_from_dto: Callable[[_ReceivedDTOPacketT], _ReceivedPacketT],
- ) -> None:
- super().__init__()
- self.__create_from_dto: Callable[[_ReceivedDTOPacketT], _ReceivedPacketT] = create_from_dto
- self.__convert_to_dto: Callable[[_SentPacketT], _SentDTOPacketT] = convert_to_dto
+ received_packet_converter: AbstractPacketConverterComposite[Any, _ReceivedPacketT, _DTOPacketT]
+ """Received packet converter."""
@final
- def create_from_dto_packet(self, packet: _ReceivedDTOPacketT) -> _ReceivedPacketT:
- return self.__create_from_dto(packet)
+ def create_from_dto_packet(self, packet: _DTOPacketT, /) -> _ReceivedPacketT:
+ """
+ Calls ``self.received_packet_converter.create_from_dto_packet(packet)``.
+
+ Parameters:
+ packet: The :term:`data transfer object`.
+
+ Raises:
+ PacketConversionError: `packet` is invalid.
+
+ Returns:
+ the business object.
+ """
+ return self.received_packet_converter.create_from_dto_packet(packet)
@final
- def convert_to_dto_packet(self, obj: _SentPacketT) -> _SentDTOPacketT:
- return self.__convert_to_dto(obj)
+ def convert_to_dto_packet(self, obj: _SentPacketT, /) -> _DTOPacketT:
+ """
+ Calls ``self.sent_packet_converter.convert_to_dto_packet(obj)``.
+
+ Parameters:
+ obj: The business object.
+
+        Returns:
+ the :term:`data transfer object`.
+ """
+ return self.sent_packet_converter.convert_to_dto_packet(obj)
+
+
+class AbstractPacketConverter(AbstractPacketConverterComposite[_PacketT, _PacketT, _DTOPacketT], Generic[_PacketT, _DTOPacketT]):
+ """
+ The base class for implementing a :term:`converter`.
+
+ See Also:
+ The :class:`AbstractPacketConverterComposite` class.
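+
+    Example (a minimal sketch, imports elided; ``Point`` is a hypothetical business object and
+    :exc:`.PacketConversionError` comes from :mod:`easynetwork.exceptions`)::
+
+        @dataclasses.dataclass
+        class Point:
+            x: int
+            y: int
+
+        class PointConverter(AbstractPacketConverter[Point, dict[str, int]]):
+            def convert_to_dto_packet(self, obj: Point, /) -> dict[str, int]:
+                return {"x": obj.x, "y": obj.y}
+
+            def create_from_dto_packet(self, packet: dict[str, int], /) -> Point:
+                try:
+                    return Point(**packet)
+                except TypeError as exc:
+                    raise PacketConversionError(str(exc)) from exc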
+ """
-class AbstractPacketConverter(
- AbstractPacketConverterComposite[_PacketT, _DTOPacketT, _PacketT, _DTOPacketT], Generic[_PacketT, _DTOPacketT]
-):
__slots__ = ()
@abstractmethod
- def create_from_dto_packet(self, packet: _DTOPacketT) -> _PacketT:
+ def create_from_dto_packet(self, packet: _DTOPacketT, /) -> _PacketT:
raise NotImplementedError
@abstractmethod
- def convert_to_dto_packet(self, obj: _PacketT) -> _DTOPacketT:
+ def convert_to_dto_packet(self, obj: _PacketT, /) -> _DTOPacketT:
raise NotImplementedError
-
-@final
-class RequestResponseConverterBuilder:
- def __init_subclass__(cls) -> None: # pragma: no cover
- raise TypeError("RequestResponseConverterBuilder cannot be subclassed")
-
- @staticmethod
- def build_for_client(
- request_converter: AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, Any, Any],
- response_converter: AbstractPacketConverterComposite[Any, Any, _ReceivedPacketT, _ReceivedDTOPacketT],
- ) -> AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT]:
- return PacketConverterComposite(
- create_from_dto=response_converter.create_from_dto_packet,
- convert_to_dto=request_converter.convert_to_dto_packet,
- )
-
- @staticmethod
- def build_for_server(
- request_converter: AbstractPacketConverterComposite[Any, Any, _ReceivedPacketT, _ReceivedDTOPacketT],
- response_converter: AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, Any, Any],
- ) -> AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT]:
- return PacketConverterComposite(
- create_from_dto=request_converter.create_from_dto_packet,
- convert_to_dto=response_converter.convert_to_dto_packet,
- )
+ create_from_dto_packet.__doc__ = AbstractPacketConverterComposite.create_from_dto_packet.__doc__
+ convert_to_dto_packet.__doc__ = AbstractPacketConverterComposite.convert_to_dto_packet.__doc__
diff --git a/src/easynetwork/exceptions.py b/src/easynetwork/exceptions.py
index ae8fee5d..32261f12 100644
--- a/src/easynetwork/exceptions.py
+++ b/src/easynetwork/exceptions.py
@@ -1,7 +1,21 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Exceptions definition module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Exceptions definition module.
+
+Here are all the exception classes defined and used by the library.
+"""
from __future__ import annotations
@@ -24,47 +38,102 @@
class ClientClosedError(ConnectionError):
- """Error raised when trying to do an operation on a closed client"""
+ """Error raised when trying to do an operation on a closed client."""
class ServerClosedError(RuntimeError):
- """Error raised when trying to do an operation on a closed server"""
+ """Error raised when trying to do an operation on a closed server."""
class ServerAlreadyRunning(RuntimeError):
- """Error raised if serve_forever() is called twice"""
+ """The server is already running."""
class DeserializeError(Exception):
+ """Error raised by a :term:`serializer` if the data format is invalid."""
+
def __init__(self, message: str, error_info: Any = None) -> None:
+ """
+ Parameters:
+ message: Error message.
+ error_info: Additional error data.
+ """
+
super().__init__(message)
+
self.error_info: Any = error_info
+ """Additional error data."""
class IncrementalDeserializeError(DeserializeError):
+ """Error raised by an :term:`incremental serializer` if the data format is invalid."""
+
def __init__(self, message: str, remaining_data: bytes, error_info: Any = None) -> None:
+ """
+ Parameters:
+ message: Error message.
+ remaining_data: Unused trailing data.
+ error_info: Additional error data.
+ """
+
super().__init__(message, error_info=error_info)
+
self.remaining_data: bytes = remaining_data
+ """Unused trailing data."""
class PacketConversionError(Exception):
+ """The deserialized :term:`packet` is invalid."""
+
def __init__(self, message: str, error_info: Any = None) -> None:
+ """
+ Parameters:
+ message: Error message.
+ error_info: Additional error data.
+ """
+
super().__init__(message)
+
self.error_info: Any = error_info
+ """Additional error data"""
class BaseProtocolParseError(Exception):
+ """Parsing error raised by a :term:`protocol object`."""
+
def __init__(self, error: DeserializeError | PacketConversionError) -> None:
+ """
+ Parameters:
+ error: Error instance.
+ """
+
super().__init__(f"Error while parsing data: {error}")
+
self.error: DeserializeError | PacketConversionError = error
+ """Error instance."""
class DatagramProtocolParseError(BaseProtocolParseError):
+ """Parsing error raised by :class:`easynetwork.protocol.DatagramProtocol`."""
+
sender_address: SocketAddress
+ """Address of the sender."""
class StreamProtocolParseError(BaseProtocolParseError):
+ """Parsing error raised by :class:`easynetwork.protocol.StreamProtocol`."""
+
def __init__(self, remaining_data: bytes, error: IncrementalDeserializeError | PacketConversionError) -> None:
+ """
+ Parameters:
+ remaining_data: Unused trailing data.
+ error: Error instance.
+ """
+
super().__init__(error)
+
self.error: IncrementalDeserializeError | PacketConversionError
+ """Error instance."""
+
self.remaining_data: bytes = remaining_data
+ """Unused trailing data."""
diff --git a/src/easynetwork/protocol.py b/src/easynetwork/protocol.py
index d7b34ddd..3db69213 100644
--- a/src/easynetwork/protocol.py
+++ b/src/easynetwork/protocol.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Network protocol module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Communication protocol objects module"""
from __future__ import annotations
@@ -11,8 +22,9 @@
]
from collections.abc import Generator
-from typing import Any, Generic, TypeVar, overload
+from typing import Any, Generic, overload
+from ._typevars import _DTOPacketT, _ReceivedPacketT, _SentPacketT
from .converter import AbstractPacketConverterComposite
from .exceptions import (
DatagramProtocolParseError,
@@ -23,20 +35,16 @@
)
from .serializers.abc import AbstractIncrementalPacketSerializer, AbstractPacketSerializer
-_SentDTOPacketT = TypeVar("_SentDTOPacketT")
-_ReceivedDTOPacketT = TypeVar("_ReceivedDTOPacketT")
-
-_SentPacketT = TypeVar("_SentPacketT")
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-
class DatagramProtocol(Generic[_SentPacketT, _ReceivedPacketT]):
+ """A :term:`protocol object` class for datagram communication."""
+
__slots__ = ("__serializer", "__converter", "__weakref__")
@overload
def __init__(
self,
- serializer: AbstractPacketSerializer[_SentPacketT, _ReceivedPacketT],
+ serializer: AbstractPacketSerializer[_SentPacketT | _ReceivedPacketT],
converter: None = ...,
) -> None:
...
@@ -44,29 +52,59 @@ def __init__(
@overload
def __init__(
self,
- serializer: AbstractPacketSerializer[_SentDTOPacketT, _ReceivedDTOPacketT],
- converter: AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT],
+ serializer: AbstractPacketSerializer[_DTOPacketT],
+ converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, _DTOPacketT],
) -> None:
...
def __init__(
self,
- serializer: AbstractPacketSerializer[Any, Any],
- converter: AbstractPacketConverterComposite[_SentPacketT, Any, _ReceivedPacketT, Any] | None = None,
+ serializer: AbstractPacketSerializer[Any],
+ converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, Any] | None = None,
) -> None:
+ """
+ Parameters:
+ serializer: The :term:`serializer` to use.
+ converter: The :term:`converter` to use.
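+
+        Example (a minimal sketch, using the :class:`.JSONSerializer` shipped with the library)::
+
+            from easynetwork.serializers import JSONSerializer
+
+            protocol = DatagramProtocol(JSONSerializer())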
+ """
+
if not isinstance(serializer, AbstractPacketSerializer):
raise TypeError(f"Expected a serializer instance, got {serializer!r}")
if converter is not None and not isinstance(converter, AbstractPacketConverterComposite):
raise TypeError(f"Expected a converter instance, got {converter!r}")
- self.__serializer: AbstractPacketSerializer[Any, Any] = serializer
- self.__converter: AbstractPacketConverterComposite[_SentPacketT, Any, _ReceivedPacketT, Any] | None = converter
+ self.__serializer: AbstractPacketSerializer[Any] = serializer
+ self.__converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, Any] | None = converter
def make_datagram(self, packet: _SentPacketT) -> bytes:
+ """
+ Serializes a Python object to a raw datagram :term:`packet`.
+
+ Parameters:
+ packet: The :term:`packet` as a Python object to serialize.
+
+ Returns:
+ the serialized :term:`packet`.
+ """
+
if (converter := self.__converter) is not None:
packet = converter.convert_to_dto_packet(packet)
return self.__serializer.serialize(packet)
def build_packet_from_datagram(self, datagram: bytes) -> _ReceivedPacketT:
+ """
+ Creates a Python object representing the raw datagram :term:`packet`.
+
+ Parameters:
+ datagram: The datagram :term:`packet` to deserialize.
+
+ Raises:
+ DatagramProtocolParseError: in case of deserialization error.
+ DatagramProtocolParseError: in case of conversion error (if there is a :term:`converter`).
+
+ Returns:
+ the deserialized :term:`packet`.
+ """
+
try:
packet: _ReceivedPacketT = self.__serializer.deserialize(datagram)
except DeserializeError as exc:
@@ -80,12 +118,14 @@ def build_packet_from_datagram(self, datagram: bytes) -> _ReceivedPacketT:
class StreamProtocol(Generic[_SentPacketT, _ReceivedPacketT]):
+ """A :term:`protocol object` class for connection-oriented stream communication."""
+
__slots__ = ("__serializer", "__converter", "__weakref__")
@overload
def __init__(
self,
- serializer: AbstractIncrementalPacketSerializer[_SentPacketT, _ReceivedPacketT],
+ serializer: AbstractIncrementalPacketSerializer[_SentPacketT | _ReceivedPacketT],
converter: None = ...,
) -> None:
...
@@ -93,29 +133,60 @@ def __init__(
@overload
def __init__(
self,
- serializer: AbstractIncrementalPacketSerializer[_SentDTOPacketT, _ReceivedDTOPacketT],
- converter: AbstractPacketConverterComposite[_SentPacketT, _SentDTOPacketT, _ReceivedPacketT, _ReceivedDTOPacketT],
+ serializer: AbstractIncrementalPacketSerializer[_DTOPacketT],
+ converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, _DTOPacketT],
) -> None:
...
def __init__(
self,
- serializer: AbstractIncrementalPacketSerializer[Any, Any],
- converter: AbstractPacketConverterComposite[_SentPacketT, Any, _ReceivedPacketT, Any] | None = None,
+ serializer: AbstractIncrementalPacketSerializer[Any],
+ converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, Any] | None = None,
) -> None:
+ """
+ Parameters:
+ serializer: The :term:`incremental serializer` to use.
+ converter: The :term:`converter` to use.
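+
+        Example (a minimal sketch, using the :class:`.StringLineSerializer` shipped with the library)::
+
+            from easynetwork.serializers import StringLineSerializer
+
+            protocol = StreamProtocol(StringLineSerializer())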
+ """
+
if not isinstance(serializer, AbstractIncrementalPacketSerializer):
raise TypeError(f"Expected an incremental serializer instance, got {serializer!r}")
if converter is not None and not isinstance(converter, AbstractPacketConverterComposite):
raise TypeError(f"Expected a converter instance, got {converter!r}")
- self.__serializer: AbstractIncrementalPacketSerializer[Any, Any] = serializer
- self.__converter: AbstractPacketConverterComposite[_SentPacketT, Any, _ReceivedPacketT, Any] | None = converter
+ self.__serializer: AbstractIncrementalPacketSerializer[Any] = serializer
+ self.__converter: AbstractPacketConverterComposite[_SentPacketT, _ReceivedPacketT, Any] | None = converter
def generate_chunks(self, packet: _SentPacketT) -> Generator[bytes, None, None]:
+ """
+ Serializes a Python object to a raw :term:`packet` part by part.
+
+ Parameters:
+ packet: The :term:`packet` as a Python object to serialize.
+
+ Yields:
+ all the parts of the :term:`packet`.
+ """
+
if (converter := self.__converter) is not None:
packet = converter.convert_to_dto_packet(packet)
return (yield from self.__serializer.incremental_serialize(packet))
def build_packet_from_chunks(self) -> Generator[None, bytes, tuple[_ReceivedPacketT, bytes]]:
+ """
+ Creates a Python object representing the raw :term:`packet`.
+
+ Raises:
+ StreamProtocolParseError: in case of deserialization error.
+ StreamProtocolParseError: in case of conversion error (if there is a :term:`converter`).
+ RuntimeError: The :term:`serializer` raised :exc:`.DeserializeError` instead of :exc:`.IncrementalDeserializeError`.
+
+ Yields:
+ :data:`None` until the whole :term:`packet` has been deserialized.
+
+ Returns:
+ a tuple with the deserialized Python object and the unused trailing data.
+ """
+
packet: _ReceivedPacketT
try:
packet, remaining_data = yield from self.__serializer.incremental_deserialize()
diff --git a/src/easynetwork/serializers/__init__.py b/src/easynetwork/serializers/__init__.py
index b2618e50..22a9e0ed 100644
--- a/src/easynetwork/serializers/__init__.py
+++ b/src/easynetwork/serializers/__init__.py
@@ -1,24 +1,26 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""EasyNetwork's packet serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""EasyNetwork's packet serializers module"""
from __future__ import annotations
__all__ = [
- "AbstractCompressorSerializer",
- "AbstractIncrementalPacketSerializer",
- "AbstractPacketSerializer",
"AbstractStructSerializer",
- "AutoSeparatedPacketSerializer",
- "BZ2CompressorSerializer",
- "Base64EncoderSerializer",
"CBORDecoderConfig",
"CBOREncoderConfig",
"CBORSerializer",
- "EncryptorSerializer",
- "FileBasedPacketSerializer",
- "FixedSizePacketSerializer",
"JSONDecoderConfig",
"JSONEncoderConfig",
"JSONSerializer",
@@ -30,17 +32,13 @@
"PicklerConfig",
"StringLineSerializer",
"UnpicklerConfig",
- "ZlibCompressorSerializer",
]
############ Package initialization ############
-from .abc import *
-from .base_stream import *
from .cbor import *
from .json import *
from .line import *
from .msgpack import *
from .pickle import *
from .struct import *
-from .wrapper import *
diff --git a/src/easynetwork/serializers/abc.py b/src/easynetwork/serializers/abc.py
index e3fc5254..94367e7f 100644
--- a/src/easynetwork/serializers/abc.py
+++ b/src/easynetwork/serializers/abc.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""Abstract base network packet serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""Network packet serializer base classes module"""
from __future__ import annotations
@@ -12,50 +23,131 @@
from abc import ABCMeta, abstractmethod
from collections.abc import Generator
-from typing import Any, Generic, TypeVar
+from typing import Any, Generic
+from .._typevars import _DTOPacketT
from ..exceptions import DeserializeError
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+class AbstractPacketSerializer(Generic[_DTOPacketT], metaclass=ABCMeta):
+ """
+ The base class for implementing a :term:`serializer`.
+
+    Implementing this interface creates a :term:`one-shot serializer`.
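+
+    Example (a minimal sketch, imports elided; ``TextSerializer`` is a hypothetical implementation and
+    :exc:`.DeserializeError` comes from :mod:`easynetwork.exceptions`)::
+
+        class TextSerializer(AbstractPacketSerializer[str]):
+            def serialize(self, packet: str, /) -> bytes:
+                return packet.encode("utf-8")
+
+            def deserialize(self, data: bytes, /) -> str:
+                try:
+                    return data.decode("utf-8")
+                except UnicodeError as exc:
+                    raise DeserializeError("Invalid UTF-8 data") from exc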
+ """
-class AbstractPacketSerializer(Generic[_ST_contra, _DT_co], metaclass=ABCMeta):
__slots__ = ("__weakref__",)
def __getstate__(self) -> Any: # pragma: no cover
raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
@abstractmethod
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT, /) -> bytes:
+ """
+ Returns the byte representation of the Python object `packet`.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
raise NotImplementedError
@abstractmethod
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes, /) -> _DTOPacketT:
+ """
+ Creates a Python object representing the raw :term:`packet` from `data`.
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: An unrelated deserialization error occurred.
+
+ Returns:
+ the deserialized Python object.
+ """
raise NotImplementedError
-class AbstractIncrementalPacketSerializer(AbstractPacketSerializer[_ST_contra, _DT_co]):
+class AbstractIncrementalPacketSerializer(AbstractPacketSerializer[_DTOPacketT]):
+ """
+ The base class for implementing an :term:`incremental serializer`.
+ """
+
__slots__ = ()
@abstractmethod
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: _DTOPacketT, /) -> Generator[bytes, None, None]:
+ """
+ Returns the byte representation of the Python object `packet`.
+
+ The generator should :keyword:`yield` non-empty byte sequences.
+
+        The main purpose of this method is to add metadata that could not be included in the output of :meth:`serialize`,
+        such as headers, separators, and so on. This metadata is what allows :meth:`incremental_deserialize` to find
+        the :term:`packet` boundaries in the byte stream.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Yields:
+ all the parts of the :term:`packet`.
+ """
raise NotImplementedError
@abstractmethod
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DTOPacketT, bytes]]:
+ """
+ Creates a Python object representing the raw :term:`packet`.
+
+ Raises:
+ IncrementalDeserializeError: An unrelated deserialization error occurred.
+
+ Yields:
+ :data:`None` until the whole :term:`packet` has been deserialized.
+
+ Returns:
+ a tuple with the deserialized Python object and the unused trailing data.
+ """
raise NotImplementedError
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT, /) -> bytes:
+ """
+ Returns the byte representation of the Python object `packet`.
+
+ The default implementation concatenates and returns the parts sent by :meth:`incremental_serialize`.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
return b"".join(self.incremental_serialize(packet))
- def deserialize(self, data: bytes) -> _DT_co:
- consumer: Generator[None, bytes, tuple[_DT_co, bytes]] = self.incremental_deserialize()
+ def deserialize(self, data: bytes, /) -> _DTOPacketT:
+ """
+ Creates a Python object representing the raw :term:`packet` from `data`.
+
+ The default implementation uses :meth:`incremental_deserialize` and expects it to deserialize ``data`` at once.
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: Too little or too much data to parse.
+ DeserializeError: An unrelated deserialization error occurred.
+
+ Returns:
+ the deserialized Python object.
+ """
+ consumer: Generator[None, bytes, tuple[_DTOPacketT, bytes]] = self.incremental_deserialize()
try:
next(consumer)
except StopIteration:
raise RuntimeError("self.incremental_deserialize() generator did not yield") from None
- packet: _DT_co
+ packet: _DTOPacketT
remaining: bytes
try:
consumer.send(data)
@@ -63,7 +155,7 @@ def deserialize(self, data: bytes) -> _DT_co:
packet, remaining = exc.value
else:
consumer.close()
- raise DeserializeError("Missing data to create packet") from None
+ raise DeserializeError("Missing data to create packet", error_info={"data": data}) from None
if remaining:
- raise DeserializeError("Extra data caught")
+ raise DeserializeError("Extra data caught", error_info={"packet": packet, "extra": remaining})
return packet
diff --git a/src/easynetwork/serializers/base_stream.py b/src/easynetwork/serializers/base_stream.py
index 1e40a66c..c01af6e8 100644
--- a/src/easynetwork/serializers/base_stream.py
+++ b/src/easynetwork/serializers/base_stream.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Stream network packet serializer handler module"""
@@ -14,19 +25,33 @@
from abc import abstractmethod
from collections.abc import Generator
from io import BytesIO
-from typing import IO, Any, TypeVar, final
+from typing import IO, Any, final
+from .._typevars import _DTOPacketT
from ..exceptions import DeserializeError, IncrementalDeserializeError
from .abc import AbstractIncrementalPacketSerializer, AbstractPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+class AutoSeparatedPacketSerializer(AbstractIncrementalPacketSerializer[_DTOPacketT]):
+ """
+    Base class for stream protocols that separate sent packets with a byte sequence.
+ """
-class AutoSeparatedPacketSerializer(AbstractIncrementalPacketSerializer[_ST_contra, _DT_co]):
__slots__ = ("__separator", "__incremental_serialize_check_separator")
def __init__(self, separator: bytes, *, incremental_serialize_check_separator: bool = True, **kwargs: Any) -> None:
+ """
+ Parameters:
+ separator: Byte sequence that indicates the end of the token.
+ incremental_serialize_check_separator: If `True` (the default), checks that the data returned by
+ :meth:`serialize` does not contain `separator`,
+ and removes superfluous `separator` added at the end.
+ kwargs: Extra options given to ``super().__init__()``.
+
+ Raises:
+ TypeError: Invalid arguments.
+ ValueError: Empty separator sequence.
+ """
super().__init__(**kwargs)
separator = bytes(separator)
if len(separator) < 1:
@@ -35,11 +60,23 @@ def __init__(self, separator: bytes, *, incremental_serialize_check_separator: b
self.__incremental_serialize_check_separator = bool(incremental_serialize_check_separator)
@abstractmethod
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT, /) -> bytes:
+ """
+ See :meth:`.AbstractPacketSerializer.serialize` documentation.
+ """
raise NotImplementedError
@final
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: _DTOPacketT, /) -> Generator[bytes, None, None]:
+ """
+ Yields the data returned by :meth:`serialize` and appends `separator`.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_serialize` documentation for details.
+
+ Raises:
+ ValueError: If `incremental_serialize_check_separator` is `True` and `separator` is in the returned data.
+ Exception: Any error raised by :meth:`serialize`.
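+
+        Roughly equivalent to (separator checks omitted)::
+
+            def incremental_serialize(self, packet):
+                yield self.serialize(packet)
+                yield self.separator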
+ """
data: bytes = self.serialize(packet)
separator: bytes = self.__separator
if self.__incremental_serialize_check_separator:
@@ -52,11 +89,23 @@ def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, No
yield separator
@abstractmethod
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes, /) -> _DTOPacketT:
+ """
+ See :meth:`.AbstractPacketSerializer.deserialize` documentation.
+ """
raise NotImplementedError
@final
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DTOPacketT, bytes]]:
+ """
+ Yields until `separator` is found and calls :meth:`deserialize` **without** `separator`.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_deserialize` documentation for details.
+
+ Raises:
+ IncrementalDeserializeError: :meth:`deserialize` raised :exc:`.DeserializeError`.
+ Exception: Any error raised by :meth:`deserialize`.
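+
+        Roughly equivalent to (error handling omitted)::
+
+            def incremental_deserialize(self):
+                buffer = yield
+                while self.separator not in buffer:
+                    buffer += yield
+                data, _, buffer = buffer.partition(self.separator)
+                return self.deserialize(data), buffer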
+ """
buffer: bytes = yield
separator: bytes = self.__separator
separator_length: int = len(separator)
@@ -86,13 +135,29 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
@property
@final
def separator(self) -> bytes:
+ """
+ Byte sequence that indicates the end of the token. Read-only attribute.
+ """
return self.__separator
-class FixedSizePacketSerializer(AbstractIncrementalPacketSerializer[_ST_contra, _DT_co]):
+class FixedSizePacketSerializer(AbstractIncrementalPacketSerializer[_DTOPacketT]):
+ """
+ A base class for stream protocols in which the packets are of a fixed size.
+ """
+
__slots__ = ("__size",)
def __init__(self, size: int, **kwargs: Any) -> None:
+ """
+ Parameters:
+ size: The expected data size.
+ kwargs: Extra options given to ``super().__init__()``.
+
+ Raises:
+ TypeError: Invalid integer.
+            ValueError: `size` is negative or zero.
+ """
super().__init__(**kwargs)
size = int(size)
if size <= 0:
@@ -100,22 +165,46 @@ def __init__(self, size: int, **kwargs: Any) -> None:
self.__size: int = size
@abstractmethod
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT, /) -> bytes:
+ """
+ See :meth:`.AbstractPacketSerializer.serialize` documentation.
+ """
raise NotImplementedError
@final
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: _DTOPacketT, /) -> Generator[bytes, None, None]:
+ """
+ Yields the data returned by :meth:`serialize`.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_serialize` documentation for details.
+
+ Raises:
+ ValueError: If the returned data size is not equal to `packet_size`.
+ Exception: Any error raised by :meth:`serialize`.
+ """
data = self.serialize(packet)
if len(data) != self.__size:
raise ValueError("serialized data size does not meet expectation")
yield data
@abstractmethod
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes, /) -> _DTOPacketT:
+ """
+ See :meth:`.AbstractPacketSerializer.deserialize` documentation.
+ """
raise NotImplementedError
@final
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DTOPacketT, bytes]]:
+ """
+ Yields until there is enough data and calls :meth:`deserialize`.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_deserialize` documentation for details.
+
+ Raises:
+ IncrementalDeserializeError: :meth:`deserialize` raised :exc:`.DeserializeError`.
+ Exception: Any error raised by :meth:`deserialize`.
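+
+        Roughly equivalent to (error handling omitted)::
+
+            def incremental_deserialize(self):
+                buffer = yield
+                while len(buffer) < self.packet_size:
+                    buffer += yield
+                data, buffer = buffer[: self.packet_size], buffer[self.packet_size :]
+                return self.deserialize(data), buffer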
+ """
buffer: bytes = yield
packet_size: int = self.__size
while (buffer_size := len(buffer)) < packet_size:
@@ -143,13 +232,26 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
@property
@final
def packet_size(self) -> int:
+ """
+ The expected data size. Read-only attribute.
+ """
return self.__size
-class FileBasedPacketSerializer(AbstractPacketSerializer[_ST_contra, _DT_co]):
+class FileBasedPacketSerializer(AbstractPacketSerializer[_DTOPacketT]):
+ """
+ Base class for APIs requiring a :std:term:`file object` for serialization/deserialization.
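+
+    Example (a minimal sketch wrapping the standard :mod:`marshal` module, typing imports elided;
+    the chosen `expected_load_error` value is an assumption for illustration)::
+
+        import marshal
+
+        class MarshalSerializer(FileBasedPacketSerializer[Any]):
+            def __init__(self) -> None:
+                super().__init__(expected_load_error=ValueError)
+
+            def dump_to_file(self, packet: Any, file: IO[bytes], /) -> None:
+                marshal.dump(packet, file)
+
+            def load_from_file(self, file: IO[bytes], /) -> Any:
+                return marshal.load(file)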
+ """
+
__slots__ = ("__expected_errors",)
def __init__(self, expected_load_error: type[Exception] | tuple[type[Exception], ...], **kwargs: Any) -> None:
+ """
+ Parameters:
+            expected_load_error: Errors that can be raised by the :meth:`load_from_file` implementation,
+                                 which must be treated as deserialization errors.
+ kwargs: Extra options given to ``super().__init__()``.
+ """
super().__init__(**kwargs)
if not isinstance(expected_load_error, tuple):
expected_load_error = (expected_load_error,)
@@ -157,24 +259,63 @@ def __init__(self, expected_load_error: type[Exception] | tuple[type[Exception],
self.__expected_errors: tuple[type[Exception], ...] = expected_load_error
@abstractmethod
- def dump_to_file(self, packet: _ST_contra, file: IO[bytes]) -> None:
+ def dump_to_file(self, packet: _DTOPacketT, file: IO[bytes], /) -> None:
+ """
+ Write the serialized `packet` to `file`.
+
+ Parameters:
+ packet: The Python object to serialize.
+ file: The :std:term:`binary file` to write to.
+ """
raise NotImplementedError
@abstractmethod
- def load_from_file(self, file: IO[bytes]) -> _DT_co:
+ def load_from_file(self, file: IO[bytes], /) -> _DTOPacketT:
+ """
+ Read from `file` to deserialize the raw :term:`packet`.
+
+ Parameters:
+ file: The :std:term:`binary file` to read from.
+
+ Raises:
+ EOFError: Missing data to create the :term:`packet`.
+            Exception: Any error from the underlying API; errors matching `expected_load_error` are turned into :exc:`.DeserializeError` by the deserialization methods.
+
+ Returns:
+ the deserialized Python object.
+ """
raise NotImplementedError
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT, /) -> bytes:
+ """
+        Calls :meth:`dump_to_file` and returns the written bytes.
+
+ See :meth:`.AbstractPacketSerializer.serialize` documentation for details.
+
+ Raises:
+ Exception: Any error raised by :meth:`dump_to_file`.
+ """
with BytesIO() as buffer:
self.dump_to_file(packet, buffer)
return buffer.getvalue()
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes, /) -> _DTOPacketT:
+ """
+ Calls :meth:`load_from_file` and returns the result.
+
+ See :meth:`.AbstractPacketSerializer.deserialize` documentation for details.
+
+ Raises:
+ DeserializeError: :meth:`load_from_file` raised :class:`EOFError`.
+ DeserializeError: :meth:`load_from_file` does not read until EOF (unused trailing data).
+ DeserializeError: :meth:`load_from_file` raised an error that matches `expected_load_error`.
+ Exception: Any other error raised by :meth:`load_from_file`.
+ """
with BytesIO(data) as buffer:
try:
- packet: _DT_co = self.load_from_file(buffer)
+ packet: _DTOPacketT = self.load_from_file(buffer)
except EOFError as exc:
raise DeserializeError("Missing data to create packet", error_info={"data": data}) from exc
except self.__expected_errors as exc:
@@ -186,7 +327,15 @@ def deserialize(self, data: bytes) -> _DT_co:
return packet
@final
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: _DTOPacketT, /) -> Generator[bytes, None, None]:
+ """
+        Calls :meth:`dump_to_file` and yields the written bytes.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_serialize` documentation for details.
+
+ Raises:
+ Exception: Any error raised by :meth:`dump_to_file`.
+ """
with BytesIO() as buffer:
self.dump_to_file(packet, buffer)
if buffer.getbuffer().nbytes == 0:
@@ -195,7 +344,19 @@ def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, No
yield data
@final
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DTOPacketT, bytes]]:
+ """
+ Calls :meth:`load_from_file` and returns the result.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_deserialize` documentation for details.
+
+ Note:
+            The generator will :keyword:`yield` again (waiting for more data) whenever :meth:`load_from_file` raises :class:`EOFError`.
+
+ Raises:
+ IncrementalDeserializeError: :meth:`load_from_file` raised an error that matches `expected_load_error`.
+ Exception: Any other error raised by :meth:`load_from_file`.
+ """
with BytesIO((yield)) as buffer:
initial: bool = True
while True:
@@ -203,7 +364,7 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
buffer.write((yield))
buffer.seek(0)
try:
- packet: _DT_co = self.load_from_file(buffer)
+ packet: _DTOPacketT = self.load_from_file(buffer)
except EOFError:
continue
except self.__expected_errors as exc:
diff --git a/src/easynetwork/serializers/cbor.py b/src/easynetwork/serializers/cbor.py
index f5993c24..c5feee1d 100644
--- a/src/easynetwork/serializers/cbor.py
+++ b/src/easynetwork/serializers/cbor.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""cbor-based network packet serializer module"""
@@ -14,19 +25,22 @@
from collections.abc import Callable
from dataclasses import asdict as dataclass_asdict, dataclass
from functools import partial
-from typing import IO, TYPE_CHECKING, Any, TypeVar, final
+from typing import IO, TYPE_CHECKING, Any, final
from .base_stream import FileBasedPacketSerializer
if TYPE_CHECKING:
import datetime
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
-
@dataclass(kw_only=True)
class CBOREncoderConfig:
+ """
+ A dataclass with the CBOR encoder options.
+
+ See :class:`cbor2.encoder.CBOREncoder` for details.
+ """
+
datetime_as_timestamp: bool = False
timezone: datetime.tzinfo | None = None
value_sharing: bool = False
@@ -38,12 +52,24 @@ class CBOREncoderConfig:
@dataclass(kw_only=True)
class CBORDecoderConfig:
+ """
+ A dataclass with the CBOR decoder options.
+
+ See :class:`cbor2.decoder.CBORDecoder` for details.
+ """
+
object_hook: Callable[..., Any] | None = None
tag_hook: Callable[..., Any] | None = None
str_errors: str = "strict"
-class CBORSerializer(FileBasedPacketSerializer[_ST_contra, _DT_co]):
+class CBORSerializer(FileBasedPacketSerializer[Any]):
+ """
+ A :term:`serializer` built on top of the :mod:`cbor2` module.
+
+ Needs ``cbor`` extra dependencies.
+ """
+
__slots__ = ("__encoder_cls", "__decoder_cls")
def __init__(
@@ -51,6 +77,11 @@ def __init__(
encoder_config: CBOREncoderConfig | None = None,
decoder_config: CBORDecoderConfig | None = None,
) -> None:
+ """
+ Parameters:
+            encoder_config: Parameter object to configure the :class:`~cbor2.encoder.CBOREncoder`.
+            decoder_config: Parameter object to configure the :class:`~cbor2.decoder.CBORDecoder`.
+ """
try:
import cbor2
except ModuleNotFoundError as exc: # pragma: no cover
@@ -74,9 +105,35 @@ def __init__(
self.__decoder_cls = partial(cbor2.CBORDecoder, **dataclass_asdict(decoder_config))
@final
- def dump_to_file(self, packet: _ST_contra, file: IO[bytes]) -> None:
+ def dump_to_file(self, packet: Any, file: IO[bytes]) -> None:
+ """
+ Write the CBOR representation of `packet` to `file`.
+
+ Roughly equivalent to::
+
+ def dump_to_file(self, packet, file):
+ cbor2.dump(packet, file)
+
+ Parameters:
+ packet: The Python object to serialize.
+ file: The :std:term:`binary file` to write to.
+ """
self.__encoder_cls(file).encode(packet)
@final
- def load_from_file(self, file: IO[bytes]) -> _DT_co:
+ def load_from_file(self, file: IO[bytes]) -> Any:
+ """
+ Read from `file` to deserialize the raw CBOR :term:`packet`.
+
+ Roughly equivalent to::
+
+ def load_from_file(self, file):
+ return cbor2.load(file)
+
+ Parameters:
+ file: The :std:term:`binary file` to read from.
+
+ Returns:
+ the deserialized Python object.
+ """
return self.__decoder_cls(file).decode()
diff --git a/src/easynetwork/serializers/json.py b/src/easynetwork/serializers/json.py
index 718f2108..a7b764de 100644
--- a/src/easynetwork/serializers/json.py
+++ b/src/easynetwork/serializers/json.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""json-based network packet serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""json-based packet serializer module"""
from __future__ import annotations
@@ -16,16 +27,12 @@
from collections import Counter
from collections.abc import Callable, Generator
from dataclasses import asdict as dataclass_asdict, dataclass
-from typing import Any, TypeVar, final
+from typing import Any, final
from ..exceptions import DeserializeError, IncrementalDeserializeError
from ..tools._utils import iter_bytes
from .abc import AbstractIncrementalPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
-
-
_JSON_VALUE_BYTES: frozenset[int] = frozenset(bytes(string.digits + string.ascii_letters + string.punctuation, "ascii"))
_ESCAPE_BYTE: int = b"\\"[0]
@@ -34,6 +41,12 @@
@dataclass(kw_only=True)
class JSONEncoderConfig:
+ """
+ A dataclass with the JSON encoder options.
+
+ See :class:`json.JSONEncoder` for details.
+ """
+
skipkeys: bool = False
check_circular: bool = True
ensure_ascii: bool = True
@@ -45,6 +58,12 @@ class JSONEncoderConfig:
@dataclass(kw_only=True)
class JSONDecoderConfig:
+ """
+ A dataclass with the JSON decoder options.
+
+ See :class:`json.JSONDecoder` for details.
+ """
+
object_hook: Callable[..., Any] | None = None
parse_int: Callable[[str], Any] | None = None
parse_float: Callable[[str], Any] | None = None
@@ -107,7 +126,7 @@ def raw_parse() -> Generator[None, bytes, tuple[bytes, bytes]]:
# partial_document not complete
start = partial_document_view.nbytes
- # yield out of view scope
+ # yield outside view scope
partial_document += yield
except _JSONParser._PlainValueError:
@@ -132,7 +151,11 @@ def _split_partial_document(partial_document: bytes, index: int) -> tuple[bytes,
return partial_document[:index], partial_document[index:]
-class JSONSerializer(AbstractIncrementalPacketSerializer[_ST_contra, _DT_co]):
+class JSONSerializer(AbstractIncrementalPacketSerializer[Any]):
+ """
+ A :term:`serializer` built on top of the :mod:`json` module.
+ """
+
__slots__ = ("__encoder", "__decoder", "__decoder_error_cls", "__encoding", "__unicode_errors")
def __init__(
@@ -143,6 +166,17 @@ def __init__(
encoding: str = "utf-8",
unicode_errors: str = "strict",
) -> None:
+ """
+ Parameters:
+ encoder_config: Parameter object to configure the :class:`~json.JSONEncoder`.
+ decoder_config: Parameter object to configure the :class:`~json.JSONDecoder`.
+ encoding: String encoding.
+ unicode_errors: Controls how encoding errors are handled.
+
+ See Also:
+ :ref:`standard-encodings` and :ref:`error-handlers`.
+        """
from json import JSONDecodeError, JSONDecoder, JSONEncoder
super().__init__()
@@ -167,16 +201,71 @@ def __init__(
self.__unicode_errors: str = unicode_errors
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: Any) -> bytes:
+ """
+ Returns the JSON representation of the Python object `packet`.
+
+ Roughly equivalent to::
+
+ def serialize(self, packet):
+ return json.dumps(packet)
+
+ Example:
+ >>> s = JSONSerializer()
+ >>> s.serialize({"key": [1, 2, 3], "data": None})
+ b'{"key":[1,2,3],"data":null}'
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
return self.__encoder.encode(packet).encode(self.__encoding, self.__unicode_errors)
@final
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: Any) -> Generator[bytes, None, None]:
+ r"""
+ Returns the JSON representation of the Python object `packet`.
+
+ Example:
+ >>> s = JSONSerializer()
+ >>> b"".join(s.incremental_serialize({"key": [1, 2, 3], "data": None}))
+ b'{"key":[1,2,3],"data":null}\n'
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Yields:
+ all the parts of the JSON :term:`packet`.
+ """
yield self.__encoder.encode(packet).encode(self.__encoding, self.__unicode_errors)
yield b"\n"
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> Any:
+ """
+ Creates a Python object representing the raw JSON :term:`packet` from `data`.
+
+ Roughly equivalent to::
+
+ def deserialize(self, data):
+ return json.loads(data)
+
+ Example:
+ >>> s = JSONSerializer()
+ >>> s.deserialize(b'{"key":[1,2,3],"data":null}')
+ {'key': [1, 2, 3], 'data': None}
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+            DeserializeError: A :class:`UnicodeError` or :class:`~json.JSONDecodeError` has been raised.
+
+ Returns:
+ the deserialized Python object.
+ """
try:
document: str = data.decode(self.__encoding, self.__unicode_errors)
except UnicodeError as exc:
@@ -184,7 +273,7 @@ def deserialize(self, data: bytes) -> _DT_co:
finally:
del data
try:
- packet: _DT_co = self.__decoder.decode(document)
+ packet: Any = self.__decoder.decode(document)
except self.__decoder_error_cls as exc:
raise DeserializeError(
f"JSON decode error: {exc}",
@@ -198,7 +287,29 @@ def deserialize(self, data: bytes) -> _DT_co:
return packet
@final
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[Any, bytes]]:
+ """
+ Creates a Python object representing the raw JSON :term:`packet`.
+
+ Example:
+ >>> s = JSONSerializer()
+ >>> consumer = s.incremental_deserialize()
+ >>> next(consumer)
+ >>> consumer.send(b'{"key":[1,2,3]')
+ >>> consumer.send(b',"data":null}{"something":"remaining"}')
+ Traceback (most recent call last):
+ ...
+ StopIteration: ({'key': [1, 2, 3], 'data': None}, b'{"something":"remaining"}')
+
+ Yields:
+ :data:`None` until the whole :term:`packet` has been deserialized.
+
+ Raises:
+            IncrementalDeserializeError: A :class:`UnicodeError` or :class:`~json.JSONDecodeError` has been raised.
+
+ Returns:
+ a tuple with the deserialized Python object and the unused trailing data.
+ """
complete_document, remaining_data = yield from _JSONParser.raw_parse()
if not complete_document:
@@ -206,7 +317,7 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
complete_document = remaining_data
remaining_data = b""
- packet: _DT_co
+ packet: Any
try:
document: str = complete_document.decode(self.__encoding, self.__unicode_errors)
except UnicodeError as exc:
diff --git a/src/easynetwork/serializers/line.py b/src/easynetwork/serializers/line.py
index 37d91543..f6e7e711 100644
--- a/src/easynetwork/serializers/line.py
+++ b/src/easynetwork/serializers/line.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""string line network packet serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""string line packet serializer module"""
from __future__ import annotations
@@ -13,7 +24,11 @@
from .base_stream import AutoSeparatedPacketSerializer
-class StringLineSerializer(AutoSeparatedPacketSerializer[str, str]):
+class StringLineSerializer(AutoSeparatedPacketSerializer[str]):
+ """
+ A :term:`serializer` to handle ASCII-based protocols.
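+
+    Example:
+        >>> s = StringLineSerializer(newline="CRLF")
+        >>> s.deserialize(s.serialize("hello"))
+        'hello'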
+ """
+
__slots__ = ("__encoding", "__unicode_errors")
def __init__(
@@ -23,6 +38,22 @@ def __init__(
encoding: str = "ascii",
unicode_errors: str = "strict",
) -> None:
+ r"""
+ Parameters:
+ newline: Magic string indicating the newline character sequence.
+ Possible values are:
+
+ - ``"LF"`` (the default): Line feed character (``"\n"``).
+
+ - ``"CR"``: Carriage return character (``"\r"``).
+
+ - ``"CRLF"``: Carriage return + line feed character sequence (``"\r\n"``).
+ encoding: String encoding. Defaults to ``"ascii"``.
+ unicode_errors: Controls how encoding errors are handled.
+
+ See Also:
+ :ref:`standard-encodings` and :ref:`error-handlers`.
+ """
separator: bytes
match newline:
case "LF":
@@ -39,6 +70,33 @@ def __init__(
@final
def serialize(self, packet: str) -> bytes:
+ """
+ Encodes the given string to bytes.
+
+ Roughly equivalent to::
+
+ def serialize(self, packet):
+ return packet.encode()
+
+ Example:
+ >>> s = StringLineSerializer()
+ >>> s.serialize("character string")
+ b'character string'
+
+ Parameters:
+ packet: The string to encode.
+
+ Raises:
+ TypeError: `packet` is not a :class:`str`.
+ UnicodeError: Invalid string.
+ ValueError: `newline` found in `packet`.
+
+ Returns:
+ the byte sequence.
+
+ Important:
+ The output **does not** contain `newline`.
+ """
if not isinstance(packet, str):
raise TypeError(f"Expected a string, got {packet!r}")
data = packet.encode(self.__encoding, self.__unicode_errors)
@@ -48,6 +106,31 @@ def serialize(self, packet: str) -> bytes:
@final
def deserialize(self, data: bytes) -> str:
+ r"""
+ Decodes `data` and returns the string.
+
+ Roughly equivalent to::
+
+ def deserialize(self, data):
+ return data.decode()
+
+ Example:
+ >>> s = StringLineSerializer()
+ >>> s.deserialize(b"character string")
+ 'character string'
+
+ Parameters:
+            data: The data to decode.
+
+ Raises:
+ DeserializeError: :class:`UnicodeError` raised when decoding `data`.
+
+ Returns:
+ the string.
+
+ Important:
+ Trailing `newline` sequences are **removed**.
+ """
separator: bytes = self.separator
while data.endswith(separator):
data = data.removesuffix(separator)
@@ -59,9 +142,15 @@ def deserialize(self, data: bytes) -> str:
@property
@final
def encoding(self) -> str:
+ """
+ String encoding. Read-only attribute.
+ """
return self.__encoding
@property
@final
def unicode_errors(self) -> str:
+ """
+ Controls how encoding errors are handled. Read-only attribute.
+ """
return self.__unicode_errors
diff --git a/src/easynetwork/serializers/msgpack.py b/src/easynetwork/serializers/msgpack.py
index d27248fb..781a059d 100644
--- a/src/easynetwork/serializers/msgpack.py
+++ b/src/easynetwork/serializers/msgpack.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""msgpack-based network packet serializer module"""
@@ -14,14 +25,11 @@
from collections.abc import Callable
from dataclasses import asdict as dataclass_asdict, dataclass, field
from functools import partial
-from typing import Any, TypeVar, final
+from typing import Any, final
from ..exceptions import DeserializeError
from .abc import AbstractPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
-
def _get_default_ext_hook() -> Callable[[int, bytes], Any]:
try:
@@ -33,6 +41,12 @@ def _get_default_ext_hook() -> Callable[[int, bytes], Any]:
@dataclass(kw_only=True)
class MessagePackerConfig:
+ """
+ A dataclass with the Packer options.
+
+ See :class:`msgpack.Packer` for details.
+ """
+
default: Callable[[Any], Any] | None = None
use_single_float: bool = False
use_bin_type: bool = True
@@ -43,6 +57,12 @@ class MessagePackerConfig:
@dataclass(kw_only=True)
class MessageUnpackerConfig:
+ """
+ A dataclass with the Unpacker options.
+
+ See :class:`msgpack.Unpacker` for details.
+ """
+
raw: bool = False
use_list: bool = True
timestamp: int = 0
@@ -53,7 +73,13 @@ class MessageUnpackerConfig:
ext_hook: Callable[[int, bytes], Any] = field(default_factory=_get_default_ext_hook)
-class MessagePackSerializer(AbstractPacketSerializer[_ST_contra, _DT_co]):
+class MessagePackSerializer(AbstractPacketSerializer[Any]):
+ """
+ A :term:`one-shot serializer` built on top of the :mod:`msgpack` module.
+
+ Needs ``msgpack`` extra dependencies.
+ """
+
__slots__ = ("__packb", "__unpackb", "__unpack_out_of_data_cls", "__unpack_extra_data_cls")
def __init__(
@@ -61,14 +87,19 @@ def __init__(
packer_config: MessagePackerConfig | None = None,
unpacker_config: MessageUnpackerConfig | None = None,
) -> None:
+ """
+ Parameters:
+ packer_config: Parameter object to configure the :class:`~msgpack.Packer`.
+ unpacker_config: Parameter object to configure the :class:`~msgpack.Unpacker`.
+ """
try:
import msgpack
except ModuleNotFoundError as exc: # pragma: no cover
raise ModuleNotFoundError("message-pack dependencies are missing. Consider adding 'msgpack' extra") from exc
super().__init__()
- self.__packb: Callable[[_ST_contra], bytes]
- self.__unpackb: Callable[[bytes], _DT_co]
+ self.__packb: Callable[[Any], bytes]
+ self.__unpackb: Callable[[bytes], Any]
if packer_config is None:
packer_config = MessagePackerConfig()
@@ -88,11 +119,43 @@ def __init__(
self.__unpack_extra_data_cls = msgpack.ExtraData
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: Any) -> bytes:
+ """
+ Returns the MessagePack representation of the Python object `packet`.
+
+ Roughly equivalent to::
+
+ def serialize(self, packet):
+ return msgpack.packb(packet)
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
return self.__packb(packet)
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> Any:
+ """
+ Creates a Python object representing the raw MessagePack :term:`packet` from `data`.
+
+ Roughly equivalent to::
+
+ def deserialize(self, data):
+ return msgpack.unpackb(data)
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: Too little or too much data to parse.
+ DeserializeError: An unrelated deserialization error occurred.
+
+ Returns:
+ the deserialized Python object.
+ """
try:
return self.__unpackb(data)
except self.__unpack_out_of_data_cls as exc:
diff --git a/src/easynetwork/serializers/pickle.py b/src/easynetwork/serializers/pickle.py
index f4551693..22008298 100644
--- a/src/easynetwork/serializers/pickle.py
+++ b/src/easynetwork/serializers/pickle.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""pickle-based network packet serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""pickle-based packet serializer module"""
from __future__ import annotations
@@ -15,16 +26,13 @@
from dataclasses import asdict as dataclass_asdict, dataclass, field
from functools import partial
from io import BytesIO
-from typing import IO, TYPE_CHECKING, TypeVar, final
+from typing import IO, TYPE_CHECKING, Any, final
from ..exceptions import DeserializeError
from .abc import AbstractPacketSerializer
if TYPE_CHECKING:
- from pickle import Pickler as _Pickler, Unpickler as _Unpickler
-
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+ import pickle as _typing_pickle
def _get_default_pickler_protocol() -> int:
@@ -35,18 +43,34 @@ def _get_default_pickler_protocol() -> int:
@dataclass(kw_only=True)
class PicklerConfig:
+ """
+ A dataclass with the Pickler options.
+
+ See :class:`pickle.Pickler` for details.
+ """
+
protocol: int = field(default_factory=_get_default_pickler_protocol)
fix_imports: bool = False
@dataclass(kw_only=True)
class UnpicklerConfig:
+ """
+ A dataclass with the Unpickler options.
+
+ See :class:`pickle.Unpickler` for details.
+ """
+
fix_imports: bool = False
encoding: str = "utf-8"
errors: str = "strict"
-class PickleSerializer(AbstractPacketSerializer[_ST_contra, _DT_co]):
+class PickleSerializer(AbstractPacketSerializer[Any]):
+ """
+ A :term:`one-shot serializer` built on top of the :mod:`pickle` module.
+ """
+
__slots__ = ("__optimize", "__pickler_cls", "__unpickler_cls")
def __init__(
@@ -54,10 +78,18 @@ def __init__(
pickler_config: PicklerConfig | None = None,
unpickler_config: UnpicklerConfig | None = None,
*,
- pickler_cls: type[_Pickler] | None = None,
- unpickler_cls: type[_Unpickler] | None = None,
+ pickler_cls: type[_typing_pickle.Pickler] | None = None,
+ unpickler_cls: type[_typing_pickle.Unpickler] | None = None,
pickler_optimize: bool = False,
) -> None:
+ """
+ Parameters:
+ pickler_config: Parameter object to configure the :class:`~pickle.Pickler`.
+ unpickler_config: Parameter object to configure the :class:`~pickle.Unpickler`.
+ pickler_cls: The :class:`~pickle.Pickler` class to use (see :ref:`pickle-inst`).
+ unpickler_cls: The :class:`~pickle.Unpickler` class to use (see :ref:`pickle-restrict`).
+ pickler_optimize: If `True`, :func:`pickletools.optimize` will be applied to :meth:`pickle.Pickler.dump` output.
+ """
super().__init__()
import pickle
@@ -67,8 +99,8 @@ def __init__(
import pickletools
self.__optimize = pickletools.optimize
- self.__pickler_cls: Callable[[IO[bytes]], _Pickler]
- self.__unpickler_cls: Callable[[IO[bytes]], _Unpickler]
+ self.__pickler_cls: Callable[[IO[bytes]], _typing_pickle.Pickler]
+ self.__unpickler_cls: Callable[[IO[bytes]], _typing_pickle.Unpickler]
if pickler_config is None:
pickler_config = PicklerConfig()
@@ -86,7 +118,21 @@ def __init__(
self.__unpickler_cls = partial(unpickler_cls or pickle.Unpickler, **dataclass_asdict(unpickler_config), buffers=None)
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: Any) -> bytes:
+ """
+ Returns the pickle representation of the Python object `packet`.
+
+ Roughly equivalent to::
+
+ def serialize(self, packet):
+ return pickle.dumps(packet)
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
with BytesIO() as buffer:
self.__pickler_cls(buffer).dump(packet)
pickle: bytes = buffer.getvalue()
@@ -95,10 +141,28 @@ def serialize(self, packet: _ST_contra) -> bytes:
return pickle
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> Any:
+ """
+ Creates a Python object representing the raw pickle :term:`packet` from `data`.
+
+ Roughly equivalent to::
+
+ def deserialize(self, data):
+ return pickle.loads(data)
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: Too little or too much data to parse.
+ DeserializeError: An unrelated deserialization error occurred.
+
+ Returns:
+ the deserialized Python object.
+ """
with BytesIO(data) as buffer:
try:
- packet: _DT_co = self.__unpickler_cls(buffer).load()
+ packet: Any = self.__unpickler_cls(buffer).load()
except Exception as exc:
raise DeserializeError(str(exc) or "Invalid token", error_info={"data": data}) from exc
finally:
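
A minimal round-trip sketch for the ``PickleSerializer`` API documented above, assuming the module path from the diff header (``easynetwork.serializers.pickle``)::

    from easynetwork.serializers.pickle import PicklerConfig, PickleSerializer

    # pickler_optimize=True runs pickletools.optimize() on the dump output,
    # trading CPU time for a smaller payload.
    serializer = PickleSerializer(PicklerConfig(protocol=5), pickler_optimize=True)

    data = serializer.serialize({"user": "alice", "id": 42})
    assert serializer.deserialize(data) == {"user": "alice", "id": 42}
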
diff --git a/src/easynetwork/serializers/struct.py b/src/easynetwork/serializers/struct.py
index da6d3276..14daf000 100644
--- a/src/easynetwork/serializers/struct.py
+++ b/src/easynetwork/serializers/struct.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""struct.Struct-based network packet serializer module"""
@@ -9,50 +20,143 @@
from abc import abstractmethod
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any, Generic, NamedTuple, TypeVar, final
+from typing import TYPE_CHECKING, Any, NamedTuple, TypeVar, final
+from .._typevars import _DTOPacketT
from ..exceptions import DeserializeError
from .base_stream import FixedSizePacketSerializer
if TYPE_CHECKING:
- from struct import Struct as _Struct
+ import struct as _typing_struct
from _typeshed import SupportsKeysAndGetItem
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
-
_ENDIANNESS_CHARACTERS: frozenset[str] = frozenset({"@", "=", "<", ">", "!"})
-class AbstractStructSerializer(FixedSizePacketSerializer[_ST_contra, _DT_co]):
+class AbstractStructSerializer(FixedSizePacketSerializer[_DTOPacketT]):
+ r"""
+ A base class for structured data.
+
+ To use the serializer directly without additional layers, it is possible to create a subclass with the minimal requirements::
+
+ >>> class StructSerializer(AbstractStructSerializer):
+ ... __slots__ = ()
+ ... def iter_values(self, packet):
+ ... return packet
+ ... def from_tuple(self, packet_tuple):
+ ... return packet_tuple
+ ...
+
+ And then::
+
+ >>> s = StructSerializer(">ii")
+ >>> data = s.serialize((10, 20))
+ >>> data
+ b'\x00\x00\x00\n\x00\x00\x00\x14'
+ >>> s.deserialize(data)
+ (10, 20)
+
+ This is an abstract class in order to allow you to include fancy structures like :class:`ctypes.Structure` subclasses.
+
+ Note:
+ If the endianness is not specified, the network byte order is used::
+
+ >>> s = StructSerializer("qq")
+ >>> s.struct.format
+ '!qq'
+
+ See Also:
+ The :class:`.NamedTupleStructSerializer` class.
+ """
+
__slots__ = ("__s", "__error_cls")
def __init__(self, format: str) -> None:
+ """
+ Parameters:
+ format: The :class:`struct.Struct` format definition string.
+ """
from struct import Struct, error
if format and format[0] not in _ENDIANNESS_CHARACTERS:
format = f"!{format}" # network byte order
struct = Struct(format)
super().__init__(struct.size)
- self.__s: _Struct = struct
+ self.__s: _typing_struct.Struct = struct
self.__error_cls = error
@abstractmethod
- def iter_values(self, packet: _ST_contra) -> Iterable[Any]:
- raise NotImplementedError
+ def iter_values(self, packet: _DTOPacketT, /) -> Iterable[Any]:
+ """
+ Returns an object suitable for :meth:`struct.Struct.pack`.
- @final
- def serialize(self, packet: _ST_contra) -> bytes:
- return self.__s.pack(*self.iter_values(packet))
+ See :meth:`serialize` for details.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ an iterable object yielding the structure values.
+ """
+ raise NotImplementedError
@abstractmethod
- def from_tuple(self, t: tuple[Any, ...]) -> _DT_co:
+ def from_tuple(self, packet_tuple: tuple[Any, ...], /) -> _DTOPacketT:
+ """
+ Finishes the packet deserialization by parsing the tuple obtained by :meth:`struct.Struct.unpack`.
+
+ See :meth:`deserialize` for details.
+
+ Parameters:
+ packet_tuple: A tuple of the elements extracted from the structure.
+
+ Returns:
+ the deserialized Python object.
+ """
raise NotImplementedError
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def serialize(self, packet: _DTOPacketT) -> bytes:
+ """
+ Returns the structured data representation of the Python object `packet`.
+
+ Roughly equivalent to::
+
+ def serialize(self, packet):
+ to_pack = self.iter_values(packet)
+ return self.struct.pack(*to_pack)
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
+ return self.__s.pack(*self.iter_values(packet))
+
+ @final
+ def deserialize(self, data: bytes) -> _DTOPacketT:
+ """
+ Creates a Python object representing the structure from `data`.
+
+ Roughly equivalent to::
+
+ def deserialize(self, data):
+ unpacked_data = self.struct.unpack(data)
+ return self.from_tuple(unpacked_data)
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: A :class:`struct.error` has been raised.
+ DeserializeError: :meth:`from_tuple` crashed.
+
+ Returns:
+ the deserialized Python object.
+ """
try:
packet_tuple: tuple[Any, ...] = self.__s.unpack(data)
except self.__error_cls as exc:
@@ -67,46 +171,119 @@ def deserialize(self, data: bytes) -> _DT_co:
@property
@final
- def struct(self) -> _Struct:
+ def struct(self) -> _typing_struct.Struct:
+ """The underlying :class:`struct.Struct` instance. Read-only attribute."""
return self.__s
-_NT = TypeVar("_NT", bound=NamedTuple)
+_NamedTupleVar = TypeVar("_NamedTupleVar", bound=NamedTuple)
+
+
+class NamedTupleStructSerializer(AbstractStructSerializer[_NamedTupleVar]):
+ r"""
+ Generic class to handle a :term:`named tuple` with a :class:`struct.Struct` object.
+
+ Accepts classes created directly with the :func:`collections.namedtuple` factory::
+
+ >>> import collections
+ >>> Point = collections.namedtuple("Point", ("x", "y"))
+
+ ...or declared with :class:`typing.NamedTuple`::
+
+ from typing import NamedTuple
+
+ class Point(NamedTuple):
+ x: int
+ y: int
+
+ They are used like this::
+
+ >>> s = NamedTupleStructSerializer(Point, {"x": "i", "y": "i"}, format_endianness=">")
+ >>> s.struct.format
+ '>ii'
+ >>> data = s.serialize(Point(x=10, y=20))
+ >>> data
+ b'\x00\x00\x00\n\x00\x00\x00\x14'
+ >>> s.deserialize(data)
+ Point(x=10, y=20)
+ """
-class NamedTupleStructSerializer(AbstractStructSerializer[_NT, _NT], Generic[_NT]):
__slots__ = ("__namedtuple_cls", "__string_fields", "__encoding", "__unicode_errors", "__strip_trailing_nul")
def __init__(
self,
- namedtuple_cls: type[_NT],
+ namedtuple_cls: type[_NamedTupleVar],
field_formats: SupportsKeysAndGetItem[str, str],
format_endianness: str = "",
encoding: str | None = "utf-8",
unicode_errors: str = "strict",
strip_string_trailing_nul_bytes: bool = True,
) -> None:
+ r"""
+ Parameters:
+ namedtuple_cls: A :term:`named tuple` type.
+ field_formats: A mapping of each `namedtuple_cls` field to its struct format string.
+ format_endianness: The endianness character. Defaults to empty string.
+ encoding: String fields encoding. Can be disabled by setting it to :data:`None`.
+ unicode_errors: Controls how encoding errors are handled. Ignored if `encoding` is set to :data:`None`.
+ strip_string_trailing_nul_bytes: If `True` (the default), removes ``\0`` characters at the end of a string field.
+
+ See Also:
+ :ref:`standard-encodings` and :ref:`error-handlers`.
+ """
string_fields: set[str] = set()
+ if format_endianness:
+ if format_endianness not in _ENDIANNESS_CHARACTERS:
+ raise ValueError("Invalid endianness character")
+
for field in field_formats.keys():
field_fmt = field_formats[field]
if any(c in _ENDIANNESS_CHARACTERS for c in field_fmt):
raise ValueError(f"{field!r}: Invalid field format")
- if field_fmt and field_fmt[-1] == "s":
+ if field_fmt[-1:] == "s":
if len(field_fmt) > 1 and not field_fmt[:-1].isdecimal():
raise ValueError(f"{field!r}: Invalid field format")
string_fields.add(field)
elif len(field_fmt) != 1 or not field_fmt.isalpha():
raise ValueError(f"{field!r}: Invalid field format")
- super().__init__(f"{format_endianness}{''.join(map(field_formats.__getitem__, namedtuple_cls._fields))}")
- self.__namedtuple_cls: type[_NT] = namedtuple_cls
+ super().__init__(f"{format_endianness}{''.join(field_formats[field] for field in namedtuple_cls._fields)}")
+ self.__namedtuple_cls: type[_NamedTupleVar] = namedtuple_cls
self.__string_fields: frozenset[str] = frozenset(string_fields)
self.__encoding: str | None = encoding
self.__unicode_errors: str = unicode_errors
self.__strip_trailing_nul = bool(strip_string_trailing_nul_bytes)
@final
- def iter_values(self, packet: _NT) -> _NT:
+ def iter_values(self, packet: _NamedTupleVar) -> _NamedTupleVar:
+ """
+ Returns the named tuple to pack using :meth:`struct.Struct.pack`.
+
+ In most cases, this method will directly return `packet`.
+
+ If there are string fields and `encoding` is not :data:`None`, this method will return a shallow copy of
+ the named tuple with the encoded strings.
+
+ Example:
+ The named tuple::
+
+ >>> from typing import NamedTuple
+ >>> class Person(NamedTuple):
+ ... name: str
+ ... age: int
+
+ In application::
+
+ >>> s = NamedTupleStructSerializer(Person, {"name": "10s", "age": "I"})
+ >>> s.iter_values(Person(name="John", age=20))
+ Person(name=b'John', age=20)
+
+ Parameters:
+ packet: The `namedtuple_cls` instance.
+
+ Returns:
+ a `namedtuple_cls` instance.
+ """
if not isinstance(packet, self.__namedtuple_cls):
namedtuple_name = self.__namedtuple_cls.__name__
raise TypeError(f"Expected a {namedtuple_name} instance, got {packet!r}")
@@ -117,8 +294,39 @@ def iter_values(self, packet: _NT) -> _NT:
return packet
@final
- def from_tuple(self, t: tuple[Any, ...]) -> _NT:
- p = self.__namedtuple_cls._make(t)
+ def from_tuple(self, packet_tuple: tuple[Any, ...], /) -> _NamedTupleVar:
+ r"""
+ Constructs the named tuple from the given tuple.
+
+ If there are string fields and `encoding` is not :data:`None`, their values will be decoded.
+
+ If `strip_string_trailing_nul_bytes` was set to :data:`True`, the ``"\0"`` characters at the end of the string fields,
+ added for padding, will be removed.
+
+ Example:
+ The named tuple::
+
+ >>> from typing import NamedTuple
+ >>> class Person(NamedTuple):
+ ... name: str
+ ... age: int
+
+ In application::
+
+ >>> s = NamedTupleStructSerializer(Person, {"name": "10s", "age": "I"})
+ >>> t = s.struct.unpack(b'John\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14')
+ >>> t
+ (b'John\x00\x00\x00\x00\x00\x00', 20)
+ >>> s.from_tuple(t)
+ Person(name='John', age=20)
+
+ Parameters:
+ packet_tuple: A tuple of the elements extracted from the structure.
+
+ Returns:
+ a `namedtuple_cls` instance.
+ """
+ p = self.__namedtuple_cls._make(packet_tuple)
string_fields: dict[str, bytes] = {field: getattr(p, field) for field in self.__string_fields}
if string_fields:
to_replace: dict[str, Any] | None = None
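
Beyond the ``NamedTupleStructSerializer`` shown above, the abstract base also accepts arbitrary packet carriers. A hedged sketch mapping a plain dataclass onto a fixed-size structure (the ``Point`` dataclass is illustrative, not part of the library)::

    import dataclasses
    from typing import Any

    from easynetwork.serializers.struct import AbstractStructSerializer

    @dataclasses.dataclass
    class Point:
        x: int
        y: int

    class PointSerializer(AbstractStructSerializer[Point]):
        __slots__ = ()

        def __init__(self) -> None:
            super().__init__(">ii")  # two big-endian 32-bit signed integers

        def iter_values(self, packet: Point) -> tuple[int, int]:
            return (packet.x, packet.y)

        def from_tuple(self, packet_tuple: tuple[Any, ...]) -> Point:
            x, y = packet_tuple
            return Point(x, y)

    s = PointSerializer()
    assert s.deserialize(s.serialize(Point(10, 20))) == Point(10, 20)
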
diff --git a/src/easynetwork/serializers/wrapper/__init__.py b/src/easynetwork/serializers/wrapper/__init__.py
index e6cd2e80..f3fdf552 100644
--- a/src/easynetwork/serializers/wrapper/__init__.py
+++ b/src/easynetwork/serializers/wrapper/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""EasyNetwork's packet serializer module"""
@@ -6,7 +17,6 @@
from __future__ import annotations
__all__ = [
- "AbstractCompressorSerializer",
"BZ2CompressorSerializer",
"Base64EncoderSerializer",
"EncryptorSerializer",
diff --git a/src/easynetwork/serializers/wrapper/base64.py b/src/easynetwork/serializers/wrapper/base64.py
index 6242e7dd..163cd65c 100644
--- a/src/easynetwork/serializers/wrapper/base64.py
+++ b/src/easynetwork/serializers/wrapper/base64.py
@@ -1,7 +1,18 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
+# http://www.apache.org/licenses/LICENSE-2.0
#
-"""base64 encoding serializer module"""
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+"""base64 encoder serializer module"""
from __future__ import annotations
@@ -11,43 +22,62 @@
import os
from collections.abc import Callable
-from typing import Literal, TypeVar, assert_never, final
+from typing import Literal, assert_never, final
+from ..._typevars import _DTOPacketT
from ...exceptions import DeserializeError
from ..abc import AbstractPacketSerializer
from ..base_stream import AutoSeparatedPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+class Base64EncoderSerializer(AutoSeparatedPacketSerializer[_DTOPacketT]):
+ """
+ A :term:`serializer wrapper` to handle base64 encoded data, built on top of :mod:`base64` module.
+ """
-class Base64EncoderSerializer(AutoSeparatedPacketSerializer[_ST_contra, _DT_co]):
__slots__ = ("__serializer", "__encode", "__decode", "__compare_digest", "__decode_error_cls", "__checksum")
def __init__(
self,
- serializer: AbstractPacketSerializer[_ST_contra, _DT_co],
+ serializer: AbstractPacketSerializer[_DTOPacketT],
*,
alphabet: Literal["standard", "urlsafe"] = "urlsafe",
checksum: bool | str | bytes = False,
separator: bytes = b"\r\n",
) -> None:
+ """
+ Parameters:
+ serializer: The serializer to wrap.
+ alphabet: The base64 alphabet to use. Possible values are:
+
+ - ``"standard"``: Use standard alphabet.
+
+ - ``"urlsafe"``: Use URL- and filesystem-safe alphabet.
+
+ Defaults to ``"urlsafe"``.
+ checksum: If `True`, appends a sha256 checksum to the serialized data.
+ `checksum` can also be a URL-safe base64-encoded 32-byte key for a signed checksum.
+ separator: Separator token for :class:`AutoSeparatedPacketSerializer`. Used in an incremental serialization context.
+ """
import base64
import binascii
- from hashlib import sha256 as hashlib_sha256
- from hmac import compare_digest, digest as hmac_digest
+ from hmac import compare_digest
super().__init__(separator=separator, incremental_serialize_check_separator=not separator.isspace())
if not isinstance(serializer, AbstractPacketSerializer):
raise TypeError(f"Expected a serializer instance, got {serializer!r}")
- self.__serializer: AbstractPacketSerializer[_ST_contra, _DT_co] = serializer
+ self.__serializer: AbstractPacketSerializer[_DTOPacketT] = serializer
self.__checksum: Callable[[bytes], bytes] | None
match checksum:
case False:
self.__checksum = None
case True:
+ from hashlib import sha256 as hashlib_sha256
+
self.__checksum = lambda data: hashlib_sha256(data).digest()
case str() | bytes():
+ from hmac import digest as hmac_digest
+
try:
key: bytes = base64.urlsafe_b64decode(checksum)
except binascii.Error as exc:
@@ -73,19 +103,46 @@ def __init__(
@classmethod
def generate_key(cls) -> bytes:
+ """
+ Generates a fresh key suitable for signed checksums.
+
+ Keep this some place safe!
+ """
import base64
return base64.urlsafe_b64encode(os.urandom(32))
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT) -> bytes:
+ """
+ Serializes `packet` and encodes the result in base64.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
data = self.__serializer.serialize(packet)
if (checksum := self.__checksum) is not None:
data += checksum(data)
return self.__encode(data)
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> _DTOPacketT:
+ """
+ Decodes base64 token `data` and deserializes the result.
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: Invalid base64 token.
+ Exception: The underlying serializer raised an exception.
+
+ Returns:
+ the deserialized Python object.
+ """
try:
data = self.__decode(data)
except self.__decode_error_cls:
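
A usage sketch combining this wrapper with the pickle serializer from earlier in this diff; ``generate_key()`` yields the signed-checksum key described above::

    from easynetwork.serializers.pickle import PickleSerializer
    from easynetwork.serializers.wrapper.base64 import Base64EncoderSerializer

    key = Base64EncoderSerializer.generate_key()  # URL-safe base64-encoded 32-byte key

    serializer = Base64EncoderSerializer(PickleSerializer(), checksum=key)
    token = serializer.serialize(["a", "b"])  # base64 text: safe for line-oriented transports
    assert serializer.deserialize(token) == ["a", "b"]
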
diff --git a/src/easynetwork/serializers/wrapper/compressor.py b/src/easynetwork/serializers/wrapper/compressor.py
index dfa44548..e8f67c7f 100644
--- a/src/easynetwork/serializers/wrapper/compressor.py
+++ b/src/easynetwork/serializers/wrapper/compressor.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Data compressor serializer module"""
@@ -6,7 +17,6 @@
from __future__ import annotations
__all__ = [
- "AbstractCompressorSerializer",
"BZ2CompressorSerializer",
"ZlibCompressorSerializer",
]
@@ -14,13 +24,15 @@
import abc
from collections import deque
from collections.abc import Generator
-from typing import Final, Protocol, TypeVar, final
+from typing import TYPE_CHECKING, Protocol, final
+from ..._typevars import _DTOPacketT
from ...exceptions import DeserializeError, IncrementalDeserializeError
from ..abc import AbstractIncrementalPacketSerializer, AbstractPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+if TYPE_CHECKING:
+ import bz2 as _typing_bz2
+ import zlib as _typing_zlib
class CompressorInterface(Protocol, metaclass=abc.ABCMeta):
@@ -49,44 +61,82 @@ def unused_data(self) -> bytes:
raise NotImplementedError
-class AbstractCompressorSerializer(AbstractIncrementalPacketSerializer[_ST_contra, _DT_co]):
+class AbstractCompressorSerializer(AbstractIncrementalPacketSerializer[_DTOPacketT]):
+ """
+ A :term:`serializer wrapper` base class for compressors.
+ """
+
__slots__ = ("__serializer", "__expected_error")
def __init__(
self,
- serializer: AbstractPacketSerializer[_ST_contra, _DT_co],
+ serializer: AbstractPacketSerializer[_DTOPacketT],
expected_decompress_error: type[Exception] | tuple[type[Exception], ...],
) -> None:
+ """
+ Parameters:
+ serializer: The serializer to wrap.
+ expected_decompress_error: Errors that can be raised by :meth:`DecompressorInterface.decompress` implementation,
+ which must be considered as deserialization errors.
+ """
super().__init__()
if not isinstance(serializer, AbstractPacketSerializer):
raise TypeError(f"Expected a serializer instance, got {serializer!r}")
if not isinstance(expected_decompress_error, tuple):
expected_decompress_error = (expected_decompress_error,)
assert all(issubclass(e, Exception) for e in expected_decompress_error) # nosec assert_used
- self.__serializer: AbstractPacketSerializer[_ST_contra, _DT_co] = serializer
+ self.__serializer: AbstractPacketSerializer[_DTOPacketT] = serializer
self.__expected_error: tuple[type[Exception], ...] = expected_decompress_error
@abc.abstractmethod
def new_compressor_stream(self) -> CompressorInterface:
+ """
+ Returns:
+ an object suitable for data compression.
+ """
raise NotImplementedError
@abc.abstractmethod
def new_decompressor_stream(self) -> DecompressorInterface:
+ """
+ Returns:
+ an object suitable for data decompression.
+ """
raise NotImplementedError
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT) -> bytes:
+ """
+ Serializes `packet` and returns the compressed data.
+
+ See :meth:`.AbstractPacketSerializer.serialize` documentation for details.
+ """
compressor: CompressorInterface = self.new_compressor_stream()
return compressor.compress(self.__serializer.serialize(packet)) + compressor.flush()
@final
- def incremental_serialize(self, packet: _ST_contra) -> Generator[bytes, None, None]:
+ def incremental_serialize(self, packet: _DTOPacketT) -> Generator[bytes, None, None]:
+ """
+ Serializes `packet` and yields the compressed data parts.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_serialize` documentation for details.
+ """
compressor: CompressorInterface = self.new_compressor_stream()
yield compressor.compress(self.__serializer.serialize(packet))
yield compressor.flush()
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> _DTOPacketT:
+ """
+ Decompresses `data` and returns the deserialized packet.
+
+ See :meth:`.AbstractPacketSerializer.deserialize` documentation for details.
+
+ Raises:
+ DeserializeError: :meth:`DecompressorInterface.decompress` does not read until EOF (unused trailing data).
+ DeserializeError: :meth:`DecompressorInterface.decompress` raised an error that matches `expected_decompress_error`.
+ Exception: Any other error raised by :meth:`DecompressorInterface.decompress` or the underlying serializer.
+ """
decompressor: DecompressorInterface = self.new_decompressor_stream()
try:
data = decompressor.decompress(data)
@@ -106,7 +156,17 @@ def deserialize(self, data: bytes) -> _DT_co:
return self.__serializer.deserialize(data)
@final
- def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]]:
+ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DTOPacketT, bytes]]:
+ """
+ Yields until data decompression is finished and deserializes the decompressed data using the underlying serializer.
+
+ See :meth:`.AbstractIncrementalPacketSerializer.incremental_deserialize` documentation for details.
+
+ Raises:
+ IncrementalDeserializeError: :meth:`DecompressorInterface.decompress` raised an error
+ that matches `expected_decompress_error`.
+ Exception: Any other error raised by :meth:`DecompressorInterface.decompress` or the underlying serializer.
+ """
results: deque[bytes] = deque()
decompressor: DecompressorInterface = self.new_decompressor_stream()
while not decompressor.eof:
@@ -131,7 +191,7 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
del results, decompressor
try:
- packet: _DT_co = self.__serializer.deserialize(data)
+ packet: _DTOPacketT = self.__serializer.deserialize(data)
except DeserializeError as exc:
raise IncrementalDeserializeError(
f"Error while deserializing decompressed data: {exc}",
@@ -142,37 +202,64 @@ def incremental_deserialize(self) -> Generator[None, bytes, tuple[_DT_co, bytes]
return packet, unused_data
-class BZ2CompressorSerializer(AbstractCompressorSerializer[_ST_contra, _DT_co]):
- __slots__ = ("__compresslevel", "__compressor_factory", "__decompressor_factory")
+class BZ2CompressorSerializer(AbstractCompressorSerializer[_DTOPacketT]):
+ """
+ A :term:`serializer wrapper` to handle bzip2 compressed data, built on top of :mod:`bz2` module.
+ """
- BEST_COMPRESSION_LEVEL: Final[int] = 9
+ __slots__ = ("__compresslevel", "__compressor_factory", "__decompressor_factory")
- def __init__(self, serializer: AbstractPacketSerializer[_ST_contra, _DT_co], *, compress_level: int | None = None) -> None:
+ def __init__(
+ self,
+ serializer: AbstractPacketSerializer[_DTOPacketT],
+ *,
+ compress_level: int | None = None,
+ ) -> None:
+ """
+ Parameters:
+ serializer: The serializer to wrap.
+ compress_level: bzip2 compression level. Defaults to ``9``.
+ """
import bz2
super().__init__(serializer=serializer, expected_decompress_error=OSError)
- self.__compresslevel: int = compress_level if compress_level is not None else self.BEST_COMPRESSION_LEVEL
+ self.__compresslevel: int = compress_level if compress_level is not None else 9
self.__compressor_factory = bz2.BZ2Compressor
self.__decompressor_factory = bz2.BZ2Decompressor
@final
- def new_compressor_stream(self) -> CompressorInterface:
+ def new_compressor_stream(self) -> _typing_bz2.BZ2Compressor:
+ """
+ See :meth:`.AbstractCompressorSerializer.new_compressor_stream` documentation for details.
+ """
return self.__compressor_factory(self.__compresslevel)
@final
- def new_decompressor_stream(self) -> DecompressorInterface:
+ def new_decompressor_stream(self) -> _typing_bz2.BZ2Decompressor:
+ """
+ See :meth:`.AbstractCompressorSerializer.new_decompressor_stream` documentation for details.
+ """
return self.__decompressor_factory()
-class ZlibCompressorSerializer(AbstractCompressorSerializer[_ST_contra, _DT_co]):
+class ZlibCompressorSerializer(AbstractCompressorSerializer[_DTOPacketT]):
+ """
+ A :term:`serializer wrapper` to handle zlib compressed data, built on top of :mod:`zlib` module.
+ """
+
__slots__ = ("__compresslevel", "__compressor_factory", "__decompressor_factory")
def __init__(
self,
- serializer: AbstractPacketSerializer[_ST_contra, _DT_co],
+ serializer: AbstractPacketSerializer[_DTOPacketT],
*,
compress_level: int | None = None,
) -> None:
+ """
+ Parameters:
+ serializer: The serializer to wrap.
+ compress_level: zlib compression level. Defaults to :data:`zlib.Z_BEST_COMPRESSION`.
+ """
import zlib
super().__init__(serializer=serializer, expected_decompress_error=zlib.error)
@@ -181,9 +268,15 @@ def __init__(
self.__decompressor_factory = zlib.decompressobj
@final
- def new_compressor_stream(self) -> CompressorInterface:
+ def new_compressor_stream(self) -> _typing_zlib._Compress:
+ """
+ See :meth:`.AbstractCompressorSerializer.new_compressor_stream` documentation for details.
+ """
return self.__compressor_factory(self.__compresslevel)
@final
- def new_decompressor_stream(self) -> DecompressorInterface:
+ def new_decompressor_stream(self) -> _typing_zlib._Decompress:
+ """
+ See :meth:`.AbstractCompressorSerializer.new_decompressor_stream` documentation for details.
+ """
return self.__decompressor_factory()
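
A short sketch of the compressor wrappers, again stacked on the pickle serializer; both concrete classes share the ``AbstractCompressorSerializer`` contract documented above::

    from easynetwork.serializers.pickle import PickleSerializer
    from easynetwork.serializers.wrapper.compressor import BZ2CompressorSerializer

    # compress_level trades CPU time for payload size (the bzip2 default here is 9).
    serializer = BZ2CompressorSerializer(PickleSerializer(), compress_level=9)

    payload = serializer.serialize(list(range(1000)))
    assert serializer.deserialize(payload) == list(range(1000))
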
diff --git a/src/easynetwork/serializers/wrapper/encryptor.py b/src/easynetwork/serializers/wrapper/encryptor.py
index 34758ab9..d43c44b0 100644
--- a/src/easynetwork/serializers/wrapper/encryptor.py
+++ b/src/easynetwork/serializers/wrapper/encryptor.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""encrypted data serializer module"""
@@ -9,27 +20,38 @@
"EncryptorSerializer",
]
-from typing import TypeVar, final
+from typing import final
+from ..._typevars import _DTOPacketT
from ...exceptions import DeserializeError
from ..abc import AbstractPacketSerializer
from ..base_stream import AutoSeparatedPacketSerializer
-_ST_contra = TypeVar("_ST_contra", contravariant=True)
-_DT_co = TypeVar("_DT_co", covariant=True)
+class EncryptorSerializer(AutoSeparatedPacketSerializer[_DTOPacketT]):
+ """
+ A :term:`serializer wrapper` to handle encrypted data, built on top of :mod:`cryptography.fernet` module.
+
+ Needs ``encryption`` extra dependencies.
+ """
-class EncryptorSerializer(AutoSeparatedPacketSerializer[_ST_contra, _DT_co]):
__slots__ = ("__serializer", "__fernet", "__token_ttl", "__invalid_token_cls")
def __init__(
self,
- serializer: AbstractPacketSerializer[_ST_contra, _DT_co],
+ serializer: AbstractPacketSerializer[_DTOPacketT],
key: str | bytes,
*,
token_ttl: int | None = None,
separator: bytes = b"\r\n",
) -> None:
+ """
+ Parameters:
+ serializer: The serializer to wrap.
+ key: A URL-safe base64-encoded 32-byte key.
+ token_ttl: Token time-to-live. See :meth:`cryptography.fernet.Fernet.decrypt` for details.
+ separator: Separator token for :class:`AutoSeparatedPacketSerializer`. Used in an incremental serialization context.
+ """
try:
import cryptography.fernet
except ModuleNotFoundError as exc: # pragma: no cover
@@ -38,13 +60,21 @@ def __init__(
super().__init__(separator=separator, incremental_serialize_check_separator=not separator.isspace())
if not isinstance(serializer, AbstractPacketSerializer):
raise TypeError(f"Expected a serializer instance, got {serializer!r}")
- self.__serializer: AbstractPacketSerializer[_ST_contra, _DT_co] = serializer
+ self.__serializer: AbstractPacketSerializer[_DTOPacketT] = serializer
self.__fernet = cryptography.fernet.Fernet(key)
self.__token_ttl = token_ttl
self.__invalid_token_cls = cryptography.fernet.InvalidToken
@classmethod
def generate_key(cls) -> bytes:
+ """
+ Generates a fresh key suitable for encryption.
+
+ Keep this some place safe!
+
+ Implementation details:
+ Delegates to :meth:`cryptography.fernet.Fernet.generate_key`.
+ """
try:
import cryptography.fernet
except ModuleNotFoundError as exc: # pragma: no cover
@@ -53,12 +83,34 @@ def generate_key(cls) -> bytes:
return cryptography.fernet.Fernet.generate_key()
@final
- def serialize(self, packet: _ST_contra) -> bytes:
+ def serialize(self, packet: _DTOPacketT) -> bytes:
+ """
+ Serializes `packet` and encrypts the result.
+
+ Parameters:
+ packet: The Python object to serialize.
+
+ Returns:
+ a byte sequence.
+ """
data = self.__serializer.serialize(packet)
return self.__fernet.encrypt(data)
@final
- def deserialize(self, data: bytes) -> _DT_co:
+ def deserialize(self, data: bytes) -> _DTOPacketT:
+ """
+ Decrypts token `data` and deserializes the result.
+
+ Parameters:
+ data: The byte sequence to deserialize.
+
+ Raises:
+ DeserializeError: Invalid token.
+ Exception: The underlying serializer raised an exception.
+
+ Returns:
+ the deserialized Python object.
+ """
try:
data = self.__fernet.decrypt(data, ttl=self.__token_ttl)
except self.__invalid_token_cls:
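
A sketch of the encryptor wrapper (requires the ``encryption`` extra, i.e. the ``cryptography`` package)::

    from easynetwork.serializers.pickle import PickleSerializer
    from easynetwork.serializers.wrapper.encryptor import EncryptorSerializer

    key = EncryptorSerializer.generate_key()  # store it somewhere safe

    serializer = EncryptorSerializer(PickleSerializer(), key, token_ttl=60)
    token = serializer.serialize({"secret": 1})
    assert serializer.deserialize(token) == {"secret": 1}  # fails once the 60 s TTL expires
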
diff --git a/src/easynetwork/tools/__init__.py b/src/easynetwork/tools/__init__.py
index d7a34d08..f7b2fe06 100644
--- a/src/easynetwork/tools/__init__.py
+++ b/src/easynetwork/tools/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""EasyNetwork tools module"""
diff --git a/src/easynetwork/tools/_lock.py b/src/easynetwork/tools/_lock.py
index 34d30b4c..f5b4acca 100644
--- a/src/easynetwork/tools/_lock.py
+++ b/src/easynetwork/tools/_lock.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""
@@ -14,10 +25,10 @@
from collections.abc import Callable
from typing import Generic, TypeVar, cast, overload
-_L = TypeVar("_L", bound="threading.RLock | threading.Lock")
+_LockType = TypeVar("_LockType", bound="threading.RLock | threading.Lock")
-class ForkSafeLock(Generic[_L]):
+class ForkSafeLock(Generic[_LockType]):
__slots__ = ("__pid", "__unsafe_lock", "__lock_factory", "__weakref__")
@overload
@@ -25,17 +36,17 @@ def __init__(self: ForkSafeLock[threading.RLock], lock_factory: None = ...) -> N
...
@overload
- def __init__(self, lock_factory: Callable[[], _L]) -> None:
+ def __init__(self, lock_factory: Callable[[], _LockType]) -> None:
...
- def __init__(self, lock_factory: Callable[[], _L] | None = None) -> None:
+ def __init__(self, lock_factory: Callable[[], _LockType] | None = None) -> None:
if lock_factory is None:
- lock_factory = cast(Callable[[], _L], threading.RLock)
- self.__unsafe_lock: _L = lock_factory()
+ lock_factory = cast(Callable[[], _LockType], threading.RLock)
+ self.__unsafe_lock: _LockType = lock_factory()
self.__pid: int = os.getpid()
- self.__lock_factory: Callable[[], _L] = lock_factory
+ self.__lock_factory: Callable[[], _LockType] = lock_factory
- def get(self) -> _L:
+ def get(self) -> _LockType:
if self.__pid != os.getpid():
self.__unsafe_lock = self.__lock_factory()
self.__pid = os.getpid()
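
``ForkSafeLock`` is an internal helper; the renamed type variable does not change its behavior. A sketch of the fork-safety guarantee it provides::

    import threading

    from easynetwork.tools._lock import ForkSafeLock

    _lock: ForkSafeLock[threading.RLock] = ForkSafeLock()  # defaults to threading.RLock

    def critical_section() -> None:
        # get() compares os.getpid() with the PID recorded earlier and rebuilds
        # the lock after a fork, so a child process never blocks on a lock that
        # a thread of the parent held at fork time.
        with _lock.get():
            ...
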
diff --git a/src/easynetwork/tools/_stream.py b/src/easynetwork/tools/_stream.py
index 1997d1a0..ac569041 100644
--- a/src/easynetwork/tools/_stream.py
+++ b/src/easynetwork/tools/_stream.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Stream network packet serializer handler module"""
@@ -12,14 +23,12 @@
from collections import deque
from collections.abc import Generator, Iterator
-from typing import Any, Generic, TypeVar, final
+from typing import Any, Generic, final
+from .._typevars import _ReceivedPacketT, _SentPacketT
from ..exceptions import StreamProtocolParseError
from ..protocol import StreamProtocol
-_SentPacketT = TypeVar("_SentPacketT")
-_ReceivedPacketT = TypeVar("_ReceivedPacketT")
-
@final
@Iterator.register
diff --git a/src/easynetwork/tools/_utils.py b/src/easynetwork/tools/_utils.py
index 32dc02d9..3962ee02 100644
--- a/src/easynetwork/tools/_utils.py
+++ b/src/easynetwork/tools/_utils.py
@@ -1,3 +1,17 @@
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
from __future__ import annotations
__all__ = [
@@ -29,7 +43,6 @@
import socket as _socket
import threading
import time
-import traceback
from collections.abc import Callable, Iterator
from math import isinf, isnan
from typing import TYPE_CHECKING, Any, Literal, ParamSpec, TypeGuard, TypeVar, assert_never
@@ -295,21 +308,6 @@ def transform_future_exception(exc: BaseException) -> BaseException:
return exc
-def recursively_clear_exception_traceback_frames(exc: BaseException) -> None:
- _recursively_clear_exception_traceback_frames_with_memo(exc, set())
-
-
-def _recursively_clear_exception_traceback_frames_with_memo(exc: BaseException, memo: set[int]) -> None:
- if id(exc) in memo:
- return
- memo.add(id(exc))
- traceback.clear_frames(exc.__traceback__)
- if exc.__context__ is not None:
- _recursively_clear_exception_traceback_frames_with_memo(exc.__context__, memo)
- if exc.__cause__ is not exc.__context__ and exc.__cause__ is not None:
- _recursively_clear_exception_traceback_frames_with_memo(exc.__cause__, memo)
-
-
def remove_traceback_frames_in_place(exc: _ExcType, n: int) -> _ExcType:
tb = exc.__traceback__
for _ in range(n):
@@ -331,12 +329,15 @@ def lock_with_timeout(
yield None
return
timeout = validate_timeout_delay(timeout, positive_check=False)
- _start = time.perf_counter()
- if not lock.acquire(True, timeout if timeout > 0 else 0.0):
- raise TimeoutError(error_message)
- try:
- _end = time.perf_counter()
- timeout -= _end - _start
+ with contextlib.ExitStack() as stack:
+ # Try to acquire without blocking first
+ if lock.acquire(blocking=False):
+ stack.push(lock)
+ else:
+ _start = time.perf_counter()
+ if timeout <= 0 or not lock.acquire(True, timeout):
+ raise TimeoutError(error_message)
+ stack.push(lock)
+ _end = time.perf_counter()
+ timeout -= _end - _start
yield timeout if timeout > 0 else 0.0
- finally:
- lock.release()
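
The reworked ``lock_with_timeout`` acquires opportunistically before falling back to a bounded blocking acquire, then yields the remaining time budget. A standalone sketch of the same pattern (independent of the library's internal helper)::

    import contextlib
    import threading
    import time
    from collections.abc import Iterator

    @contextlib.contextmanager
    def lock_with_timeout(lock: threading.Lock, timeout: float) -> Iterator[float]:
        # Non-blocking attempt first: a zero budget can still succeed if the
        # lock happens to be free.
        if not lock.acquire(blocking=False):
            start = time.perf_counter()
            if timeout <= 0 or not lock.acquire(True, timeout):
                raise TimeoutError("lock acquisition timed out")
            timeout -= time.perf_counter() - start
        with contextlib.ExitStack() as stack:
            stack.push(lock)  # registers lock.__exit__ (i.e. release) on scope exit
            yield max(timeout, 0.0)
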
diff --git a/src/easynetwork/tools/constants.py b/src/easynetwork/tools/constants.py
index 4003c349..a6bc7f03 100644
--- a/src/easynetwork/tools/constants.py
+++ b/src/easynetwork/tools/constants.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""EasyNetwork's constants module"""
diff --git a/src/easynetwork/tools/socket.py b/src/easynetwork/tools/socket.py
index 7891ef22..e944ee95 100644
--- a/src/easynetwork/tools/socket.py
+++ b/src/easynetwork/tools/socket.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""Network socket module"""
@@ -7,6 +18,8 @@
__all__ = [
"AddressFamily",
+ "IPv4SocketAddress",
+ "IPv6SocketAddress",
"ISocket",
"SocketProxy",
"SupportsSocketOptions",
@@ -22,14 +35,23 @@
import functools
import os
import socket as _socket
+import threading
from collections.abc import Callable
from enum import IntEnum, unique
from struct import Struct
-from typing import TYPE_CHECKING, Any, Literal, NamedTuple, ParamSpec, Protocol, TypeAlias, TypeVar, assert_never, final, overload
-
-if TYPE_CHECKING:
- import threading as _threading
-
+from typing import (
+ Any,
+ Literal,
+ NamedTuple,
+ ParamSpec,
+ Protocol,
+ TypeAlias,
+ TypeVar,
+ assert_never,
+ final,
+ overload,
+ runtime_checkable,
+)
_P = ParamSpec("_P")
_R = TypeVar("_R")
@@ -37,6 +59,10 @@
@unique
class AddressFamily(IntEnum):
+ """
+ Enumeration of supported socket address families.
+ """
+
AF_INET = _socket.AF_INET
AF_INET6 = _socket.AF_INET6
@@ -55,6 +81,10 @@ def __str__(self) -> str: # pragma: no cover
return f"({self.host!r}, {self.port:d})"
def for_connection(self) -> tuple[str, int]:
+ """
+ Returns:
+ a ``(host, port)`` pair.
+ """
return self.host, self.port
@@ -68,10 +98,15 @@ def __str__(self) -> str: # pragma: no cover
return f"({self.host!r}, {self.port:d})"
def for_connection(self) -> tuple[str, int]:
+ """
+ Returns:
+ a ``(host, port)`` pair.
+ """
return self.host, self.port
SocketAddress: TypeAlias = IPv4SocketAddress | IPv6SocketAddress
+"""Alias for :class:`IPv4SocketAddress` | :class:`IPv6SocketAddress`"""
@overload
@@ -92,6 +127,33 @@ def new_socket_address(addr: tuple[Any, ...], family: int) -> SocketAddress:
def new_socket_address(addr: tuple[Any, ...], family: int) -> SocketAddress:
+ """
+ Factory to create a :data:`SocketAddress` from `addr`.
+
+ Example:
+ >>> import socket
+ >>> new_socket_address(("127.0.0.1", 12345), socket.AF_INET)
+ IPv4SocketAddress(host='127.0.0.1', port=12345)
+ >>> new_socket_address(("::1", 12345), socket.AF_INET6)
+ IPv6SocketAddress(host='::1', port=12345, flowinfo=0, scope_id=0)
+ >>> new_socket_address(("::1", 12345, 12, 345), socket.AF_INET6)
+ IPv6SocketAddress(host='::1', port=12345, flowinfo=12, scope_id=345)
+ >>> new_socket_address(("127.0.0.1", 12345), socket.AF_APPLETALK)
+ Traceback (most recent call last):
+ ...
+ ValueError: is not a valid AddressFamily
+
+ Parameters:
+ addr: The address in the form ``(host, port)`` or ``(host, port, flow, scope_id)``.
+ family: The socket family.
+
+ Raises:
+ ValueError: Invalid `family`.
+ TypeError: Invalid `addr`.
+
+ Returns:
+ a :data:`SocketAddress` named tuple.
+ """
family = AddressFamily(family)
match family:
case AddressFamily.AF_INET:
@@ -102,6 +164,7 @@ def new_socket_address(addr: tuple[Any, ...], family: int) -> SocketAddress:
assert_never(family)
+@runtime_checkable
class SupportsSocketOptions(Protocol):
@overload
def getsockopt(self, level: int, optname: int, /) -> int:
@@ -111,6 +174,12 @@ def getsockopt(self, level: int, optname: int, /) -> int:
def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes:
...
+ def getsockopt(self, *args: Any) -> int | bytes: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.getsockopt`.
+ """
+ ...
+
@overload
def setsockopt(self, level: int, optname: int, value: int | bytes, /) -> None:
...
@@ -119,35 +188,71 @@ def setsockopt(self, level: int, optname: int, value: int | bytes, /) -> None:
def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> None:
...
+ def setsockopt(self, *args: Any) -> None: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.setsockopt`.
+ """
+ ...
+
+@runtime_checkable
class ISocket(SupportsSocketOptions, Protocol):
def fileno(self) -> int: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.fileno`.
+ """
...
def get_inheritable(self) -> bool: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.get_inheritable`.
+ """
...
def getpeername(self) -> _socket._RetAddress: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.getpeername`.
+ """
...
def getsockname(self) -> _socket._RetAddress: # pragma: no cover
+ """
+ Similar to :meth:`socket.socket.getsockname`.
+ """
...
@property # pragma: no cover
def family(self) -> int:
+ """
+ Similar to :attr:`socket.socket.family`.
+ """
...
@property # pragma: no cover
def type(self) -> int:
+ """
+ Similar to :attr:`socket.socket.type`.
+ """
...
@property # pragma: no cover
def proto(self) -> int:
+ """
+ Similar to :attr:`socket.socket.proto`.
+ """
...
@final
class SocketProxy:
+ """
+ A socket-like wrapper for exposing real transport sockets.
+
+ These objects can be safely returned by APIs like
+ `client.socket`. All potentially disruptive
+ operations (like :meth:`socket.socket.close`) are banned.
+ """
+
__slots__ = ("__socket", "__lock_ctx", "__runner", "__weakref__")
def __init_subclass__(cls) -> None: # pragma: no cover
@@ -157,11 +262,43 @@ def __init__(
self,
socket: ISocket,
*,
- lock: Callable[[], _threading.Lock | _threading.RLock] | None = None,
+ lock: Callable[[], threading.Lock | threading.RLock] | None = None,
runner: Callable[[Callable[[], Any]], Any] | None = None,
) -> None:
+ """
+ Parameters:
+ socket: The socket-like object to wrap.
+ lock: A callback function to use when a lock is required to gain access to the wrapped socket.
+ runner: A callback function to use to execute the socket method.
+
+ Warning:
+ If `lock` is omitted, the proxy object is *not* thread-safe.
+
+ `runner` can be used for concurrent call management.
+
+ Example:
+ Examples of how :meth:`ISocket.fileno` would be called according to `lock` and `runner` values.
+
+ Neither `lock` nor `runner`::
+
+ return socket.fileno()
+
+ `lock` but no `runner`::
+
+ with lock():
+ return socket.fileno()
+
+ `runner` but no `lock`::
+
+ return runner(socket.fileno)
+
+ Both `lock` and `runner`::
+
+ with lock():
+ return runner(socket.fileno)
+ """
self.__socket: ISocket = socket
- self.__lock_ctx: Callable[[], _threading.Lock | _threading.RLock] | None = lock
+ self.__lock_ctx: Callable[[], threading.Lock | threading.RLock] | None = lock
self.__runner: Callable[[Callable[[], Any]], Any] | None = runner
def __repr__(self) -> str:
@@ -196,14 +333,15 @@ def __execute(self, func: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwar
return func(*args, **kwargs)
def fileno(self) -> int:
+ """
+ Calls :meth:`ISocket.fileno`.
+ """
return self.__execute(self.__socket.fileno)
- def dup(self) -> _socket.socket:
- new_socket = _socket.fromfd(self.fileno(), self.family, self.type, self.proto)
- new_socket.setblocking(False)
- return new_socket
-
def get_inheritable(self) -> bool:
+ """
+ Calls :meth:`ISocket.get_inheritable`.
+ """
return self.__execute(self.__socket.get_inheritable)
@overload
@@ -215,6 +353,9 @@ def getsockopt(self, level: int, optname: int, buflen: int, /) -> bytes:
...
def getsockopt(self, *args: Any) -> int | bytes:
+ """
+ Calls :meth:`ISocket.getsockopt`.
+ """
return self.__execute(self.__socket.getsockopt, *args)
@overload
@@ -226,36 +367,86 @@ def setsockopt(self, level: int, optname: int, value: None, optlen: int, /) -> N
...
def setsockopt(self, *args: Any) -> None:
+ """
+ Calls :meth:`ISocket.setsockopt`.
+ """
return self.__execute(self.__socket.setsockopt, *args)
- def getpeername(self) -> SocketAddress:
- socket = self.__socket
- return new_socket_address(self.__execute(socket.getpeername), socket.family)
+ def getpeername(self) -> _socket._RetAddress:
+ """
+ Calls :meth:`ISocket.getpeername`.
+ """
+ return self.__execute(self.__socket.getpeername)
- def getsockname(self) -> SocketAddress:
- socket = self.__socket
- return new_socket_address(self.__execute(socket.getsockname), socket.family)
+ def getsockname(self) -> _socket._RetAddress:
+ """
+ Calls :meth:`ISocket.getsockname`.
+ """
+ return self.__execute(self.__socket.getsockname)
@property
- def family(self) -> _socket.AddressFamily:
- return _socket.AddressFamily(self.__socket.family)
+ def family(self) -> int:
+ """The socket family."""
+ family: int = self.__socket.family
+ try:
+ return _socket.AddressFamily(family)
+ except ValueError:
+ return family
@property
- def type(self) -> _socket.SocketKind:
- return _socket.SocketKind(self.__socket.type)
+ def type(self) -> int:
+ """The socket type."""
+ socket_type = self.__socket.type
+ try:
+ return _socket.SocketKind(socket_type)
+ except ValueError:
+ return socket_type
@property
def proto(self) -> int:
+ """The socket protocol."""
return self.__socket.proto
def set_tcp_nodelay(sock: SupportsSocketOptions, state: bool) -> None:
+ """
+ Enables or disables Nagle's algorithm on a TCP socket.
+
+ This is equivalent to::
+
+ sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, state)
+
+ *except* that if :data:`socket.TCP_NODELAY` is not defined, it is silently ignored.
+
+ Parameters:
+ sock: The socket.
+ state: :data:`True` to disable Nagle's algorithm, :data:`False` to enable it.
+
+ Note:
+ Modern operating systems enable it by default.
+ """
state = bool(state)
with contextlib.suppress(AttributeError):
sock.setsockopt(_socket.IPPROTO_TCP, _socket.TCP_NODELAY, state)
def set_tcp_keepalive(sock: SupportsSocketOptions, state: bool) -> None:
+ """
+ Enables or disables the keep-alive protocol on a TCP socket.
+
+ This is equivalent to::
+
+ sock.setsockopt(SOL_SOCKET, SO_KEEPALIVE, state)
+
+ *except* that if :data:`socket.SO_KEEPALIVE` is not defined, it is silently ignored.
+
+ Parameters:
+ sock: The socket.
+ state: :data:`True` to enable, :data:`False` to disable.
+
+ Note:
+ Modern operating systems disable it by default.
+ """
state = bool(state)
with contextlib.suppress(AttributeError):
sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_KEEPALIVE, state)
@@ -272,12 +463,56 @@ def set_tcp_keepalive(sock: SupportsSocketOptions, state: bool) -> None:
def get_socket_linger_struct() -> Struct:
+ """
+ Returns a :class:`~struct.Struct` representation of the SO_LINGER structure. See :manpage:`socket(7)` for details.
+
+ The format of the returned struct may vary depending on the operating system.
+ """
return _linger_struct
def enable_socket_linger(sock: SupportsSocketOptions, timeout: int) -> None:
+ """
+ Enables socket linger.
+
+ This is equivalent to::
+
+ sock.setsockopt(SOL_SOCKET, SO_LINGER, linger_struct)
+
+ ``linger_struct`` is determined by the operating system. See :func:`get_socket_linger_struct` for details.
+
+ See the Unix manual page :manpage:`socket(7)` for the meaning of the argument `timeout`.
+
+ Parameters:
+ sock: The socket.
+ timeout: The linger timeout.
+
+ Note:
+ Modern operating systems disable it by default.
+
+ See Also:
+ :func:`disable_socket_linger`
+ """
sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_LINGER, _linger_struct.pack(True, timeout))
def disable_socket_linger(sock: SupportsSocketOptions) -> None:
+ """
+ Disables socket linger.
+
+ This is equivalent to::
+
+ sock.setsockopt(SOL_SOCKET, SO_LINGER, linger_struct)
+
+ ``linger_struct`` is determined by the operating system. See :func:`get_socket_linger_struct` for details.
+
+ Parameters:
+ sock: The socket.
+
+ Note:
+ Modern operating systems disable it by default.
+
+ See Also:
+ :func:`enable_socket_linger`
+ """
sock.setsockopt(_socket.SOL_SOCKET, _socket.SO_LINGER, _linger_struct.pack(False, 0))
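
A sketch tying the socket helpers together; note that the ``lock`` argument of ``SocketProxy`` is a zero-argument callable returning the lock to hold, not the lock itself::

    import socket
    import threading

    from easynetwork.tools.socket import SocketProxy, set_tcp_nodelay

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    _sock_lock = threading.Lock()

    proxy = SocketProxy(sock, lock=lambda: _sock_lock)

    set_tcp_nodelay(proxy, True)  # disables Nagle's algorithm; no-op if TCP_NODELAY is undefined
    print(proxy.family, proxy.fileno())

    sock.close()  # close() is intentionally absent from the proxy
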
diff --git a/src/easynetwork_asyncio/__init__.py b/src/easynetwork_asyncio/__init__.py
index b28ca3d1..361cc7bc 100644
--- a/src/easynetwork_asyncio/__init__.py
+++ b/src/easynetwork_asyncio/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
diff --git a/src/easynetwork_asyncio/_utils.py b/src/easynetwork_asyncio/_utils.py
index be03faeb..e14740da 100644
--- a/src/easynetwork_asyncio/_utils.py
+++ b/src/easynetwork_asyncio/_utils.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
diff --git a/src/easynetwork_asyncio/backend.py b/src/easynetwork_asyncio/backend.py
index 29ba67ac..df5751f0 100644
--- a/src/easynetwork_asyncio/backend.py
+++ b/src/easynetwork_asyncio/backend.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
@@ -28,7 +39,7 @@
ssl = _ssl
del _ssl
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
+from easynetwork.api_async.backend.abc import AsyncBackend as AbstractAsyncBackend
from easynetwork.api_async.backend.sniffio import current_async_library_cvar as _sniffio_current_async_library_cvar
from ._utils import create_connection, create_datagram_socket, ensure_resolved, open_listener_sockets_from_getaddrinfo_result
@@ -44,7 +55,7 @@
import concurrent.futures
from ssl import SSLContext as _SSLContext
- from easynetwork.api_async.backend.abc import AbstractAcceptedSocket, ILock
+ from easynetwork.api_async.backend.abc import AcceptedSocket as AbstractAcceptedSocket, ILock
_P = ParamSpec("_P")
_T = TypeVar("_T")
@@ -165,12 +176,12 @@ async def sleep_forever(self) -> NoReturn:
def spawn_task(
self,
- coro_func: Callable[_P, Coroutine[Any, Any, _T]],
+ coro_func: Callable[..., Coroutine[Any, Any, _T]],
/,
- *args: _P.args,
- **kwargs: _P.kwargs,
+ *args: Any,
+ context: contextvars.Context | None = None,
) -> SystemTask[_T]:
- return SystemTask(coro_func(*args, **kwargs))
+ return SystemTask(coro_func(*args), context=context)
def create_task_group(self) -> TaskGroup:
return TaskGroup()
@@ -456,7 +467,7 @@ async def wait_future(self, future: concurrent.futures.Future[_T_co]) -> _T_co:
if future.cancelled():
# Task cancellation prevails over future cancellation
await asyncio.sleep(0)
- return future.result()
+ return future.result(timeout=0)
finally:
del future
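
The hunks above rename `AbstractAsyncBackend` to `AsyncBackend` (kept under an alias) and change the `spawn_task()` contract: keyword arguments are no longer forwarded, and an optional `contextvars.Context` can be supplied so the task runs in a snapshot of the caller's context. A minimal standalone sketch of the underlying asyncio behaviour (Python 3.11+, where `asyncio.create_task()` accepts `context=`), without the easynetwork wrapper:

```python
import asyncio
import contextvars

cvar: contextvars.ContextVar[str] = contextvars.ContextVar("cvar", default="unset")


async def main() -> None:
    async def coroutine(value: str) -> None:
        cvar.set(value)

    cvar.set("outer")
    ctx = contextvars.copy_context()

    # This mirrors what SystemTask now does internally with the given context:
    task = asyncio.create_task(coroutine("inner"), context=ctx)
    await task

    assert cvar.get() == "outer"         # the caller's context is untouched
    assert ctx.run(cvar.get) == "inner"  # the copy passed as `context` saw the write


asyncio.run(main())
```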
diff --git a/src/easynetwork_asyncio/datagram/__init__.py b/src/easynetwork_asyncio/datagram/__init__.py
index 85068fc2..77a43053 100644
--- a/src/easynetwork_asyncio/datagram/__init__.py
+++ b/src/easynetwork_asyncio/datagram/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
diff --git a/src/easynetwork_asyncio/datagram/endpoint.py b/src/easynetwork_asyncio/datagram/endpoint.py
index d652feca..94b1c6ba 100644
--- a/src/easynetwork_asyncio/datagram/endpoint.py
+++ b/src/easynetwork_asyncio/datagram/endpoint.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
diff --git a/src/easynetwork_asyncio/datagram/socket.py b/src/easynetwork_asyncio/datagram/socket.py
index c95bd2b4..bb6318f1 100644
--- a/src/easynetwork_asyncio/datagram/socket.py
+++ b/src/easynetwork_asyncio/datagram/socket.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
@@ -13,7 +24,7 @@
import socket as _socket
from typing import TYPE_CHECKING, Any, final
-from easynetwork.api_async.backend.abc import AbstractAsyncDatagramSocketAdapter
+from easynetwork.api_async.backend.abc import AsyncDatagramSocketAdapter as AbstractAsyncDatagramSocketAdapter
from easynetwork.tools._utils import error_from_errno as _error_from_errno
from ..socket import AsyncSocket
diff --git a/src/easynetwork_asyncio/runner.py b/src/easynetwork_asyncio/runner.py
index 719bf5ce..b3077351 100644
--- a/src/easynetwork_asyncio/runner.py
+++ b/src/easynetwork_asyncio/runner.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
@@ -13,7 +24,7 @@
from collections.abc import Callable, Coroutine
from typing import Any, Self, TypeVar
-from easynetwork.api_async.backend.abc import AbstractRunner
+from easynetwork.api_async.backend.abc import Runner as AbstractRunner
_T = TypeVar("_T")
diff --git a/src/easynetwork_asyncio/socket.py b/src/easynetwork_asyncio/socket.py
index 083b7b2f..8a744ea8 100644
--- a/src/easynetwork_asyncio/socket.py
+++ b/src/easynetwork_asyncio/socket.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
@@ -131,7 +142,7 @@ async def shutdown(self, how: int, /) -> None:
if did_shutdown_SHUT_WR:
self.__shutdown_write = True
- # Yield out of the conflict detections scopes
+        # Yield outside the conflict detection scopes
await asyncio.sleep(0)
@contextlib.contextmanager
diff --git a/src/easynetwork_asyncio/stream/__init__.py b/src/easynetwork_asyncio/stream/__init__.py
index 85068fc2..77a43053 100644
--- a/src/easynetwork_asyncio/stream/__init__.py
+++ b/src/easynetwork_asyncio/stream/__init__.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
diff --git a/src/easynetwork_asyncio/stream/listener.py b/src/easynetwork_asyncio/stream/listener.py
index 5371b75b..6dcd0334 100644
--- a/src/easynetwork_asyncio/stream/listener.py
+++ b/src/easynetwork_asyncio/stream/listener.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
@@ -15,9 +26,9 @@
from typing import TYPE_CHECKING, Any, final
from easynetwork.api_async.backend.abc import (
- AbstractAcceptedSocket,
- AbstractAsyncListenerSocketAdapter,
- AbstractAsyncStreamSocketAdapter,
+ AcceptedSocket as AbstractAcceptedSocket,
+ AsyncListenerSocketAdapter as AbstractAsyncListenerSocketAdapter,
+ AsyncStreamSocketAdapter as AbstractAsyncStreamSocketAdapter,
)
from ..socket import AsyncSocket
diff --git a/src/easynetwork_asyncio/stream/socket.py b/src/easynetwork_asyncio/stream/socket.py
index 684e5eee..fc4c8031 100644
--- a/src/easynetwork_asyncio/stream/socket.py
+++ b/src/easynetwork_asyncio/stream/socket.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.async
@@ -14,7 +25,10 @@
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any, Self, cast, final
-from easynetwork.api_async.backend.abc import AbstractAsyncHalfCloseableStreamSocketAdapter, AbstractAsyncStreamSocketAdapter
+from easynetwork.api_async.backend.abc import (
+ AsyncHalfCloseableStreamSocketAdapter as AbstractAsyncHalfCloseableStreamSocketAdapter,
+ AsyncStreamSocketAdapter as AbstractAsyncStreamSocketAdapter,
+)
from easynetwork.tools._utils import error_from_errno as _error_from_errno
from ..socket import AsyncSocket
diff --git a/src/easynetwork_asyncio/tasks.py b/src/easynetwork_asyncio/tasks.py
index 89e2c296..b25c557f 100644
--- a/src/easynetwork_asyncio/tasks.py
+++ b/src/easynetwork_asyncio/tasks.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
@@ -16,7 +27,12 @@
from typing import TYPE_CHECKING, Any, ParamSpec, Self, TypeVar, final
from weakref import WeakKeyDictionary
-from easynetwork.api_async.backend.abc import AbstractSystemTask, AbstractTask, AbstractTaskGroup, AbstractTimeoutHandle
+from easynetwork.api_async.backend.abc import (
+ SystemTask as AbstractSystemTask,
+ Task as AbstractTask,
+ TaskGroup as AbstractTaskGroup,
+ TimeoutHandle as AbstractTimeoutHandle,
+)
if TYPE_CHECKING:
from types import TracebackType
@@ -82,8 +98,13 @@ def _asyncio_task(self) -> asyncio.Task[_T_co]:
class SystemTask(Task[_T_co], AbstractSystemTask[_T_co]):
__slots__ = ()
- def __init__(self, coroutine: Coroutine[Any, Any, _T_co]) -> None:
- super().__init__(asyncio.create_task(coroutine))
+ def __init__(
+ self,
+ coroutine: Coroutine[Any, Any, _T_co],
+ *,
+ context: contextvars.Context | None = None,
+ ) -> None:
+ super().__init__(asyncio.create_task(coroutine, context=context))
async def join_or_cancel(self) -> _T_co:
task = self._asyncio_task
@@ -120,22 +141,12 @@ async def __aexit__(
def start_soon(
self,
- __coro_func: Callable[_P, Coroutine[Any, Any, _T]],
- /,
- *args: _P.args,
- **kwargs: _P.kwargs,
- ) -> AbstractTask[_T]:
- return Task(self.__asyncio_tg.create_task(__coro_func(*args, **kwargs)))
-
- def start_soon_with_context(
- self,
- context: contextvars.Context,
coro_func: Callable[_P, Coroutine[Any, Any, _T]],
/,
- *args: _P.args,
- **kwargs: _P.kwargs,
+ *args: Any,
+ context: contextvars.Context | None = None,
) -> AbstractTask[_T]:
- return Task(self.__asyncio_tg.create_task(coro_func(*args, **kwargs), context=context))
+ return Task(self.__asyncio_tg.create_task(coro_func(*args), context=context))
@final
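
With `start_soon_with_context()` folded into `start_soon(coro_func, *args, context=...)`, the `**kwargs: _P.kwargs` forwarding is gone. A sketch of one way callers might adapt, using the stdlib `asyncio.TaskGroup` (Python 3.11+) that the wrapper delegates to; binding keyword arguments up front with `functools.partial` is an illustrative workaround, not something this diff prescribes:

```python
import asyncio
import functools


async def main() -> None:
    async def work(name: str, *, repeat: int = 1) -> str:
        return name * repeat

    async with asyncio.TaskGroup() as tg:
        # Keyword arguments are no longer forwarded by start_soon(), so a
        # caller that needs them can bind the arguments before spawning:
        task = tg.create_task(functools.partial(work, "ab", repeat=2)())

    assert task.result() == "abab"


asyncio.run(main())
```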
diff --git a/src/easynetwork_asyncio/threads.py b/src/easynetwork_asyncio/threads.py
index cbdd9f11..05a16b11 100644
--- a/src/easynetwork_asyncio/threads.py
+++ b/src/easynetwork_asyncio/threads.py
@@ -1,4 +1,15 @@
-# Copyright (c) 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Copyright 2021-2023, Francis Clairicia-Rose-Claire-Josephine
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
#
#
"""asyncio engine for easynetwork.api_async
@@ -14,7 +25,7 @@
from collections.abc import Callable, Coroutine
from typing import Any, ParamSpec, TypeVar, final
-from easynetwork.api_async.backend.abc import AbstractThreadsPortal
+from easynetwork.api_async.backend.abc import ThreadsPortal as AbstractThreadsPortal
from easynetwork.api_async.backend.sniffio import current_async_library_cvar as _sniffio_current_async_library_cvar
from easynetwork.tools._utils import transform_future_exception as _transform_future_exception
@@ -34,12 +45,7 @@ def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None) -> None:
def run_coroutine(self, coro_func: Callable[_P, Coroutine[Any, Any, _T]], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
self.__check_running_loop()
- future = self.__run_coroutine_soon(coro_func, *args, **kwargs)
- del coro_func, args, kwargs
- try:
- return self.__get_result(future)
- finally:
- del future
+ return self.__get_result(self.__run_coroutine_soon(coro_func, *args, **kwargs))
def __run_coroutine_soon(
self,
@@ -58,15 +64,10 @@ def __run_coroutine_soon(
def run_sync(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> _T:
self.__check_running_loop()
- future = self.__run_sync_soon(func, *args, **kwargs)
- del func, args, kwargs
- try:
- return self.__get_result(future)
- finally:
- del future
+ return self.__get_result(self.__run_sync_soon(func, *args, **kwargs))
def __run_sync_soon(self, func: Callable[_P, _T], /, *args: _P.args, **kwargs: _P.kwargs) -> concurrent.futures.Future[_T]:
- def callback(future: concurrent.futures.Future[_T]) -> None:
+ def callback() -> None:
try:
result = func(*args, **kwargs)
except BaseException as exc:
@@ -75,8 +76,6 @@ def callback(future: concurrent.futures.Future[_T]) -> None:
raise
else:
future.set_result(result)
- finally:
- del future
ctx = contextvars.copy_context()
@@ -86,7 +85,7 @@ def callback(future: concurrent.futures.Future[_T]) -> None:
future: concurrent.futures.Future[_T] = concurrent.futures.Future()
future.set_running_or_notify_cancel()
- self.__loop.call_soon_threadsafe(callback, future, context=ctx)
+ self.__loop.call_soon_threadsafe(callback, context=ctx)
return future
@staticmethod
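
The threads-portal change above replaces a callback that received the `Future` as a positional argument with a closure that captures it, letting `call_soon_threadsafe()` be invoked with only the `context=` keyword. A standalone sketch of that pattern (not the library's actual implementation, which also special-cases `SystemExit`/`KeyboardInterrupt`):

```python
import asyncio
import concurrent.futures
import contextvars
from collections.abc import Callable


def run_sync_soon(loop: asyncio.AbstractEventLoop, func: Callable[[], int]) -> concurrent.futures.Future[int]:
    future: concurrent.futures.Future[int] = concurrent.futures.Future()
    future.set_running_or_notify_cancel()

    def callback() -> None:
        # Closes over `future` and `func` instead of taking them as arguments.
        try:
            future.set_result(func())
        except BaseException as exc:
            future.set_exception(exc)

    ctx = contextvars.copy_context()
    loop.call_soon_threadsafe(callback, context=ctx)
    return future


async def main() -> None:
    loop = asyncio.get_running_loop()

    def from_thread() -> int:
        # Blocks in the worker thread until the event loop runs the callback.
        return run_sync_soon(loop, lambda: 21 * 2).result()

    assert await asyncio.to_thread(from_thread) == 42


asyncio.run(main())
```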
diff --git a/tests/functional_test/test_async/test_backend/test_asyncio_backend.py b/tests/functional_test/test_async/test_backend/test_asyncio_backend.py
index ef5eb890..2246aa02 100644
--- a/tests/functional_test/test_async/test_backend/test_asyncio_backend.py
+++ b/tests/functional_test/test_async/test_backend/test_asyncio_backend.py
@@ -309,6 +309,20 @@ async def coroutine(value: int) -> int:
with pytest.raises(ZeroDivisionError):
await task.join()
+ async def test____spawn_task____with_context(
+ self,
+ backend: AsyncioBackend,
+ ) -> None:
+ async def coroutine(value: str) -> None:
+ cvar_for_test.set(value)
+
+ cvar_for_test.set("something")
+ ctx = contextvars.copy_context()
+ task = backend.spawn_task(coroutine, "other", context=ctx)
+ await task.wait()
+ assert cvar_for_test.get() == "something"
+ assert ctx.run(cvar_for_test.get) == "other"
+
async def test____create_task_group____task_pool(
self,
backend: AsyncioBackend,
@@ -399,7 +413,7 @@ async def coroutine(value: str) -> None:
async with backend.create_task_group() as task_group:
cvar_for_test.set("something")
ctx = contextvars.copy_context()
- task = task_group.start_soon_with_context(ctx, coroutine, value="other")
+ task = task_group.start_soon(coroutine, "other", context=ctx)
await task.wait()
assert cvar_for_test.get() == "something"
assert ctx.run(cvar_for_test.get) == "other"
diff --git a/tests/functional_test/test_async/test_backend/test_futures.py b/tests/functional_test/test_async/test_backend/test_futures.py
index 420adfea..e26eb656 100644
--- a/tests/functional_test/test_async/test_backend/test_futures.py
+++ b/tests/functional_test/test_async/test_backend/test_futures.py
@@ -5,21 +5,14 @@
import time
from collections.abc import AsyncIterator
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
-from easynetwork.api_async.backend.factory import AsyncBackendFactory
-from easynetwork.api_async.backend.futures import AsyncThreadPoolExecutor
+from easynetwork.api_async.backend.futures import AsyncExecutor
import pytest
import pytest_asyncio
@pytest.mark.asyncio
-class TestAsyncThreadPoolExecutor:
- @pytest.fixture
- @staticmethod
- def backend() -> AbstractAsyncBackend:
- return AsyncBackendFactory.new("asyncio")
-
+class TestAsyncExecutor:
@pytest.fixture
@staticmethod
def max_workers(request: pytest.FixtureRequest) -> int | None:
@@ -27,13 +20,16 @@ def max_workers(request: pytest.FixtureRequest) -> int | None:
@pytest_asyncio.fixture
@staticmethod
- async def executor(backend: AbstractAsyncBackend, max_workers: int | None) -> AsyncIterator[AsyncThreadPoolExecutor]:
- async with AsyncThreadPoolExecutor(backend, max_workers=max_workers) as executor:
+ async def executor(max_workers: int | None) -> AsyncIterator[AsyncExecutor]:
+ async with AsyncExecutor(
+ concurrent.futures.ThreadPoolExecutor(max_workers=max_workers),
+ handle_contexts=True,
+ ) as executor:
yield executor
async def test____run____submit_and_wait(
self,
- executor: AsyncThreadPoolExecutor,
+ executor: AsyncExecutor,
) -> None:
def thread_fn(value: int) -> int:
return value
@@ -43,7 +39,7 @@ def thread_fn(value: int) -> int:
async def test____run____ignore_cancellation(
self,
event_loop: asyncio.AbstractEventLoop,
- executor: AsyncThreadPoolExecutor,
+ executor: AsyncExecutor,
) -> None:
task = event_loop.create_task(executor.run(time.sleep, 0.5))
@@ -57,7 +53,7 @@ async def test____run____ignore_cancellation(
@pytest.mark.feature_sniffio
async def test____run____sniffio_contextvar_reset(
self,
- executor: AsyncThreadPoolExecutor,
+ executor: AsyncExecutor,
) -> None:
import sniffio
@@ -74,7 +70,7 @@ def callback() -> str | None:
async def test____shutdown____idempotent(
self,
- executor: AsyncThreadPoolExecutor,
+ executor: AsyncExecutor,
) -> None:
await executor.shutdown()
await executor.shutdown()
@@ -83,7 +79,7 @@ async def test____shutdown____idempotent(
async def test____shutdown____cancel_futures(
self,
event_loop: asyncio.AbstractEventLoop,
- executor: AsyncThreadPoolExecutor,
+ executor: AsyncExecutor,
) -> None:
busy_task = event_loop.create_task(executor.run(time.sleep, 1))
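
Per the rewritten fixture above, `AsyncThreadPoolExecutor(backend, max_workers=...)` becomes `AsyncExecutor`, which wraps any `concurrent.futures.Executor` instead of owning a thread pool, and no longer takes a backend argument. A usage sketch based on that fixture; `handle_contexts=True` presumably opts into contextvars propagation, which is an inference from the name:

```python
import asyncio
import concurrent.futures

from easynetwork.api_async.backend.futures import AsyncExecutor


async def main() -> None:
    async with AsyncExecutor(
        concurrent.futures.ThreadPoolExecutor(max_workers=2),
        handle_contexts=True,  # presumably propagates contextvars into worker threads
    ) as executor:
        # run() submits the callable with its positional arguments and awaits it.
        assert await executor.run(pow, 2, 10) == 1024


asyncio.run(main())
```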
diff --git a/tests/functional_test/test_async/test_backend/test_tasks.py b/tests/functional_test/test_async/test_backend/test_tasks.py
index a381a504..5414ec31 100644
--- a/tests/functional_test/test_async/test_backend/test_tasks.py
+++ b/tests/functional_test/test_async/test_backend/test_tasks.py
@@ -3,7 +3,7 @@
import asyncio
from typing import TYPE_CHECKING, Any
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
+from easynetwork.api_async.backend.abc import AsyncBackend
from easynetwork.api_async.backend.factory import AsyncBackendFactory
from easynetwork.api_async.backend.tasks import SingleTaskRunner
@@ -17,12 +17,12 @@
class TestSingleTaskRunner:
@pytest.fixture
@staticmethod
- def backend() -> AbstractAsyncBackend:
+ def backend() -> AsyncBackend:
return AsyncBackendFactory.new("asyncio")
async def test____run____run_task_once(
self,
- backend: AbstractAsyncBackend,
+ backend: AsyncBackend,
mocker: MockerFixture,
) -> None:
coro_func = mocker.AsyncMock(spec=lambda *args, **kwargs: None, return_value=mocker.sentinel.task_result)
@@ -37,7 +37,7 @@ async def test____run____run_task_once(
async def test____run____early_cancel(
self,
- backend: AbstractAsyncBackend,
+ backend: AsyncBackend,
mocker: MockerFixture,
) -> None:
coro_func = mocker.AsyncMock(spec=lambda *args, **kwargs: None, return_value=mocker.sentinel.task_result)
@@ -52,7 +52,7 @@ async def test____run____early_cancel(
coro_func.assert_not_awaited()
coro_func.assert_not_called()
- async def test____run____cancel_while_running(self, backend: AbstractAsyncBackend) -> None:
+ async def test____run____cancel_while_running(self, backend: AsyncBackend) -> None:
async def coro_func(value: int) -> int:
return await asyncio.sleep(3600, value)
@@ -72,7 +72,7 @@ async def coro_func(value: int) -> int:
async def test____run____unhandled_exceptions(
self,
- backend: AbstractAsyncBackend,
+ backend: AsyncBackend,
mocker: MockerFixture,
) -> None:
my_exc = OSError()
@@ -87,7 +87,7 @@ async def test____run____unhandled_exceptions(
assert exc_info_run_1.value is my_exc
assert exc_info_run_2.value is my_exc
- async def test____run____waiting_task_is_cancelled(self, backend: AbstractAsyncBackend) -> None:
+ async def test____run____waiting_task_is_cancelled(self, backend: AsyncBackend) -> None:
inner_task: list[asyncio.Task[int] | None] = []
async def coro_func(value: int) -> int:
@@ -106,7 +106,7 @@ async def coro_func(value: int) -> int:
assert inner_task[0] is not None and inner_task[0].cancelled()
- async def test____run____waiting_task_is_cancelled____not_the_first_runner(self, backend: AbstractAsyncBackend) -> None:
+ async def test____run____waiting_task_is_cancelled____not_the_first_runner(self, backend: AsyncBackend) -> None:
async def coro_func(value: int) -> int:
return await asyncio.sleep(0.5, value)
diff --git a/tests/functional_test/test_communication/conftest.py b/tests/functional_test/test_communication/conftest.py
index 0fd38cfd..4085e11a 100644
--- a/tests/functional_test/test_communication/conftest.py
+++ b/tests/functional_test/test_communication/conftest.py
@@ -99,7 +99,7 @@ def socket_pair(localhost_ip: str, tcp_socket_factory: Callable[[], Socket]) ->
pass
csock.setblocking(True)
ssock, _ = lsock.accept()
- except: # noqa
+ except: # noqa: E722
csock.close()
raise
finally:
diff --git a/tests/functional_test/test_communication/serializer.py b/tests/functional_test/test_communication/serializer.py
index e91d8bdd..a68176a7 100644
--- a/tests/functional_test/test_communication/serializer.py
+++ b/tests/functional_test/test_communication/serializer.py
@@ -6,7 +6,7 @@
from easynetwork.serializers.abc import AbstractIncrementalPacketSerializer
-class StringSerializer(AbstractIncrementalPacketSerializer[str, str]):
+class StringSerializer(AbstractIncrementalPacketSerializer[str]):
"""
Serializer to use in order to test clients and servers
"""
diff --git a/tests/functional_test/test_communication/test_async/test_client/test_tcp.py b/tests/functional_test/test_communication/test_async/test_client/test_tcp.py
index 73515f11..eb085a4d 100644
--- a/tests/functional_test/test_communication/test_async/test_client/test_tcp.py
+++ b/tests/functional_test/test_communication/test_async/test_client/test_tcp.py
@@ -218,14 +218,31 @@ async def test____iter_received_packets____yields_available_packets_until_eof(
await event_loop.sock_sendall(server, b"A\nB\nC\nD\nE\nF")
event_loop.call_soon(server.shutdown, SHUT_WR)
event_loop.call_soon(server.close)
- assert [p async for p in client.iter_received_packets()] == ["A", "B", "C", "D", "E"]
+ assert [p async for p in client.iter_received_packets(timeout=None)] == ["A", "B", "C", "D", "E"]
- async def test____fileno____consistency(self, client: AsyncTCPNetworkClient[str, str]) -> None:
- assert client.fileno() == client.socket.fileno()
-
- async def test____fileno____closed_client(self, client: AsyncTCPNetworkClient[str, str]) -> None:
- await client.aclose()
- assert client.fileno() == -1
+ async def test____iter_received_packets____yields_available_packets_within_timeout(
+ self,
+ event_loop: asyncio.AbstractEventLoop,
+ client: AsyncTCPNetworkClient[str, str],
+ server: Socket,
+ ) -> None:
+ async def send_coro() -> None:
+ await event_loop.sock_sendall(server, b"A\n")
+ await asyncio.sleep(0.1)
+ await event_loop.sock_sendall(server, b"B\n")
+ await asyncio.sleep(0.4)
+ await event_loop.sock_sendall(server, b"C\n")
+ await asyncio.sleep(0.2)
+ await event_loop.sock_sendall(server, b"D\n")
+ await asyncio.sleep(0.5)
+ await event_loop.sock_sendall(server, b"E\n")
+
+ send_task = event_loop.create_task(send_coro())
+ try:
+ assert [p async for p in client.iter_received_packets(timeout=1)] == ["A", "B", "C", "D"]
+ finally:
+ send_task.cancel()
+ await asyncio.wait({send_task})
async def test____get_local_address____consistency(self, socket_family: int, client: AsyncTCPNetworkClient[str, str]) -> None:
address = client.get_local_address()
@@ -393,7 +410,7 @@ async def test____socket_property____connection_not_performed_yet(
backend_kwargs=backend_kwargs,
)
) as client:
- with pytest.raises(OSError):
+ with pytest.raises(AttributeError):
_ = client.socket
await client.wait_connected()
@@ -440,25 +457,6 @@ async def test____get_remote_address____connection_not_performed_yet(
assert client.get_remote_address()[:2] == remote_address
- async def test____fileno____connection_not_performed_yet(
- self,
- remote_address: tuple[str, int],
- stream_protocol: StreamProtocol[str, str],
- backend_kwargs: dict[str, Any],
- ) -> None:
- async with contextlib.aclosing(
- AsyncTCPNetworkClient(
- remote_address,
- stream_protocol,
- backend_kwargs=backend_kwargs,
- )
- ) as client:
- assert client.fileno() == -1
-
- await client.wait_connected()
-
- assert client.fileno() > -1
-
async def test____send_packet____recv_packet____implicit_connection(
self,
remote_address: tuple[str, int],
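
Alongside the removal of the `fileno()` API, `iter_received_packets()` now takes an explicit `timeout` argument, and accessing `.socket` before the connection is made raises `AttributeError` rather than `OSError`. Judging by the expected output in the new test ("E", arriving roughly 1.2 s in, is not yielded with `timeout=1`), the timeout appears to bound the iteration as a whole rather than each individual receive. A hedged usage sketch; the import path is assumed:

```python
from easynetwork.api_async.client.tcp import AsyncTCPNetworkClient  # import path assumed


async def drain(client: AsyncTCPNetworkClient[str, str]) -> list[str]:
    # timeout=None keeps the previous behaviour: iterate until EOF/close.
    # A finite timeout stops yielding once the time budget is exhausted.
    return [packet async for packet in client.iter_received_packets(timeout=1)]
```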
diff --git a/tests/functional_test/test_communication/test_async/test_client/test_udp.py b/tests/functional_test/test_communication/test_async/test_client/test_udp.py
index 9da8b09c..6de434bf 100644
--- a/tests/functional_test/test_communication/test_async/test_client/test_udp.py
+++ b/tests/functional_test/test_communication/test_async/test_client/test_udp.py
@@ -190,16 +190,34 @@ async def test____iter_received_packets____yields_available_packets_until_close(
await asyncio.sleep(0)
try:
# NOTE: Comparison using set because equality check does not verify order
- assert {p async for p in client.iter_received_packets()} == {"A", "B", "C", "D", "E", "F"}
+ assert {p async for p in client.iter_received_packets(timeout=None)} == {"A", "B", "C", "D", "E", "F"}
finally:
close_task.cancel()
+ await asyncio.wait({close_task})
- async def test____fileno____consistency(self, client: AsyncUDPNetworkClient[str, str]) -> None:
- assert client.fileno() == client.socket.fileno()
-
- async def test____fileno____closed_client(self, client: AsyncUDPNetworkClient[str, str]) -> None:
- await client.aclose()
- assert client.fileno() == -1
+ @use_asyncio_transport_xfail_uvloop
+ async def test____iter_received_packets____yields_available_packets_within_given_timeout(
+ self,
+ client: AsyncUDPNetworkClient[str, str],
+ server: DatagramEndpoint,
+ ) -> None:
+ async def send_coro() -> None:
+ await server.sendto(b"A", client.get_local_address())
+ await asyncio.sleep(0.1)
+ await server.sendto(b"B", client.get_local_address())
+ await asyncio.sleep(0.4)
+ await server.sendto(b"C", client.get_local_address())
+ await asyncio.sleep(0.2)
+ await server.sendto(b"D", client.get_local_address())
+ await asyncio.sleep(0.5)
+ await server.sendto(b"E", client.get_local_address())
+
+ send_task = asyncio.create_task(send_coro())
+ try:
+ assert [p async for p in client.iter_received_packets(timeout=1)] == ["A", "B", "C", "D"]
+ finally:
+ send_task.cancel()
+ await asyncio.wait({send_task})
async def test____get_local_address____consistency(self, socket_family: int, client: AsyncUDPNetworkClient[str, str]) -> None:
address = client.get_local_address()
@@ -353,7 +371,7 @@ async def test____socket_property____connection_not_performed_yet(
backend_kwargs=backend_kwargs,
)
) as client:
- with pytest.raises(OSError):
+ with pytest.raises(AttributeError):
_ = client.socket
await client.wait_connected()
@@ -400,25 +418,6 @@ async def test____get_remote_address____connection_not_performed_yet(
assert client.get_remote_address()[:2] == remote_address
- async def test____fileno____connection_not_performed_yet(
- self,
- remote_address: tuple[str, int],
- datagram_protocol: DatagramProtocol[str, str],
- backend_kwargs: dict[str, Any],
- ) -> None:
- async with contextlib.aclosing(
- AsyncUDPNetworkClient(
- remote_address,
- datagram_protocol,
- backend_kwargs=backend_kwargs,
- )
- ) as client:
- assert client.fileno() == -1
-
- await client.wait_connected()
-
- assert client.fileno() > -1
-
@use_asyncio_transport_xfail_uvloop
async def test____send_packet____recv_packet____implicit_connection(
self,
@@ -693,7 +692,7 @@ async def test____iter_received_packets_from____yields_available_packets(
await asyncio.sleep(0)
try:
# NOTE: Comparison using set because equality check does not verify order
- assert {(p, addr) async for p, addr in client.iter_received_packets_from()} == {
+ assert {(p, addr) async for p, addr in client.iter_received_packets_from(timeout=None)} == {
("A", expected_server_address),
("B", expected_server_address),
("C", expected_server_address),
@@ -703,13 +702,31 @@ async def test____iter_received_packets_from____yields_available_packets(
}
finally:
close_task.cancel()
+ await asyncio.wait({close_task})
- async def test____fileno____consistency(self, client: AsyncUDPNetworkEndpoint[str, str]) -> None:
- assert client.fileno() == client.socket.fileno()
-
- async def test____fileno____closed_client(self, client: AsyncUDPNetworkEndpoint[str, str]) -> None:
- await client.aclose()
- assert client.fileno() == -1
+ @use_asyncio_transport_xfail_uvloop
+ async def test____iter_received_packets____yields_available_packets_within_given_timeout(
+ self,
+ client: AsyncUDPNetworkEndpoint[str, str],
+ server: DatagramEndpoint,
+ ) -> None:
+ async def send_coro() -> None:
+ await server.sendto(b"A", client.get_local_address())
+ await asyncio.sleep(0.1)
+ await server.sendto(b"B", client.get_local_address())
+ await asyncio.sleep(0.4)
+ await server.sendto(b"C", client.get_local_address())
+ await asyncio.sleep(0.2)
+ await server.sendto(b"D", client.get_local_address())
+ await asyncio.sleep(0.5)
+ await server.sendto(b"E", client.get_local_address())
+
+ send_task = asyncio.create_task(send_coro())
+ try:
+ assert [p async for p, _ in client.iter_received_packets_from(timeout=1)] == ["A", "B", "C", "D"]
+ finally:
+ send_task.cancel()
+ await asyncio.wait({send_task})
async def test____get_local_address____consistency(
self,
diff --git a/tests/functional_test/test_communication/test_async/test_server/test_tcp.py b/tests/functional_test/test_communication/test_async/test_server/test_tcp.py
index 052ba0d4..11b01b33 100644
--- a/tests/functional_test/test_communication/test_async/test_server/test_tcp.py
+++ b/tests/functional_test/test_communication/test_async/test_server/test_tcp.py
@@ -4,14 +4,14 @@
import collections
import contextlib
import logging
-import math
import ssl
+import weakref
from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Sequence
from socket import IPPROTO_TCP, TCP_NODELAY
from typing import Any
from weakref import WeakValueDictionary
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
+from easynetwork.api_async.backend.abc import AsyncBackend
from easynetwork.api_async.server.handler import AsyncStreamClient, AsyncStreamRequestHandler
from easynetwork.api_async.server.tcp import AsyncTCPNetworkServer
from easynetwork.exceptions import (
@@ -68,47 +68,26 @@ class MyAsyncTCPRequestHandler(AsyncStreamRequestHandler[str, str]):
request_received: collections.defaultdict[tuple[Any, ...], list[str]]
request_count: collections.Counter[tuple[Any, ...]]
bad_request_received: collections.defaultdict[tuple[Any, ...], list[BaseProtocolParseError]]
- backend: AbstractAsyncBackend
- service_actions_count: int
- close_all_clients_on_service_actions: bool = False
close_all_clients_on_connection: bool = False
close_client_after_n_request: int = -1
- crash_service_actions: bool = False
- stop_listening: Callable[[], None]
+ server: AsyncTCPNetworkServer[str, str]
- def set_async_backend(self, backend: AbstractAsyncBackend) -> None:
- self.backend = backend
-
- async def service_init(self) -> None:
- await super().service_init()
- assert hasattr(self, "backend")
- assert not hasattr(self, "stop_listening")
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncTCPNetworkServer[str, str]) -> None:
+ await super().service_init(exit_stack, server)
+ self.server = server
self.connected_clients = WeakValueDictionary()
- self.service_actions_count = 0
self.request_received = collections.defaultdict(list)
self.request_count = collections.Counter()
self.bad_request_received = collections.defaultdict(list)
-
- async def service_actions(self) -> None:
- if self.crash_service_actions:
- raise Exception("CRASH")
- await super().service_actions()
- self.service_actions_count += 1
- if self.close_all_clients_on_service_actions:
- for client in list(self.connected_clients.values()):
- await client.aclose()
+ exit_stack.push_async_callback(self.service_quit)
async def service_quit(self) -> None:
del (
- self.backend,
self.connected_clients,
- self.service_actions_count,
self.request_received,
self.request_count,
self.bad_request_received,
- self.stop_listening,
)
- await super().service_quit()
async def on_connection(self, client: AsyncStreamClient[str]) -> None:
assert client.address not in self.connected_clients
@@ -122,14 +101,13 @@ async def on_disconnection(self, client: AsyncStreamClient[str]) -> None:
del self.connected_clients[client.address]
del self.request_count[client.address]
- def set_stop_listening_callback(self, stop_listening_callback: Callable[[], None]) -> None:
- super().set_stop_listening_callback(stop_listening_callback)
- self.stop_listening = stop_listening_callback
-
async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, str]:
if self.close_client_after_n_request >= 0 and self.request_count[client.address] >= self.close_client_after_n_request:
await client.aclose()
- request = yield
+ while True:
+ async with self.handle_bad_requests(client):
+ request = yield
+ break
self.request_count[client.address] += 1
match request:
case "__error__":
@@ -147,28 +125,38 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
case "__os_error__":
raise OSError("Server issue.")
case "__stop_listening__":
- self.stop_listening()
+ self.server.stop_listening()
await client.send_packet("successfully stop listening")
case "__wait__":
- request = yield
+ while True:
+ async with self.handle_bad_requests(client):
+ request = yield
+ break
self.request_received[client.address].append(request)
await client.send_packet(f"After wait: {request}")
case _:
self.request_received[client.address].append(request)
await client.send_packet(request.upper())
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError) -> None:
- assert isinstance(exc, StreamProtocolParseError)
- self.bad_request_received[client.address].append(exc)
- await client.send_packet("wrong encoding man.")
+ @contextlib.asynccontextmanager
+ async def handle_bad_requests(self, client: AsyncStreamClient[str]) -> AsyncIterator[None]:
+ try:
+ yield
+ except StreamProtocolParseError as exc:
+ self.bad_request_received[client.address].append(exc)
+ await client.send_packet("wrong encoding man.")
+
+ @property
+ def backend(self) -> AsyncBackend:
+ return self.server.get_backend()
class TimeoutRequestHandler(AsyncStreamRequestHandler[str, str]):
request_timeout: float = 1.0
timeout_on_second_yield: bool = False
- def set_async_backend(self, backend: AbstractAsyncBackend) -> None:
- self.backend = backend
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncTCPNetworkServer[str, str]) -> None:
+ self.backend = server.get_backend()
async def on_connection(self, client: AsyncStreamClient[str]) -> None:
await client.send_packet("milk")
@@ -185,9 +173,6 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
finally:
self.request_timeout = 1.0 # Force reset to 1 second in order not to overload the server
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class CancellationRequestHandler(AsyncStreamRequestHandler[str, str]):
async def on_connection(self, client: AsyncStreamClient[str]) -> None:
@@ -197,16 +182,13 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
yield
raise asyncio.CancelledError()
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class InitialHandshakeRequestHandler(AsyncStreamRequestHandler[str, str]):
- backend: AbstractAsyncBackend
+ backend: AsyncBackend
bypass_handshake: bool = False
- def set_async_backend(self, backend: AbstractAsyncBackend) -> None:
- self.backend = backend
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncTCPNetworkServer[str, str]) -> None:
+ self.backend = server.get_backend()
async def on_connection(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, str]:
await client.send_packet("milk")
@@ -229,14 +211,11 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
request = yield
await client.send_packet(request)
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class RequestRefusedHandler(AsyncStreamRequestHandler[str, str]):
refuse_after: int = 2**64
- async def service_init(self) -> None:
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncTCPNetworkServer[str, str]) -> None:
self.request_count: collections.Counter[AsyncStreamClient[str]] = collections.Counter()
async def on_connection(self, client: AsyncStreamClient[str]) -> None:
@@ -252,9 +231,6 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
self.request_count[client] += 1
await client.send_packet(request)
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class ErrorInRequestHandler(AsyncStreamRequestHandler[str, str]):
mute_thrown_exception: bool = False
@@ -272,9 +248,6 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
else:
await client.send_packet(request)
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- raise RandomError("An error occurred")
-
class ErrorBeforeYieldHandler(AsyncStreamRequestHandler[str, str]):
async def on_connection(self, client: AsyncStreamClient[str]) -> None:
@@ -285,30 +258,6 @@ async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, s
request = yield # type: ignore[unreachable]
await client.send_packet(request)
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
-
-class CloseHandleAfterBadRequest(AsyncStreamRequestHandler[str, str]):
- bad_request_return_value: bool | None = None
-
- async def on_connection(self, client: AsyncStreamClient[str]) -> None:
- await client.send_packet("milk")
-
- async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, str]:
- await client.send_packet("new handle")
- try:
- request = yield
- except GeneratorExit:
- await client.send_packet("GeneratorExit")
- raise
- else:
- await client.send_packet(request)
-
- async def bad_request(self, client: AsyncStreamClient[str], exc: BaseProtocolParseError) -> bool | None:
- await client.send_packet("wrong encoding")
- return self.bad_request_return_value
-
class MyAsyncTCPServer(AsyncTCPNetworkServer[str, str]):
__slots__ = ()
@@ -340,11 +289,6 @@ def request_handler(request: Any) -> AsyncStreamRequestHandler[str, str]:
request_handler_cls: type[AsyncStreamRequestHandler[str, str]] = getattr(request, "param", MyAsyncTCPRequestHandler)
return request_handler_cls()
- @pytest.fixture
- @staticmethod
- def service_actions_interval(request: Any) -> float | None:
- return getattr(request, "param", None)
-
@pytest.fixture
@staticmethod
def ssl_handshake_timeout(request: Any) -> float | None:
@@ -358,7 +302,6 @@ async def server(
stream_protocol: StreamProtocol[str, str],
use_ssl: bool,
server_ssl_context: ssl.SSLContext,
- service_actions_interval: float | None,
ssl_handshake_timeout: float | None,
backend_kwargs: dict[str, Any],
) -> AsyncIterator[MyAsyncTCPServer]:
@@ -370,7 +313,6 @@ async def server(
backlog=1,
ssl=server_ssl_context if use_ssl else None,
ssl_handshake_timeout=ssl_handshake_timeout,
- service_actions_interval=service_actions_interval,
backend_kwargs=backend_kwargs,
) as server:
assert not server.sockets
@@ -513,12 +455,14 @@ async def test____serve_forever____empty_listener_list(
assert not s.sockets
@pytest.mark.usefixtures("run_server_and_wait")
- async def test____serve_forever____backend_assignment(
+ async def test____serve_forever____server_assignment(
self,
server: MyAsyncTCPServer,
request_handler: MyAsyncTCPRequestHandler,
) -> None:
- assert request_handler.backend is server.get_backend()
+ assert request_handler.server == server
+ assert isinstance(request_handler.server, AsyncTCPNetworkServer)
+ assert isinstance(request_handler.server, weakref.ProxyType)
async def test____serve_forever____accept_client(
self,
@@ -572,32 +516,6 @@ async def test____serve_forever____accept_client____client_sent_RST_packet_right
assert caplog.records[0].levelno == logging.WARNING
assert caplog.records[0].message == "A client connection was interrupted just after listener.accept()"
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [0.1], indirect=True)
- async def test____serve_forever____service_actions(self, request_handler: MyAsyncTCPRequestHandler) -> None:
- await asyncio.sleep(0.2)
- assert request_handler.service_actions_count >= 1
-
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [math.inf], indirect=True)
- async def test____serve_forever____service_actions____disabled(self, request_handler: MyAsyncTCPRequestHandler) -> None:
- await asyncio.sleep(1)
- assert request_handler.service_actions_count == 0
-
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [0.1], indirect=True)
- async def test____serve_forever____service_actions____crash(
- self,
- request_handler: MyAsyncTCPRequestHandler,
- caplog: pytest.LogCaptureFixture,
- server: MyAsyncTCPServer,
- ) -> None:
- caplog.set_level(logging.ERROR, server.logger.name)
- request_handler.crash_service_actions = True
- await asyncio.sleep(0.5)
- assert request_handler.service_actions_count == 0
- assert "Error occurred in request_handler.service_actions()" in [rec.message for rec in caplog.records]
-
async def test____serve_forever____disable_nagle_algorithm(
self,
client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
@@ -709,72 +627,28 @@ async def test____serve_forever____bad_request(
assert isinstance(request_handler.bad_request_received[client_address][0], StreamProtocolParseError)
assert isinstance(request_handler.bad_request_received[client_address][0].error, IncrementalDeserializeError)
- @pytest.mark.parametrize("request_handler", [CloseHandleAfterBadRequest], indirect=True)
- @pytest.mark.parametrize("bad_request_return_value", [None, False, True])
- async def test____serve_forever____bad_request____return_value(
- self,
- bad_request_return_value: bool | None,
- request_handler: CloseHandleAfterBadRequest,
- client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
- ) -> None:
- request_handler.bad_request_return_value = bad_request_return_value
- reader, writer = await client_factory()
-
- assert await reader.readline() == b"new handle\n"
- writer.write("\u00E9\n".encode("latin-1")) # StringSerializer does not accept unicode
- await asyncio.sleep(0.1)
-
- assert await reader.readline() == b"wrong encoding\n"
- writer.write(b"something valid\n")
- await asyncio.sleep(0.1)
-
- if bad_request_return_value in (None, False):
- assert await reader.readline() == b"GeneratorExit\n"
- assert await reader.readline() == b"new handle\n"
-
- assert await reader.readline() == b"something valid\n"
-
- @pytest.mark.parametrize("request_handler", [ErrorInRequestHandler], indirect=True)
- async def test____serve_forever____bad_request____unexpected_error(
- self,
- client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
- caplog: pytest.LogCaptureFixture,
- server: MyAsyncTCPServer,
- ) -> None:
- caplog.set_level(logging.ERROR, server.logger.name)
- reader, writer = await client_factory()
-
- writer.write("\u00E9\n".encode("latin-1")) # StringSerializer does not accept unicode
- await asyncio.sleep(0.1)
-
- with pytest.raises(ConnectionResetError):
- assert await reader.read() == b""
- raise ConnectionResetError
- assert len(caplog.records) == 3
-
- async def test____serve_forever____bad_request____recursive_traceback_frame_clear_error(
+ @pytest.mark.parametrize("socket_family", ["AF_INET"], indirect=True)
+ @pytest.mark.parametrize("use_ssl", ["NO_SSL"], indirect=True)
+ async def test____serve_forever____connection_reset_error(
self,
client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
caplog: pytest.LogCaptureFixture,
server: MyAsyncTCPServer,
- monkeypatch: pytest.MonkeyPatch,
+ request_handler: MyAsyncTCPRequestHandler,
) -> None:
caplog.set_level(logging.WARNING, server.logger.name)
- reader, writer = await client_factory()
+ _, writer = await client_factory()
- def infinite_recursion(exc: BaseException) -> None:
- infinite_recursion(exc)
+ enable_socket_linger(writer.get_extra_info("socket"), timeout=0)
- monkeypatch.setattr(
- f"{AsyncTCPNetworkServer.__module__}._recursively_clear_exception_traceback_frames",
- infinite_recursion,
- )
-
- writer.write("\u00E9\n".encode("latin-1")) # StringSerializer does not accept unicode
- await asyncio.sleep(0.1)
+ writer.close()
+ await writer.wait_closed()
+ async with asyncio.timeout(1):
+ while request_handler.connected_clients:
+ await asyncio.sleep(0.1)
- assert await reader.readline() == b"wrong encoding man.\n"
- assert "Recursion depth reached when clearing exception's traceback frames" in [rec.message for rec in caplog.records]
+ # ECONNRESET not logged
+ assert len(caplog.records) == 0
@pytest.mark.parametrize("mute_thrown_exception", [False, True])
@pytest.mark.parametrize("request_handler", [ErrorInRequestHandler], indirect=True)
@@ -881,18 +755,6 @@ async def test____serve_forever____explicitly_closed_by_request_handler(
assert await reader.read() == b""
- async def test____serve_forever____explicitly_closed_by_service_actions(
- self,
- client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
- request_handler: MyAsyncTCPRequestHandler,
- ) -> None:
- reader, _ = await client_factory()
-
- request_handler.close_all_clients_on_service_actions = True
- await asyncio.sleep(0.2)
-
- assert await reader.read() == b""
-
async def test____serve_forever____request_handler_ask_to_stop_accepting_new_connections(
self,
client_factory: Callable[[], Awaitable[tuple[asyncio.StreamReader, asyncio.StreamWriter]]],
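
The handler rewrites in this file track two API changes: `service_init()` now receives an `AsyncExitStack` plus the server (a weakref proxy, per the `server_assignment` assertions above) and absorbs the removed `set_async_backend()`/`service_actions()`/`service_quit()` hooks, while the `bad_request()` callback is gone in favour of catching parse errors around the `yield`. A condensed handler following the new shape, simplified from the test handlers above (it reports the parse error and ends the handle cycle rather than retrying the yield):

```python
import contextlib
from collections.abc import AsyncGenerator

from easynetwork.api_async.server.handler import AsyncStreamClient, AsyncStreamRequestHandler
from easynetwork.api_async.server.tcp import AsyncTCPNetworkServer
from easynetwork.exceptions import StreamProtocolParseError


class EchoRequestHandler(AsyncStreamRequestHandler[str, str]):
    async def service_init(
        self,
        exit_stack: contextlib.AsyncExitStack,
        server: AsyncTCPNetworkServer[str, str],
    ) -> None:
        await super().service_init(exit_stack, server)
        self.server = server
        # Teardown is registered on the exit stack instead of a service_quit() hook.
        exit_stack.push_async_callback(self.service_quit)

    async def service_quit(self) -> None:
        del self.server

    async def handle(self, client: AsyncStreamClient[str]) -> AsyncGenerator[None, str]:
        try:
            # Parse errors are thrown into the generator at the yield point.
            request = yield
        except StreamProtocolParseError:
            await client.send_packet("wrong encoding man.")
            return
        await client.send_packet(request.upper())
```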
diff --git a/tests/functional_test/test_communication/test_async/test_server/test_udp.py b/tests/functional_test/test_communication/test_async/test_server/test_udp.py
index b6a7b0b7..6482f919 100644
--- a/tests/functional_test/test_communication/test_async/test_server/test_udp.py
+++ b/tests/functional_test/test_communication/test_async/test_server/test_udp.py
@@ -4,11 +4,10 @@
import collections
import contextlib
import logging
-import math
+import weakref
from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable
from typing import Any
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
from easynetwork.api_async.server.handler import AsyncDatagramClient, AsyncDatagramRequestHandler
from easynetwork.api_async.server.udp import AsyncUDPNetworkServer
from easynetwork.exceptions import BaseProtocolParseError, ClientClosedError, DatagramProtocolParseError, DeserializeError
@@ -30,25 +29,16 @@ class MyAsyncUDPRequestHandler(AsyncDatagramRequestHandler[str, str]):
request_received: collections.defaultdict[tuple[Any, ...], list[str]]
bad_request_received: collections.defaultdict[tuple[Any, ...], list[BaseProtocolParseError]]
created_clients: set[AsyncDatagramClient[str]]
- backend: AbstractAsyncBackend
- service_actions_count: int
- crash_service_actions: bool = False
+ server: AsyncUDPNetworkServer[str, str]
- def set_async_backend(self, backend: AbstractAsyncBackend) -> None:
- self.backend = backend
-
- async def service_init(self) -> None:
- await super().service_init()
- self.service_actions_count = 0
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncUDPNetworkServer[str, str]) -> None:
+ await super().service_init(exit_stack, server)
+ self.server = server
self.request_received = collections.defaultdict(list)
self.bad_request_received = collections.defaultdict(list)
self.created_clients = set()
- async def service_actions(self) -> None:
- if self.crash_service_actions:
- raise Exception("CRASH")
- await super().service_actions()
- self.service_actions_count += 1
+ exit_stack.push_async_callback(self.service_quit)
async def service_quit(self) -> None:
# At this point, ALL clients should be closed (since the UDP socket is closed)
@@ -58,16 +48,18 @@ async def service_quit(self) -> None:
await client.send_packet("something")
del (
- self.service_actions_count,
self.request_received,
self.bad_request_received,
self.created_clients,
)
- await super().service_quit()
async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None, str]:
self.created_clients.add(client)
- match (yield):
+ while True:
+ async with self.handle_bad_requests(client):
+ request = yield
+ break
+ match request:
case "__error__":
raise RandomError("Sorry man!")
case "__os_error__":
@@ -79,18 +71,24 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
assert object() not in list(self.created_clients)
await client.send_packet("True")
case "__wait__":
- request = yield
+ while True:
+ async with self.handle_bad_requests(client):
+ request = yield
+ break
self.request_received[client.address].append(request)
await client.send_packet(f"After wait: {request}")
- case request:
+ case _:
self.request_received[client.address].append(request)
await client.send_packet(request.upper())
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError) -> None:
- assert isinstance(exc, DatagramProtocolParseError)
- assert exc.sender_address == client.address
- self.bad_request_received[client.address].append(exc)
- await client.send_packet("wrong encoding man.")
+ @contextlib.asynccontextmanager
+ async def handle_bad_requests(self, client: AsyncDatagramClient[str]) -> AsyncIterator[None]:
+ try:
+ yield
+ except DatagramProtocolParseError as exc:
+ assert exc.sender_address == client.address
+ self.bad_request_received[client.address].append(exc)
+ await client.send_packet("wrong encoding man.")
class TimeoutRequestHandler(AsyncDatagramRequestHandler[str, str]):
@@ -109,9 +107,6 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
finally:
self.request_timeout = 1.0 # Force reset to 1 second in order not to overload the server
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class ConcurrencyTestRequestHandler(AsyncDatagramRequestHandler[str, str]):
sleep_time_before_second_yield: float = 0.0
@@ -124,9 +119,6 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
await asyncio.sleep(self.sleep_time_before_response)
await client.send_packet(f"After wait: {request}")
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class CancellationRequestHandler(AsyncDatagramRequestHandler[str, str]):
async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None, str]:
@@ -134,15 +126,12 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
await client.send_packet("response")
raise asyncio.CancelledError()
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class RequestRefusedHandler(AsyncDatagramRequestHandler[str, str]):
refuse_after: int = 2**64
bypass_refusal: bool = False
- async def service_init(self) -> None:
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AsyncUDPNetworkServer[str, str]) -> None:
self.request_count: collections.Counter[AsyncDatagramClient[str]] = collections.Counter()
async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None, str]:
@@ -152,9 +141,6 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
self.request_count[client] += 1
await client.send_packet(request)
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError, /) -> None:
- pass
-
class ErrorInRequestHandler(AsyncDatagramRequestHandler[str, str]):
mute_thrown_exception: bool = False
@@ -169,9 +155,6 @@ async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None,
else:
await client.send_packet(request)
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError, /) -> None:
- raise RandomError("An error occurred")
-
class ErrorBeforeYieldHandler(AsyncDatagramRequestHandler[str, str]):
raise_error: bool = False
@@ -186,24 +169,6 @@ async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolP
pass
-class CloseHandleAfterBadRequest(AsyncDatagramRequestHandler[str, str]):
- bad_request_return_value: bool | None = None
-
- async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None, str]:
- await client.send_packet("new handle")
- try:
- request = yield
- except GeneratorExit:
- await client.send_packet("GeneratorExit")
- raise
- else:
- await client.send_packet(request)
-
- async def bad_request(self, client: AsyncDatagramClient[str], exc: BaseProtocolParseError) -> bool | None:
- await client.send_packet("wrong encoding")
- return self.bad_request_return_value
-
-
class MyAsyncUDPServer(AsyncUDPNetworkServer[str, str]):
__slots__ = ()
@@ -224,18 +189,12 @@ def request_handler(request: Any) -> AsyncDatagramRequestHandler[str, str]:
request_handler_cls: type[AsyncDatagramRequestHandler[str, str]] = getattr(request, "param", MyAsyncUDPRequestHandler)
return request_handler_cls()
- @pytest.fixture
- @staticmethod
- def service_actions_interval(request: Any) -> float | None:
- return getattr(request, "param", None)
-
@pytest_asyncio.fixture
@staticmethod
async def server(
request_handler: AsyncDatagramRequestHandler[str, str],
localhost_ip: str,
datagram_protocol: DatagramProtocol[str, str],
- service_actions_interval: float | None,
backend_kwargs: dict[str, Any],
) -> AsyncIterator[MyAsyncUDPServer]:
async with MyAsyncUDPServer(
@@ -243,7 +202,6 @@ async def server(
0,
datagram_protocol,
request_handler,
- service_actions_interval=service_actions_interval,
backend_kwargs=backend_kwargs,
) as server:
assert server.socket is None
@@ -287,58 +245,15 @@ async def factory() -> DatagramEndpoint:
yield factory
- @pytest.fixture(
- params=[
- pytest.param(True, id="TaskGroup.start_soon_with_context available"),
- pytest.param(False, id="TaskGroup.start_soon_with_context unavailable"),
- ]
- )
- @staticmethod
- def start_task_with_context(request: pytest.FixtureRequest, monkeypatch: pytest.MonkeyPatch) -> None:
- match getattr(request, "param"):
- case True:
- pass
- case False:
- from easynetwork.api_async.backend.abc import AbstractTaskGroup
- from easynetwork_asyncio.tasks import TaskGroup
-
- monkeypatch.setattr(TaskGroup, "start_soon_with_context", AbstractTaskGroup.start_soon_with_context)
- case invalid_param:
- pytest.fail(f"Invalid param: {invalid_param!r}")
-
@pytest.mark.usefixtures("run_server_and_wait")
- async def test____serve_forever____backend_assignment(
+ async def test____serve_forever____server_assignment(
self,
server: MyAsyncUDPServer,
request_handler: MyAsyncUDPRequestHandler,
) -> None:
- assert request_handler.backend is server.get_backend()
-
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [0.1], indirect=True)
- async def test____serve_forever____service_actions(self, request_handler: MyAsyncUDPRequestHandler) -> None:
- await asyncio.sleep(0.2)
- assert request_handler.service_actions_count >= 1
-
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [math.inf], indirect=True)
- async def test____serve_forever____service_actions____disabled(self, request_handler: MyAsyncUDPRequestHandler) -> None:
- await asyncio.sleep(1)
- assert request_handler.service_actions_count == 0
-
- @pytest.mark.usefixtures("run_server_and_wait")
- @pytest.mark.parametrize("service_actions_interval", [0.1], indirect=True)
- async def test____serve_forever____service_actions____crash(
- self,
- request_handler: MyAsyncUDPRequestHandler,
- caplog: pytest.LogCaptureFixture,
- server: MyAsyncUDPServer,
- ) -> None:
- caplog.set_level(logging.ERROR, server.logger.name)
- request_handler.crash_service_actions = True
- await asyncio.sleep(0.5)
- assert request_handler.service_actions_count == 0
- assert "Error occurred in request_handler.service_actions()" in [rec.message for rec in caplog.records]
+ assert request_handler.server == server
+ assert isinstance(request_handler.server, AsyncUDPNetworkServer)
+ assert isinstance(request_handler.server, weakref.ProxyType)
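These assertions hold because the handler is given a weakref.proxy to the server: a proxy forwards comparisons and __class__ to its referent, while its concrete type remains weakref.ProxyType. A standalone sketch of the behaviour under test:

    import weakref

    class Server: ...

    server = Server()
    server_proxy = weakref.proxy(server)

    assert server_proxy == server                       # comparisons forwarded to the referent
    assert isinstance(server_proxy, Server)             # __class__ is forwarded too
    assert isinstance(server_proxy, weakref.ProxyType)  # but the concrete type is the proxy type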
async def test____serve_forever____handle_request(
self,
@@ -408,74 +323,6 @@ async def test____serve_forever____bad_request(
assert isinstance(request_handler.bad_request_received[client_address][0], DatagramProtocolParseError)
assert isinstance(request_handler.bad_request_received[client_address][0].error, DeserializeError)
- @pytest.mark.parametrize("request_handler", [CloseHandleAfterBadRequest], indirect=True)
- @pytest.mark.parametrize("bad_request_return_value", [None, False, True])
- async def test____serve_forever____bad_request____return_value(
- self,
- bad_request_return_value: bool | None,
- request_handler: CloseHandleAfterBadRequest,
- client_factory: Callable[[], Awaitable[DatagramEndpoint]],
- ) -> None:
- request_handler.bad_request_return_value = bad_request_return_value
- endpoint = await client_factory()
-
- await endpoint.sendto("\u00E9".encode("latin-1"), None) # StringSerializer does not accept unicode
- await asyncio.sleep(0.1)
- assert (await endpoint.recvfrom())[0] == b"new handle"
-
- assert (await endpoint.recvfrom())[0] == b"wrong encoding"
- await endpoint.sendto(b"something valid", None)
- await asyncio.sleep(0.1)
-
- if bad_request_return_value in (None, False):
- assert (await endpoint.recvfrom())[0] == b"GeneratorExit"
- assert (await endpoint.recvfrom())[0] == b"new handle"
-
- assert (await endpoint.recvfrom())[0] == b"something valid"
-
- @pytest.mark.parametrize("request_handler", [ErrorInRequestHandler], indirect=True)
- async def test____serve_forever____bad_request____unexpected_error(
- self,
- client_factory: Callable[[], Awaitable[DatagramEndpoint]],
- caplog: pytest.LogCaptureFixture,
- server: MyAsyncUDPServer,
- ) -> None:
- caplog.set_level(logging.ERROR, server.logger.name)
- endpoint = await client_factory()
-
- await endpoint.sendto("\u00E9".encode("latin-1"), None) # StringSerializer does not accept unicode
- await asyncio.sleep(0.2)
-
- with pytest.raises(TimeoutError):
- async with asyncio.timeout(1):
- await endpoint.recvfrom()
- pytest.fail("Should not arrive here")
- assert len(caplog.records) == 3
-
- async def test____serve_forever____bad_request____recursive_traceback_frame_clear_error(
- self,
- client_factory: Callable[[], Awaitable[DatagramEndpoint]],
- caplog: pytest.LogCaptureFixture,
- server: MyAsyncUDPServer,
- monkeypatch: pytest.MonkeyPatch,
- ) -> None:
- caplog.set_level(logging.WARNING, server.logger.name)
- endpoint = await client_factory()
-
- def infinite_recursion(exc: BaseException) -> None:
- infinite_recursion(exc)
-
- monkeypatch.setattr(
- f"{AsyncUDPNetworkServer.__module__}._recursively_clear_exception_traceback_frames",
- infinite_recursion,
- )
-
- await endpoint.sendto("\u00E9".encode("latin-1"), None) # StringSerializer does not accept unicode
- await asyncio.sleep(0.1)
-
- assert (await endpoint.recvfrom())[0] == b"wrong encoding man."
- assert "Recursion depth reached when clearing exception's traceback frames" in [rec.message for rec in caplog.records]
-
@pytest.mark.parametrize("mute_thrown_exception", [False, True])
@pytest.mark.parametrize("request_handler", [ErrorInRequestHandler], indirect=True)
@pytest.mark.parametrize("serializer", [pytest.param("invalid", id="serializer_crash")], indirect=True)
@@ -622,7 +469,6 @@ async def test____serve_forever____request_handler_did_not_yield(
await endpoint.sendto(b"hello world", None)
assert (await endpoint.recvfrom())[0] == b"hello world"
- @pytest.mark.usefixtures("start_task_with_context")
@pytest.mark.parametrize("request_handler", [ConcurrencyTestRequestHandler], indirect=True)
async def test____serve_forever____datagram_while_request_handle_is_performed(
self,
@@ -637,7 +483,6 @@ async def test____serve_forever____datagram_while_request_handle_is_performed(
async with asyncio.timeout(5):
assert (await endpoint.recvfrom())[0] == b"After wait: hello, world."
- @pytest.mark.usefixtures("start_task_with_context")
@pytest.mark.parametrize("request_handler", [ConcurrencyTestRequestHandler], indirect=True)
async def test____serve_forever____too_many_datagrams_while_request_handle_is_performed(
self,
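Two themes run through the hunks above: the boilerplate bad_request() overrides disappear (suggesting the base request handler now ships a usable default), and service_init() grows an AsyncExitStack plus the owning server. A hedged sketch of a handler written against the new hook, with import paths assumed from those used elsewhere in this patch:

    import contextlib
    from collections.abc import AsyncGenerator

    from easynetwork.api_async.server.abc import AbstractAsyncNetworkServer
    from easynetwork.api_async.server.handler import AsyncDatagramClient, AsyncDatagramRequestHandler

    class EchoHandler(AsyncDatagramRequestHandler[str, str]):
        async def service_init(
            self,
            exit_stack: contextlib.AsyncExitStack,
            server: AbstractAsyncNetworkServer,
        ) -> None:
            # per-service state is set up here; teardown callbacks go on the exit stack
            self.request_count = 0
            exit_stack.callback(lambda: None)  # placeholder cleanup hook

        async def handle(self, client: AsyncDatagramClient[str]) -> AsyncGenerator[None, str]:
            request = yield
            self.request_count += 1
            await client.send_packet(request)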
diff --git a/tests/functional_test/test_communication/test_sync/test_server/test_standalone.py b/tests/functional_test/test_communication/test_sync/test_server/test_standalone.py
index 35a9b8dc..6ecd0ca1 100644
--- a/tests/functional_test/test_communication/test_sync/test_server/test_standalone.py
+++ b/tests/functional_test/test_communication/test_sync/test_server/test_standalone.py
@@ -1,86 +1,82 @@
from __future__ import annotations
import asyncio
+import contextlib
import threading
import time
from collections.abc import AsyncGenerator, Callable, Iterator
-from easynetwork.api_async.server.handler import (
- AsyncBaseClientInterface,
- AsyncDatagramRequestHandler,
- AsyncStreamClient,
- AsyncStreamRequestHandler,
-)
-from easynetwork.api_sync.server.abc import AbstractStandaloneNetworkServer
+from easynetwork.api_async.server.abc import AbstractAsyncNetworkServer
+from easynetwork.api_async.server.handler import AsyncBaseClientInterface, AsyncDatagramRequestHandler, AsyncStreamRequestHandler
+from easynetwork.api_sync.server.abc import AbstractNetworkServer
from easynetwork.api_sync.server.tcp import StandaloneTCPNetworkServer
-from easynetwork.api_sync.server.thread import StandaloneNetworkServerThread
+from easynetwork.api_sync.server.thread import NetworkServerThread
from easynetwork.api_sync.server.udp import StandaloneUDPNetworkServer
-from easynetwork.exceptions import BaseProtocolParseError, ServerAlreadyRunning, ServerClosedError
+from easynetwork.exceptions import ServerAlreadyRunning, ServerClosedError
from easynetwork.protocol import DatagramProtocol, StreamProtocol
import pytest
class EchoRequestHandler(AsyncStreamRequestHandler[str, str], AsyncDatagramRequestHandler[str, str]):
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AbstractAsyncNetworkServer) -> None:
+ pass
+
async def handle(self, client: AsyncBaseClientInterface[str]) -> AsyncGenerator[None, str]:
request = yield
await client.send_packet(request)
- async def bad_request(self, client: AsyncBaseClientInterface[str], exc: BaseProtocolParseError, /) -> None:
- if isinstance(client, AsyncStreamClient):
- await client.aclose()
-
class BaseTestStandaloneNetworkServer:
@pytest.fixture
@staticmethod
def start_server(
- server: AbstractStandaloneNetworkServer,
- ) -> Iterator[StandaloneNetworkServerThread]:
+ server: AbstractNetworkServer,
+ ) -> Iterator[NetworkServerThread]:
with server:
- server_thread = StandaloneNetworkServerThread(server, daemon=True)
+ server_thread = NetworkServerThread(server, daemon=True)
server_thread.start()
yield server_thread
server_thread.join(timeout=1)
- def test____is_serving____default_to_False(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____is_serving____default_to_False(self, server: AbstractNetworkServer) -> None:
with server:
assert not server.is_serving()
- def test____shutdown____default_to_noop(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____shutdown____default_to_noop(self, server: AbstractNetworkServer) -> None:
with server:
server.shutdown()
@pytest.mark.usefixtures("start_server")
- def test____shutdown____while_server_is_running(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____shutdown____while_server_is_running(self, server: AbstractNetworkServer) -> None:
assert server.is_serving()
server.shutdown()
assert not server.is_serving()
- def test____server_close____idempotent(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____server_close____idempotent(self, server: AbstractNetworkServer) -> None:
server.server_close()
server.server_close()
server.server_close()
@pytest.mark.usefixtures("start_server")
- def test____server_close____while_server_is_running(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____server_close____while_server_is_running(self, server: AbstractNetworkServer) -> None:
server.server_close()
@pytest.mark.usefixtures("start_server")
- def test____serve_forever____error_server_already_running(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____serve_forever____error_server_already_running(self, server: AbstractNetworkServer) -> None:
with pytest.raises(ServerAlreadyRunning):
server.serve_forever()
- def test____serve_forever____error_server_closed(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____serve_forever____error_server_closed(self, server: AbstractNetworkServer) -> None:
server.server_close()
with pytest.raises(ServerClosedError):
server.serve_forever()
- def test____serve_forever____without_is_up_event(self, server: AbstractStandaloneNetworkServer) -> None:
+ def test____serve_forever____without_is_up_event(self, server: AbstractNetworkServer) -> None:
with server:
t = threading.Thread(target=server.serve_forever, daemon=True)
t.start()
@@ -95,7 +91,7 @@ def test____serve_forever____without_is_up_event(self, server: AbstractStandalon
def test____server_thread____several_join(
self,
- start_server: StandaloneNetworkServerThread,
+ start_server: NetworkServerThread,
) -> None:
start_server.join()
start_server.join()
@@ -142,7 +138,7 @@ def test____serve_forever____serve_several_times(self, server: StandaloneTCPNetw
assert not server.is_serving()
assert not server.get_addresses()
- server_thread = StandaloneNetworkServerThread(server, daemon=True)
+ server_thread = NetworkServerThread(server, daemon=True)
server_thread.start()
try:
assert server.is_serving()
@@ -209,7 +205,7 @@ def test____serve_forever____serve_several_times(self, server: StandaloneUDPNetw
assert not server.is_serving()
assert server.get_address() is None
- server_thread = StandaloneNetworkServerThread(server, daemon=True)
+ server_thread = NetworkServerThread(server, daemon=True)
server_thread.start()
try:
assert server.is_serving()
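StandaloneNetworkServerThread becomes NetworkServerThread, in step with the AbstractStandaloneNetworkServer to AbstractNetworkServer rename. A usage sketch distilled from the fixtures above (the comments on start() and join() are inferred from how the tests use them, not from documentation):

    from easynetwork.api_sync.server.tcp import StandaloneTCPNetworkServer
    from easynetwork.api_sync.server.thread import NetworkServerThread
    from easynetwork.protocol import StreamProtocol
    from easynetwork.serializers.line import StringLineSerializer

    server = StandaloneTCPNetworkServer(None, 0, StreamProtocol(StringLineSerializer()), EchoHandler())
    with server:  # the context manager closes the server on exit
        thread = NetworkServerThread(server, daemon=True)
        thread.start()          # the tests assert is_serving() right after, so start() waits for readiness
        assert server.is_serving()
        thread.join(timeout=1)  # and join() apparently triggers the shutdown before waiting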
diff --git a/tests/functional_test/test_concurrency/conftest.py b/tests/functional_test/test_concurrency/conftest.py
index d81d64fd..1ffb4566 100644
--- a/tests/functional_test/test_concurrency/conftest.py
+++ b/tests/functional_test/test_concurrency/conftest.py
@@ -1,19 +1,15 @@
from __future__ import annotations
+import contextlib
import threading
from collections.abc import AsyncGenerator, Iterator
from typing import Literal
-from easynetwork.api_async.server.handler import (
- AsyncBaseClientInterface,
- AsyncDatagramRequestHandler,
- AsyncStreamClient,
- AsyncStreamRequestHandler,
-)
-from easynetwork.api_sync.server.abc import AbstractStandaloneNetworkServer
+from easynetwork.api_async.server.abc import AbstractAsyncNetworkServer
+from easynetwork.api_async.server.handler import AsyncBaseClientInterface, AsyncDatagramRequestHandler, AsyncStreamRequestHandler
+from easynetwork.api_sync.server.abc import AbstractNetworkServer
from easynetwork.api_sync.server.tcp import StandaloneTCPNetworkServer
from easynetwork.api_sync.server.udp import StandaloneUDPNetworkServer
-from easynetwork.exceptions import BaseProtocolParseError
from easynetwork.protocol import DatagramProtocol, StreamProtocol
from easynetwork.serializers.line import StringLineSerializer
@@ -21,21 +17,20 @@
class EchoRequestHandler(AsyncStreamRequestHandler[str, str], AsyncDatagramRequestHandler[str, str]):
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AbstractAsyncNetworkServer) -> None:
+ pass
+
async def handle(self, client: AsyncBaseClientInterface[str]) -> AsyncGenerator[None, str]:
request = yield
await client.send_packet(request)
- async def bad_request(self, client: AsyncBaseClientInterface[str], exc: BaseProtocolParseError, /) -> None:
- if isinstance(client, AsyncStreamClient):
- await client.aclose()
-
@pytest.fixture(params=["TCP", "UDP"])
def ipproto(request: pytest.FixtureRequest) -> Literal["TCP", "UDP"]:
return getattr(request, "param").upper()
-def _build_server(ipproto: Literal["TCP", "UDP"]) -> AbstractStandaloneNetworkServer:
+def _build_server(ipproto: Literal["TCP", "UDP"]) -> AbstractNetworkServer:
serializer = StringLineSerializer()
request_handler = EchoRequestHandler()
match ipproto:
@@ -47,7 +42,7 @@ def _build_server(ipproto: Literal["TCP", "UDP"]) -> AbstractStandaloneNetworkSe
pytest.fail("Invalid ipproto")
-def _run_server(server: AbstractStandaloneNetworkServer) -> None:
+def _run_server(server: AbstractNetworkServer) -> None:
is_up_event = threading.Event()
t = threading.Thread(target=server.serve_forever, kwargs={"is_up_event": is_up_event}, daemon=True)
t.start()
@@ -57,7 +52,7 @@ def _run_server(server: AbstractStandaloneNetworkServer) -> None:
assert server.is_serving()
-def _retrieve_server_port(server: AbstractStandaloneNetworkServer) -> int:
+def _retrieve_server_port(server: AbstractNetworkServer) -> int:
match server:
case StandaloneTCPNetworkServer():
addresses = server.get_addresses()
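serve_forever() blocks the calling thread, so _run_server above drives it from a daemon thread and synchronizes on the is_up_event it passes in. The pattern in isolation:

    import threading

    def run_in_background(server) -> None:
        is_up = threading.Event()
        t = threading.Thread(target=server.serve_forever, kwargs={"is_up_event": is_up}, daemon=True)
        t.start()
        if not is_up.wait(timeout=1):  # serve_forever() sets the event once it is listening
            raise RuntimeError("server did not come up in time")
        assert server.is_serving()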
diff --git a/tests/functional_test/test_serializers/base.py b/tests/functional_test/test_serializers/base.py
index ecf020ba..940e5d3c 100644
--- a/tests/functional_test/test_serializers/base.py
+++ b/tests/functional_test/test_serializers/base.py
@@ -29,23 +29,23 @@ def invalid_complete_data() -> bytes:
def test____fixture____consistency(
self,
- serializer_for_serialization: AbstractPacketSerializer[Any, Any],
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_serialization: AbstractPacketSerializer[Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
) -> None:
assert type(serializer_for_serialization) is type(serializer_for_deserialization)
def test____slots____no_dict(
self,
- serializer_for_serialization: AbstractPacketSerializer[Any, Any],
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_serialization: AbstractPacketSerializer[Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
) -> None:
assert not hasattr(serializer_for_serialization, "__dict__")
assert not hasattr(serializer_for_deserialization, "__dict__")
def test____slots____weakref(
self,
- serializer_for_serialization: AbstractPacketSerializer[Any, Any],
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_serialization: AbstractPacketSerializer[Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
) -> None:
import weakref
@@ -54,7 +54,7 @@ def test____slots____weakref(
def test____serialize____sample(
self,
- serializer_for_serialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_serialization: AbstractPacketSerializer[Any],
packet_to_serialize: Any,
expected_complete_data: bytes | Callable[[bytes], None],
) -> None:
@@ -72,7 +72,7 @@ def test____serialize____sample(
def test____deserialize____sample(
self,
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
complete_data: bytes,
packet_to_serialize: Any,
) -> None:
@@ -87,7 +87,7 @@ def test____deserialize____sample(
def test____deserialize____invalid_data(
self,
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
invalid_complete_data: bytes,
) -> None:
# Arrange
@@ -98,7 +98,7 @@ def test____deserialize____invalid_data(
def test____deserialize____extra_data(
self,
- serializer_for_deserialization: AbstractPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractPacketSerializer[Any],
complete_data: bytes,
oneshot_extra_data: bytes,
) -> None:
@@ -128,7 +128,7 @@ def invalid_partial_data_extra_data() -> bytes | None:
def test____incremental_serialize____concatenated_chunks(
self,
- serializer_for_serialization: AbstractIncrementalPacketSerializer[Any, Any],
+ serializer_for_serialization: AbstractIncrementalPacketSerializer[Any],
packet_to_serialize: Any,
expected_joined_data: bytes | Callable[[bytes], None],
) -> None:
@@ -145,7 +145,7 @@ def test____incremental_serialize____concatenated_chunks(
def test____incremental_deserialize____one_shot_chunk(
self,
- serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any],
complete_data_for_incremental_deserialize: bytes,
packet_to_serialize: Any,
) -> None:
@@ -164,7 +164,7 @@ def test____incremental_deserialize____one_shot_chunk(
def test____incremental_deserialize____with_remaining_data(
self,
- serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any],
complete_data_for_incremental_deserialize: bytes,
packet_to_serialize: Any,
incremental_extra_data: bytes,
@@ -185,7 +185,7 @@ def test____incremental_deserialize____with_remaining_data(
def test____incremental_deserialize____give_chunk_byte_per_byte(
self,
- serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any],
complete_data_for_incremental_deserialize: bytes,
packet_to_serialize: Any,
) -> None:
@@ -212,7 +212,7 @@ def test____incremental_deserialize____give_chunk_byte_per_byte(
def test____incremental_deserialize____invalid_data(
self,
- serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any, Any],
+ serializer_for_deserialization: AbstractIncrementalPacketSerializer[Any],
invalid_partial_data: bytes,
invalid_partial_data_extra_data: bytes | None,
) -> None:
@@ -236,7 +236,7 @@ def test____incremental_deserialize____invalid_data(
@final
-class NoSerialization(AbstractPacketSerializer[bytes, bytes]):
+class NoSerialization(AbstractPacketSerializer[bytes]):
"""Helper for serializer wrapper"""
def serialize(self, packet: bytes) -> bytes:
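Every annotation in this file collapses from AbstractPacketSerializer[Any, Any] to AbstractPacketSerializer[Any]: serializers are now parametrized by a single packet type instead of separate sent/received types. A sketch of a custom one-shot serializer under the new shape, modelled on NoSerialization (the easynetwork.serializers.abc module path is assumed):

    from typing import final

    from easynetwork.exceptions import DeserializeError
    from easynetwork.serializers.abc import AbstractPacketSerializer

    @final
    class UTF8Serializer(AbstractPacketSerializer[str]):  # one parameter: the packet type
        def serialize(self, packet: str) -> bytes:
            return packet.encode("utf-8")

        def deserialize(self, data: bytes) -> str:
            try:
                return data.decode("utf-8")
            except UnicodeDecodeError as exc:
                raise DeserializeError(str(exc)) from exc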
diff --git a/tests/functional_test/test_serializers/samples/pickle.py b/tests/functional_test/test_serializers/samples/pickle.py
index 90a85c12..d4081ef2 100644
--- a/tests/functional_test/test_serializers/samples/pickle.py
+++ b/tests/functional_test/test_serializers/samples/pickle.py
@@ -7,10 +7,10 @@ class Dummy:
def __init__(self) -> None:
self.attr = "attr"
- def __eq__(self, __o: object) -> bool:
- if not isinstance(__o, Dummy):
+ def __eq__(self, other: object, /) -> bool:
+ if not isinstance(other, Dummy):
return NotImplemented
- return self.attr == __o.attr
+ return self.attr == other.attr
class BigDummy:
@@ -37,10 +37,10 @@ def __init__(self, level: int) -> None:
],
}
- def __eq__(self, __o: object) -> bool:
- if not isinstance(__o, BigDummy):
+ def __eq__(self, other: object, /) -> bool:
+ if not isinstance(other, BigDummy):
return NotImplemented
- return self.dummy == __o.dummy
+ return self.dummy == other.dummy
SAMPLES = [
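The __eq__(self, __o) to __eq__(self, other, /) rewrite trades the legacy dunder-prefix convention for a real positional-only marker: both forbid passing the parameter by keyword, but the slash form keeps a readable name. In miniature:

    class Point:
        def __init__(self, x: int) -> None:
            self.x = x

        def __eq__(self, other: object, /) -> bool:
            if not isinstance(other, Point):
                return NotImplemented
            return self.x == other.x

    assert Point(1) == Point(1)
    # Point(1).__eq__(other=Point(1)) would raise TypeError: positional-only parameter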
diff --git a/tests/functional_test/test_serializers/test_base64.py b/tests/functional_test/test_serializers/test_base64.py
index e46b017f..a40c616d 100644
--- a/tests/functional_test/test_serializers/test_base64.py
+++ b/tests/functional_test/test_serializers/test_base64.py
@@ -41,19 +41,17 @@ def serializer(
cls,
checksum: bool | bytes,
alphabet: Literal["standard", "urlsafe"],
- ) -> Base64EncoderSerializer[bytes, bytes]:
+ ) -> Base64EncoderSerializer[bytes]:
return Base64EncoderSerializer(NoSerialization(), alphabet=alphabet, checksum=checksum)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(serializer: Base64EncoderSerializer[bytes, bytes]) -> Base64EncoderSerializer[bytes, bytes]:
+ def serializer_for_serialization(serializer: Base64EncoderSerializer[bytes]) -> Base64EncoderSerializer[bytes]:
return serializer
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization(
- serializer: Base64EncoderSerializer[bytes, bytes]
- ) -> Base64EncoderSerializer[bytes, bytes]:
+ def serializer_for_deserialization(serializer: Base64EncoderSerializer[bytes]) -> Base64EncoderSerializer[bytes]:
return serializer
#### Packets to test
@@ -191,7 +189,7 @@ def test____dunder_init____invalid_key____invalid_base64_encoded_byte_length(sel
def test____deserialize____invalid_signature(
self,
- serializer: Base64EncoderSerializer[bytes, bytes],
+ serializer: Base64EncoderSerializer[bytes],
packet_to_serialize: bytes,
expected_complete_data: bytes,
) -> None:
diff --git a/tests/functional_test/test_serializers/test_cbor.py b/tests/functional_test/test_serializers/test_cbor.py
index 2966e308..3136f94f 100644
--- a/tests/functional_test/test_serializers/test_cbor.py
+++ b/tests/functional_test/test_serializers/test_cbor.py
@@ -22,12 +22,12 @@ class TestCBORSerializer(BaseTestIncrementalSerializer):
@pytest.fixture(scope="class")
@classmethod
- def serializer_for_serialization(cls) -> CBORSerializer[Any, Any]:
+ def serializer_for_serialization(cls) -> CBORSerializer:
return CBORSerializer(encoder_config=cls.ENCODER_CONFIG)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization() -> CBORSerializer[Any, Any]:
+ def serializer_for_deserialization() -> CBORSerializer:
return CBORSerializer()
#### Packets to test
diff --git a/tests/functional_test/test_serializers/test_compressors.py b/tests/functional_test/test_serializers/test_compressors.py
index bcaf9e24..781e2cd9 100644
--- a/tests/functional_test/test_serializers/test_compressors.py
+++ b/tests/functional_test/test_serializers/test_compressors.py
@@ -72,12 +72,12 @@ def compress_level(request: Any) -> Any:
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(compress_level: int) -> BZ2CompressorSerializer[bytes, bytes]:
+ def serializer_for_serialization(compress_level: int) -> BZ2CompressorSerializer[bytes]:
return BZ2CompressorSerializer(NoSerialization(), compress_level=compress_level)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization() -> BZ2CompressorSerializer[bytes, bytes]:
+ def serializer_for_deserialization() -> BZ2CompressorSerializer[bytes]:
return BZ2CompressorSerializer(NoSerialization())
#### One-shot Serialize
@@ -108,12 +108,12 @@ def compress_level(request: Any) -> Any:
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(compress_level: int) -> ZlibCompressorSerializer[bytes, bytes]:
+ def serializer_for_serialization(compress_level: int) -> ZlibCompressorSerializer[bytes]:
return ZlibCompressorSerializer(NoSerialization(), compress_level=compress_level)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization() -> ZlibCompressorSerializer[bytes, bytes]:
+ def serializer_for_deserialization() -> ZlibCompressorSerializer[bytes]:
return ZlibCompressorSerializer(NoSerialization())
#### One-shot Serialize
diff --git a/tests/functional_test/test_serializers/test_encryptor.py b/tests/functional_test/test_serializers/test_encryptor.py
index 5794bbb7..76ceb16e 100644
--- a/tests/functional_test/test_serializers/test_encryptor.py
+++ b/tests/functional_test/test_serializers/test_encryptor.py
@@ -22,17 +22,17 @@ class TestEncryptorSerializer(BaseTestIncrementalSerializer):
@pytest.fixture(scope="class")
@classmethod
- def serializer(cls) -> EncryptorSerializer[bytes, bytes]:
+ def serializer(cls) -> EncryptorSerializer[bytes]:
return EncryptorSerializer(NoSerialization(), key=cls.KEY)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(serializer: EncryptorSerializer[bytes, bytes]) -> EncryptorSerializer[bytes, bytes]:
+ def serializer_for_serialization(serializer: EncryptorSerializer[bytes]) -> EncryptorSerializer[bytes]:
return serializer
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization(serializer: EncryptorSerializer[bytes, bytes]) -> EncryptorSerializer[bytes, bytes]:
+ def serializer_for_deserialization(serializer: EncryptorSerializer[bytes]) -> EncryptorSerializer[bytes]:
return serializer
#### Packets to test
diff --git a/tests/functional_test/test_serializers/test_json.py b/tests/functional_test/test_serializers/test_json.py
index 464c6089..4d173b6b 100644
--- a/tests/functional_test/test_serializers/test_json.py
+++ b/tests/functional_test/test_serializers/test_json.py
@@ -21,12 +21,12 @@ class TestJSONSerializer(BaseTestIncrementalSerializer):
@pytest.fixture(scope="class")
@classmethod
- def serializer_for_serialization(cls) -> JSONSerializer[Any, Any]:
+ def serializer_for_serialization(cls) -> JSONSerializer:
return JSONSerializer(encoder_config=cls.ENCODER_CONFIG)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization() -> JSONSerializer[Any, Any]:
+ def serializer_for_deserialization() -> JSONSerializer:
return JSONSerializer()
#### Packets to test
diff --git a/tests/functional_test/test_serializers/test_msgpack.py b/tests/functional_test/test_serializers/test_msgpack.py
index 8b1190a4..81f42199 100644
--- a/tests/functional_test/test_serializers/test_msgpack.py
+++ b/tests/functional_test/test_serializers/test_msgpack.py
@@ -25,12 +25,12 @@ def packer_config() -> MessagePackerConfig:
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(packer_config: MessagePackerConfig) -> MessagePackSerializer[Any, Any]:
+ def serializer_for_serialization(packer_config: MessagePackerConfig) -> MessagePackSerializer:
return MessagePackSerializer(packer_config=packer_config)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization() -> MessagePackSerializer[Any, Any]:
+ def serializer_for_deserialization() -> MessagePackSerializer:
return MessagePackSerializer()
#### Packets to test
diff --git a/tests/functional_test/test_serializers/test_pickle.py b/tests/functional_test/test_serializers/test_pickle.py
index aaec3d6f..1127ae18 100644
--- a/tests/functional_test/test_serializers/test_pickle.py
+++ b/tests/functional_test/test_serializers/test_pickle.py
@@ -45,12 +45,12 @@ def unpickler_config(request: Any) -> UnpicklerConfig:
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_serialization(pickler_config: PicklerConfig, pickler_optimize: bool) -> PickleSerializer[Any, Any]:
+ def serializer_for_serialization(pickler_config: PicklerConfig, pickler_optimize: bool) -> PickleSerializer:
return PickleSerializer(pickler_config=pickler_config, pickler_optimize=pickler_optimize)
@pytest.fixture(scope="class")
@staticmethod
- def serializer_for_deserialization(unpickler_config: UnpicklerConfig) -> PickleSerializer[Any, Any]:
+ def serializer_for_deserialization(unpickler_config: UnpicklerConfig) -> PickleSerializer:
return PickleSerializer(unpickler_config=unpickler_config)
#### Packets to test
diff --git a/tests/import_utils.py b/tests/import_utils.py
new file mode 100644
index 00000000..259519f4
--- /dev/null
+++ b/tests/import_utils.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from functools import cache
+from importlib import import_module
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from pkgutil import ModuleInfo
+
+
+@cache
+def _catch_all_easynetwork_packages_and_modules() -> list[ModuleInfo]:
+ from pkgutil import walk_packages
+
+ result: list[ModuleInfo] = []
+
+ for module_name in ["easynetwork", "easynetwork_asyncio"]:
+ module = import_module(module_name)
+ module_spec = module.__spec__
+
+ assert module_spec is not None
+
+ module_paths = module_spec.submodule_search_locations or module.__path__
+
+ result.extend(walk_packages(module_paths, prefix=f"{module_spec.name}."))
+
+ return result
+
+
+ALL_EASYNETWORK_PACKAGES = [info.name for info in _catch_all_easynetwork_packages_and_modules() if info.ispkg]
+ALL_EASYNETWORK_MODULES = [info.name for info in _catch_all_easynetwork_packages_and_modules()]
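The module-walking helper moves out of test_import.py into a shared tests/import_utils.py so other test modules can reuse the two lists. Roughly what they contain (absolute import path assumed; the example names are drawn from imports elsewhere in this patch):

    from tests.import_utils import ALL_EASYNETWORK_MODULES, ALL_EASYNETWORK_PACKAGES

    # walk_packages() recursively yields every module under both distribution roots:
    #   ALL_EASYNETWORK_PACKAGES  e.g. "easynetwork.api_async", "easynetwork.serializers", ...
    #   ALL_EASYNETWORK_MODULES   the packages plus leaf modules such as
    #                             "easynetwork.exceptions" and "easynetwork.protocol"
    assert set(ALL_EASYNETWORK_PACKAGES) <= set(ALL_EASYNETWORK_MODULES)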
diff --git a/tests/other_test/test_import.py b/tests/other_test/test_import.py
index 4036e990..4f62c096 100644
--- a/tests/other_test/test_import.py
+++ b/tests/other_test/test_import.py
@@ -3,35 +3,10 @@
from functools import cache
from importlib import import_module
from itertools import combinations
-from typing import TYPE_CHECKING
import pytest
-if TYPE_CHECKING:
- from pkgutil import ModuleInfo
-
-
-@cache
-def _catch_all_easynetwork_packages_and_modules() -> list[ModuleInfo]:
- from pkgutil import walk_packages
-
- result: list[ModuleInfo] = []
-
- for module_name in ["easynetwork", "easynetwork_asyncio"]:
- module = import_module(module_name)
- module_spec = module.__spec__
-
- assert module_spec is not None
-
- module_paths = module_spec.submodule_search_locations or module.__path__
-
- result.extend(walk_packages(module_paths, prefix=f"{module_spec.name}."))
-
- return result
-
-
-ALL_EASYNETWORK_PACKAGES = [info.name for info in _catch_all_easynetwork_packages_and_modules() if info.ispkg]
-ALL_EASYNETWORK_MODULES = [info.name for info in _catch_all_easynetwork_packages_and_modules()]
+from ..import_utils import ALL_EASYNETWORK_MODULES, ALL_EASYNETWORK_PACKAGES
@cache
diff --git a/tests/pytest_plugins/asyncio_event_loop.py b/tests/pytest_plugins/asyncio_event_loop.py
index 06644a9f..38e5212e 100644
--- a/tests/pytest_plugins/asyncio_event_loop.py
+++ b/tests/pytest_plugins/asyncio_event_loop.py
@@ -64,7 +64,7 @@ def pytest_configure(config: pytest.Config) -> None:
config.addinivalue_line("markers", "xfail_uvloop: Expected asyncio test to fail if uvloop is used")
-@pytest.hookimpl(trylast=True) # type: ignore[misc]
+@pytest.hookimpl(trylast=True)
def pytest_report_header(config: pytest.Config) -> str:
return f"asyncio event-loop: {config.getoption(ASYNCIO_EVENT_LOOP_OPTION)}"
diff --git a/tests/scripts/async_server_test.py b/tests/scripts/async_server_test.py
index 7524ec0d..3930ef64 100644
--- a/tests/scripts/async_server_test.py
+++ b/tests/scripts/async_server_test.py
@@ -1,14 +1,15 @@
from __future__ import annotations
import argparse
+import contextlib
import logging
from collections.abc import AsyncGenerator, Callable
+from easynetwork.api_async.server.abc import AbstractAsyncNetworkServer
from easynetwork.api_async.server.handler import AsyncBaseClientInterface, AsyncDatagramRequestHandler, AsyncStreamRequestHandler
-from easynetwork.api_sync.server.abc import AbstractStandaloneNetworkServer
+from easynetwork.api_sync.server.abc import AbstractNetworkServer
from easynetwork.api_sync.server.tcp import StandaloneTCPNetworkServer
from easynetwork.api_sync.server.udp import StandaloneUDPNetworkServer
-from easynetwork.exceptions import BaseProtocolParseError
from easynetwork.protocol import DatagramProtocol, StreamProtocol
from easynetwork.serializers.line import StringLineSerializer
@@ -18,6 +19,9 @@
class MyAsyncRequestHandler(AsyncStreamRequestHandler[str, str], AsyncDatagramRequestHandler[str, str]):
+ async def service_init(self, exit_stack: contextlib.AsyncExitStack, server: AbstractAsyncNetworkServer) -> None:
+ pass
+
async def handle(self, client: AsyncBaseClientInterface[str]) -> AsyncGenerator[None, str]:
request: str = yield
logger.debug(f"Received {request!r}")
@@ -25,9 +29,6 @@ async def handle(self, client: AsyncBaseClientInterface[str]) -> AsyncGenerator[
request = (yield) + " after wait"
await client.send_packet(request.upper())
- async def bad_request(self, client: AsyncBaseClientInterface[str], exc: BaseProtocolParseError) -> None:
- pass
-
def create_tcp_server() -> StandaloneTCPNetworkServer[str, str]:
return StandaloneTCPNetworkServer(None, PORT, StreamProtocol(StringLineSerializer()), MyAsyncRequestHandler())
@@ -71,7 +72,7 @@ def main() -> None:
args = parser.parse_args()
- server_factory: Callable[[], AbstractStandaloneNetworkServer] = args.server_factory
+ server_factory: Callable[[], AbstractNetworkServer] = args.server_factory
logging.basicConfig(level=getattr(logging, args.log_level), format="[ %(levelname)s ] [ %(name)s ] %(message)s")
diff --git a/tests/unit_test/test_async/conftest.py b/tests/unit_test/test_async/conftest.py
index 3882a361..ced23fa5 100644
--- a/tests/unit_test/test_async/conftest.py
+++ b/tests/unit_test/test_async/conftest.py
@@ -5,10 +5,10 @@
from typing import TYPE_CHECKING
from easynetwork.api_async.backend.abc import (
- AbstractAsyncBackend,
- AbstractAsyncDatagramSocketAdapter,
- AbstractAsyncHalfCloseableStreamSocketAdapter,
- AbstractAsyncStreamSocketAdapter,
+ AsyncBackend,
+ AsyncDatagramSocketAdapter,
+ AsyncHalfCloseableStreamSocketAdapter,
+ AsyncStreamSocketAdapter,
)
import pytest
@@ -41,10 +41,10 @@ def mock_backend(fake_cancellation_cls: type[BaseException], mocker: MockerFixtu
from .._utils import AsyncDummyLock
- mock_backend = mocker.NonCallableMagicMock(spec=AbstractAsyncBackend)
+ mock_backend = mocker.NonCallableMagicMock(spec=AsyncBackend)
mock_backend.get_cancelled_exc_class.return_value = fake_cancellation_cls
- mock_backend.spawn_task = lambda coro_func, *args, **kwargs: SystemTask(coro_func(*args, **kwargs))
+ mock_backend.spawn_task = lambda coro_func, *args, **kwargs: SystemTask(coro_func(*args), **kwargs)
mock_backend.create_lock = AsyncDummyLock
mock_backend.create_event = asyncio.Event
mock_backend.create_task_group = TaskGroup
@@ -61,9 +61,9 @@ def mock_stream_socket_adapter_factory(request: pytest.FixtureRequest, mocker: M
def factory() -> MagicMock:
if eof_support:
- mock = mocker.NonCallableMagicMock(spec=AbstractAsyncHalfCloseableStreamSocketAdapter)
+ mock = mocker.NonCallableMagicMock(spec=AsyncHalfCloseableStreamSocketAdapter)
else:
- mock = mocker.NonCallableMagicMock(spec=AbstractAsyncStreamSocketAdapter)
+ mock = mocker.NonCallableMagicMock(spec=AsyncStreamSocketAdapter)
mock.sendall_fromiter = mocker.MagicMock(side_effect=lambda iterable_of_data: mock.sendall(b"".join(iterable_of_data)))
mock.is_closing.return_value = False
return mock
@@ -79,7 +79,7 @@ def mock_stream_socket_adapter(mock_stream_socket_adapter_factory: Callable[[],
@pytest.fixture
def mock_datagram_socket_adapter_factory(mocker: MockerFixture) -> Callable[[], MagicMock]:
def factory() -> MagicMock:
- mock = mocker.NonCallableMagicMock(spec=AbstractAsyncDatagramSocketAdapter)
+ mock = mocker.NonCallableMagicMock(spec=AsyncDatagramSocketAdapter)
mock.is_closing.return_value = False
return mock
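Note the subtle semantic change in the spawn_task stub: keyword arguments no longer reach the coroutine function but are handed to SystemTask itself, i.e. the stub now mimics a spawn_task(coro_func, *args, **task_options) signature. Schematically (names as in the conftest above):

    # before: SystemTask(coro_func(*args, **kwargs))   kwargs fed into the coroutine call
    # after:  SystemTask(coro_func(*args), **kwargs)   kwargs configure the task object itself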
diff --git a/tests/unit_test/test_async/test_api/test_backend/_fake_backends.py b/tests/unit_test/test_async/test_api/test_backend/_fake_backends.py
index ff5b22e3..48ee61d3 100644
--- a/tests/unit_test/test_async/test_api/test_backend/_fake_backends.py
+++ b/tests/unit_test/test_async/test_api/test_backend/_fake_backends.py
@@ -5,23 +5,23 @@
from typing import Any, AsyncContextManager, NoReturn, final
from easynetwork.api_async.backend.abc import (
- AbstractAsyncBackend,
- AbstractAsyncDatagramSocketAdapter,
- AbstractAsyncListenerSocketAdapter,
- AbstractAsyncStreamSocketAdapter,
- AbstractRunner,
- AbstractSystemTask,
- AbstractTaskGroup,
- AbstractThreadsPortal,
- AbstractTimeoutHandle,
+ AsyncBackend,
+ AsyncDatagramSocketAdapter,
+ AsyncListenerSocketAdapter,
+ AsyncStreamSocketAdapter,
ICondition,
IEvent,
ILock,
+ Runner,
+ SystemTask,
+ TaskGroup,
+ ThreadsPortal,
+ TimeoutHandle,
)
-class BaseFakeBackend(AbstractAsyncBackend):
- def new_runner(self) -> AbstractRunner:
+class BaseFakeBackend(AsyncBackend):
+ def new_runner(self) -> Runner:
raise NotImplementedError
async def sleep(self, delay: float) -> None:
@@ -42,40 +42,40 @@ async def cancel_shielded_coro_yield(self) -> None:
async def ignore_cancellation(self, coroutine: Coroutine[Any, Any, Any]) -> Any:
raise NotImplementedError
- def timeout(self, delay: Any) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def timeout(self, delay: Any) -> AsyncContextManager[TimeoutHandle]:
raise NotImplementedError
- def timeout_at(self, deadline: Any) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def timeout_at(self, deadline: Any) -> AsyncContextManager[TimeoutHandle]:
raise NotImplementedError
- def move_on_after(self, delay: Any) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def move_on_after(self, delay: Any) -> AsyncContextManager[TimeoutHandle]:
raise NotImplementedError
- def move_on_at(self, deadline: Any) -> AsyncContextManager[AbstractTimeoutHandle]:
+ def move_on_at(self, deadline: Any) -> AsyncContextManager[TimeoutHandle]:
raise NotImplementedError
def get_cancelled_exc_class(self) -> type[BaseException]:
raise NotImplementedError
- def spawn_task(self, *args: Any, **kwargs: Any) -> AbstractSystemTask[Any]:
+ def spawn_task(self, *args: Any, **kwargs: Any) -> SystemTask[Any]:
raise NotImplementedError
- def create_task_group(self) -> AbstractTaskGroup:
+ def create_task_group(self) -> TaskGroup:
raise NotImplementedError
- async def create_tcp_connection(self, *args: Any, **kwargs: Any) -> AbstractAsyncStreamSocketAdapter:
+ async def create_tcp_connection(self, *args: Any, **kwargs: Any) -> AsyncStreamSocketAdapter:
raise NotImplementedError
- async def wrap_tcp_client_socket(self, socket: Socket) -> AbstractAsyncStreamSocketAdapter:
+ async def wrap_tcp_client_socket(self, socket: Socket) -> AsyncStreamSocketAdapter:
raise NotImplementedError
- async def create_tcp_listeners(self, *args: Any, **kwargs: Any) -> Sequence[AbstractAsyncListenerSocketAdapter]:
+ async def create_tcp_listeners(self, *args: Any, **kwargs: Any) -> Sequence[AsyncListenerSocketAdapter]:
raise NotImplementedError
- async def create_udp_endpoint(self, *args: Any, **kwargs: Any) -> AbstractAsyncDatagramSocketAdapter:
+ async def create_udp_endpoint(self, *args: Any, **kwargs: Any) -> AsyncDatagramSocketAdapter:
raise NotImplementedError
- async def wrap_udp_socket(self, socket: Socket) -> AbstractAsyncDatagramSocketAdapter:
+ async def wrap_udp_socket(self, socket: Socket) -> AsyncDatagramSocketAdapter:
raise NotImplementedError
def create_lock(self) -> ILock:
@@ -87,10 +87,10 @@ def create_event(self) -> IEvent:
def create_condition_var(self, lock: ILock | None = ...) -> ICondition:
raise NotImplementedError
- async def run_in_thread(self, __func: Callable[..., Any], /, *args: Any, **kwargs: Any) -> Any:
+ async def run_in_thread(self, func: Callable[..., Any], /, *args: Any, **kwargs: Any) -> Any:
raise NotImplementedError
- def create_threads_portal(self) -> AbstractThreadsPortal:
+ def create_threads_portal(self) -> ThreadsPortal:
raise NotImplementedError
async def wait_future(self, *args: Any, **kwargs: Any) -> Any:
diff --git a/tests/unit_test/test_async/test_api/test_backend/test_backend.py b/tests/unit_test/test_async/test_api/test_backend/test_backend.py
index dae85a46..dd2fc3df 100644
--- a/tests/unit_test/test_async/test_api/test_backend/test_backend.py
+++ b/tests/unit_test/test_async/test_api/test_backend/test_backend.py
@@ -6,7 +6,7 @@
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Literal, assert_never, final
-from easynetwork.api_async.backend.abc import AbstractAsyncBackend
+from easynetwork.api_async.backend.abc import AsyncBackend
from easynetwork.api_async.backend.factory import AsyncBackendFactory
import pytest
@@ -48,7 +48,7 @@ async def ignore_cancellation(self, coroutine: Coroutine[Any, Any, Any]) -> Any:
@pytest.mark.asyncio
-class TestAbstractAsyncBackend:
+class TestAsyncBackend:
@pytest.fixture
@staticmethod
def backend(mocker: MockerFixture) -> MockBackend:
@@ -85,7 +85,7 @@ async def test____sleep_until____deadline_lower_than_current_time(
class TestAsyncBackendFactory:
- BACKENDS: MappingProxyType[str, type[AbstractAsyncBackend]] = MappingProxyType(
+ BACKENDS: MappingProxyType[str, type[AsyncBackend]] = MappingProxyType(
{
"asyncio": FakeAsyncioBackend,
"trio": FakeTrioBackend,
@@ -93,7 +93,7 @@ class TestAsyncBackendFactory:
}
)
- BACKEND_CLS_TO_NAME: MappingProxyType[type[AbstractAsyncBackend], str] = MappingProxyType({v: k for k, v in BACKENDS.items()})
+ BACKEND_CLS_TO_NAME: MappingProxyType[type[AsyncBackend], str] = MappingProxyType({v: k for k, v in BACKENDS.items()})
@pytest.fixture(scope="class", autouse=True)
@staticmethod
@@ -178,7 +178,7 @@ def test____get_all_backends____entry_point_is_abstract(
) -> None:
# Arrange
mock_importlib_metadata_entry_points.return_value = [
- self.build_entry_point("asyncio", "easynetwork.api_async.backend.abc:AbstractAsyncBackend"),
+ self.build_entry_point("asyncio", "easynetwork.api_async.backend.abc:AsyncBackend"),
]
# Act & Assert
@@ -260,7 +260,7 @@ def test____set_default_backend____from_string____unknown_backend(self) -> None:
@pytest.mark.parametrize("backend_cls", [*BACKENDS.values(), MockBackend])
@pytest.mark.parametrize("extended", [False, True], ids=lambda extended: f"extended=={extended}")
- def test____set_default_backend____from_class(self, backend_cls: type[AbstractAsyncBackend], extended: bool) -> None:
+ def test____set_default_backend____from_class(self, backend_cls: type[AsyncBackend], extended: bool) -> None:
# Arrange
if extended:
try:
@@ -279,8 +279,8 @@ class ExtendedBackend(backend_cls): # type: ignore[valid-type,misc]
# Assert
assert AsyncBackendFactory.get_default_backend(guess_current_async_library=False) is backend_cls
- @pytest.mark.parametrize("invalid_cls", [int, Socket, TestAbstractAsyncBackend])
- def test____set_default_backend____from_class____error_do_not_derive_from_AbstractAsyncBackend(
+ @pytest.mark.parametrize("invalid_cls", [int, Socket, TestAsyncBackend])
+ def test____set_default_backend____from_class____error_do_not_derive_from_AsyncBackend(
self,
invalid_cls: type[Any],
) -> None:
@@ -294,8 +294,8 @@ def test____set_default_backend____from_class____error_abstract_class_given(self
# Arrange
# Act & Assert
- with pytest.raises(TypeError, match=rf"^Invalid backend class: {AbstractAsyncBackend!r}$"):
- AsyncBackendFactory.set_default_backend(AbstractAsyncBackend)
+ with pytest.raises(TypeError, match=rf"^Invalid backend class: {AsyncBackend!r}$"):
+ AsyncBackendFactory.set_default_backend(AsyncBackend)
@pytest.mark.parametrize("backend_name", list(BACKENDS))
def test____extend____replace_by_a_subclass(self, backend_name: str) -> None:
diff --git a/tests/unit_test/test_async/test_api/test_backend/test_futures.py b/tests/unit_test/test_async/test_api/test_backend/test_futures.py
index 79639e95..1b79567f 100644
--- a/tests/unit_test/test_async/test_api/test_backend/test_futures.py
+++ b/tests/unit_test/test_async/test_api/test_backend/test_futures.py
@@ -4,11 +4,13 @@
import contextvars
from typing import TYPE_CHECKING
-from easynetwork.api_async.backend.futures import AsyncExecutor, AsyncThreadPoolExecutor
+from easynetwork.api_async.backend.futures import AsyncExecutor
from easynetwork.api_async.backend.sniffio import current_async_library_cvar
import pytest
+from ...._utils import partial_eq
+
if TYPE_CHECKING:
from unittest.mock import MagicMock
@@ -24,16 +26,53 @@ def mock_stdlib_executor(mocker: MockerFixture) -> MagicMock:
executor.shutdown.return_value = None
return executor
+ @pytest.fixture(params=[False, True], ids=lambda p: f"handle_context=={p}")
+ @staticmethod
+ def executor_handle_contexts(request: pytest.FixtureRequest) -> bool:
+ return getattr(request, "param")
+
+ @pytest.fixture
+ @staticmethod
+ def executor(mock_backend: MagicMock, mock_stdlib_executor: MagicMock, executor_handle_contexts: bool) -> AsyncExecutor:
+ return AsyncExecutor(mock_stdlib_executor, mock_backend, handle_contexts=executor_handle_contexts)
+
@pytest.fixture
@staticmethod
- def executor(mock_backend: MagicMock, mock_stdlib_executor: MagicMock) -> AsyncExecutor:
- return AsyncExecutor(mock_backend, mock_stdlib_executor)
+ def mock_context(mocker: MockerFixture) -> MagicMock:
+ return mocker.NonCallableMagicMock(spec=contextvars.Context)
+
+ @pytest.fixture(autouse=True)
+ @staticmethod
+ def mock_contextvars_copy_context(
+ mock_context: MagicMock,
+ mocker: MockerFixture,
+ ) -> MagicMock:
+ return mocker.patch(
+ "contextvars.copy_context",
+ autospec=True,
+ return_value=mock_context,
+ )
+
+ async def test____dunder_init____invalid_executor(
+ self,
+ mock_backend: MagicMock,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ invalid_executor = mocker.NonCallableMagicMock(spec=object)
+
+ # Act & Assert
+ with pytest.raises(TypeError):
+ _ = AsyncExecutor(invalid_executor, mock_backend)
async def test____run____submit_to_executor_and_wait(
self,
executor: AsyncExecutor,
+ executor_handle_contexts: bool,
mock_backend: MagicMock,
mock_stdlib_executor: MagicMock,
+ mock_context: MagicMock,
+ mock_contextvars_copy_context: MagicMock,
mocker: MockerFixture,
) -> None:
# Arrange
@@ -51,13 +90,29 @@ async def test____run____submit_to_executor_and_wait(
)
# Assert
- mock_stdlib_executor.submit.assert_called_once_with(
- func,
- mocker.sentinel.arg1,
- mocker.sentinel.arg2,
- kw1=mocker.sentinel.kw1,
- kw2=mocker.sentinel.kw2,
- )
+ if executor_handle_contexts:
+ mock_contextvars_copy_context.assert_called_once_with()
+ if current_async_library_cvar is not None:
+ mock_context.run.assert_called_once_with(current_async_library_cvar.set, None)
+ else:
+ mock_context.run.assert_not_called()
+ mock_stdlib_executor.submit.assert_called_once_with(
+ partial_eq(mock_context.run, func),
+ mocker.sentinel.arg1,
+ mocker.sentinel.arg2,
+ kw1=mocker.sentinel.kw1,
+ kw2=mocker.sentinel.kw2,
+ )
+ else:
+ mock_contextvars_copy_context.assert_not_called()
+ mock_context.run.assert_not_called()
+ mock_stdlib_executor.submit.assert_called_once_with(
+ func,
+ mocker.sentinel.arg1,
+ mocker.sentinel.arg2,
+ kw1=mocker.sentinel.kw1,
+ kw2=mocker.sentinel.kw2,
+ )
func.assert_not_called()
mock_backend.wait_future.assert_awaited_once_with(mocker.sentinel.future)
assert result is mocker.sentinel.result
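With handle_contexts enabled, AsyncExecutor.run() is expected to copy the current contextvars.Context and submit context.run(func, ...), so context variables set in the event-loop thread are visible inside the worker; partial_eq is a local test helper (from tests/unit_test/_utils, not shown here) that compares such wrappers by target and arguments. The propagation mechanism on its own:

    import contextvars
    from concurrent.futures import ThreadPoolExecutor

    request_id: contextvars.ContextVar[str] = contextvars.ContextVar("request_id")

    def work() -> str:
        return request_id.get()  # resolved against the context the call runs in

    request_id.set("abc-123")
    ctx = contextvars.copy_context()  # snapshot of the submitting thread's context

    with ThreadPoolExecutor() as pool:
        future = pool.submit(ctx.run, work)  # run `work` inside the copied context
        assert future.result() == "abc-123"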
@@ -147,93 +202,3 @@ async def test____context_manager____shutdown_executor_at_end(
# Assert
mock_stdlib_executor.shutdown.assert_not_called()
mock_backend.run_in_thread.assert_awaited_once_with(mock_stdlib_executor.shutdown, wait=True, cancel_futures=False)
-
-
-@pytest.mark.asyncio
-class TestAsyncThreadPoolExecutor:
- @pytest.fixture
- @staticmethod
- def mock_stdlib_executor(mocker: MockerFixture) -> MagicMock:
- executor = mocker.NonCallableMagicMock(spec=concurrent.futures.ThreadPoolExecutor)
- executor.shutdown.return_value = None
- return executor
-
- @pytest.fixture(autouse=True)
- @staticmethod
- def mock_stdlib_executor_cls(mocker: MockerFixture, mock_stdlib_executor: MagicMock) -> MagicMock:
- return mocker.patch("concurrent.futures.ThreadPoolExecutor", return_value=mock_stdlib_executor)
-
- @pytest.fixture
- @staticmethod
- def executor(mock_backend: MagicMock) -> AsyncThreadPoolExecutor:
- return AsyncThreadPoolExecutor(mock_backend)
-
- async def test____dunder_init____pass_kwargs_to_executor_cls(
- self,
- mock_backend: MagicMock,
- mock_stdlib_executor_cls: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
-
- # Act
- _ = AsyncThreadPoolExecutor(
- mock_backend,
- max_workers=mocker.sentinel.max_workers,
- thread_name_prefix=mocker.sentinel.thread_name_prefix,
- initializer=mocker.sentinel.initializer,
- initargs=mocker.sentinel.initargs,
- )
-
- # Assert
- mock_stdlib_executor_cls.assert_called_once_with(
- max_workers=mocker.sentinel.max_workers,
- thread_name_prefix=mocker.sentinel.thread_name_prefix,
- initializer=mocker.sentinel.initializer,
- initargs=mocker.sentinel.initargs,
- )
-
- async def test____run____submit_to_executor_and_wait(
- self,
- executor: AsyncThreadPoolExecutor,
- mock_backend: MagicMock,
- mock_stdlib_executor: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_context: MagicMock = mocker.NonCallableMagicMock(spec=contextvars.Context)
- mock_contextvars_copy_context: MagicMock = mocker.patch(
- "contextvars.copy_context",
- autospec=True,
- return_value=mock_context,
- )
- func = mocker.stub()
- mock_stdlib_executor.submit.return_value = mocker.sentinel.future
- mock_backend.wait_future.return_value = mocker.sentinel.result
-
- # Act
- result = await executor.run(
- func,
- mocker.sentinel.arg1,
- mocker.sentinel.arg2,
- kw1=mocker.sentinel.kw1,
- kw2=mocker.sentinel.kw2,
- )
-
- # Assert
- mock_contextvars_copy_context.assert_called_once_with()
- if current_async_library_cvar is not None:
- mock_context.run.assert_called_once_with(current_async_library_cvar.set, None)
- else:
- mock_context.run.assert_not_called()
- mock_stdlib_executor.submit.assert_called_once_with(
- mock_context.run,
- func,
- mocker.sentinel.arg1,
- mocker.sentinel.arg2,
- kw1=mocker.sentinel.kw1,
- kw2=mocker.sentinel.kw2,
- )
- func.assert_not_called()
- mock_backend.wait_future.assert_awaited_once_with(mocker.sentinel.future)
- assert result is mocker.sentinel.result
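The whole AsyncThreadPoolExecutor test class goes away with the class itself: judging by the remaining tests, the supported pattern is now to wrap any concurrent.futures.Executor in AsyncExecutor directly (argument order as in the fixtures above; backend stands for an AsyncBackend instance):

    import concurrent.futures

    from easynetwork.api_async.backend.futures import AsyncExecutor

    async def main(backend) -> None:
        pool = concurrent.futures.ThreadPoolExecutor(max_workers=4)
        async with AsyncExecutor(pool, backend, handle_contexts=True) as executor:
            result = await executor.run(sum, [1, 2, 3])
            assert result == 6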
diff --git a/tests/unit_test/test_async/test_api/test_client/test_abc.py b/tests/unit_test/test_async/test_api/test_client/test_abc.py
index 4c04eb1d..01b7ecc7 100644
--- a/tests/unit_test/test_async/test_api/test_client/test_abc.py
+++ b/tests/unit_test/test_async/test_api/test_client/test_abc.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+import math
from typing import TYPE_CHECKING, Any, final
from easynetwork.api_async.client.abc import AbstractAsyncNetworkClient
@@ -8,15 +9,18 @@
import pytest
if TYPE_CHECKING:
- from easynetwork.api_async.backend.abc import AbstractAsyncBackend
+ from unittest.mock import MagicMock
+
+ from easynetwork.api_async.backend.abc import AsyncBackend
from pytest_mock import MockerFixture
@final
class MockAsyncClient(AbstractAsyncNetworkClient[Any, Any]):
- def __init__(self, mocker: MockerFixture) -> None:
+ def __init__(self, mock_backend: MagicMock, mocker: MockerFixture) -> None:
super().__init__()
+ self.mock_backend = mock_backend
self.mock_wait_connected = mocker.AsyncMock(return_value=None)
self.mock_close = mocker.AsyncMock(return_value=None)
self.mock_recv_packet = mocker.AsyncMock()
@@ -45,19 +49,16 @@ async def send_packet(self, packet: Any) -> None:
async def recv_packet(self) -> Any:
return await self.mock_recv_packet()
- def fileno(self) -> int:
- raise NotImplementedError
-
- def get_backend(self) -> AbstractAsyncBackend:
- raise NotImplementedError
+ def get_backend(self) -> AsyncBackend:
+ return self.mock_backend
@pytest.mark.asyncio
class TestAbstractAsyncNetworkClient:
@pytest.fixture
@staticmethod
- def client(mocker: MockerFixture) -> MockAsyncClient:
- return MockAsyncClient(mocker)
+ def client(mock_backend: MagicMock, mocker: MockerFixture) -> MockAsyncClient:
+ return MockAsyncClient(mock_backend, mocker)
async def test____context____close_client_at_end(self, client: MockAsyncClient) -> None:
# Arrange
@@ -71,9 +72,11 @@ async def test____context____close_client_at_end(self, client: MockAsyncClient)
client.mock_wait_connected.assert_awaited_once_with()
client.mock_close.assert_awaited_once_with()
+ @pytest.mark.parametrize("timeout", [0, 123456.789, None])
@pytest.mark.parametrize("error", [OSError])
async def test____iter_received_packets____stop_if_an_error_occurs(
self,
+ timeout: float | None,
client: MockAsyncClient,
error: type[BaseException],
mocker: MockerFixture,
@@ -82,8 +85,78 @@ async def test____iter_received_packets____stop_if_an_error_occurs(
client.mock_recv_packet.side_effect = [mocker.sentinel.packet_a, error]
# Act
- packets = [p async for p in client.iter_received_packets()]
+ packets = [p async for p in client.iter_received_packets(timeout=timeout)]
# Assert
assert client.mock_recv_packet.mock_calls == [mocker.call() for _ in range(2)]
assert packets == [mocker.sentinel.packet_a]
+
+ async def test____iter_received_packets____timeout_decrement(
+ self,
+ client: MockAsyncClient,
+ mock_backend: MagicMock,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ client.mock_recv_packet.return_value = mocker.sentinel.packet
+ async_iterator = client.iter_received_packets(timeout=10)
+ now = 798546132
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ await anext(async_iterator)
+ await anext(async_iterator)
+ await anext(async_iterator)
+
+ # Assert
+ assert client.mock_recv_packet.call_args_list == [mocker.call() for _ in range(3)]
+ assert mock_backend.timeout.call_args_list == [
+ mocker.call(10),
+ mocker.call(4),
+ mocker.call(0),
+ ]
+
+ async def test____iter_received_packets____infinite_timeout(
+ self,
+ client: MockAsyncClient,
+ mock_backend: MagicMock,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ client.mock_recv_packet.return_value = mocker.sentinel.packet
+ async_iterator = client.iter_received_packets(timeout=None)
+ now = 798546132
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ await anext(async_iterator)
+ await anext(async_iterator)
+ await anext(async_iterator)
+
+ # Assert
+ assert client.mock_recv_packet.call_args_list == [mocker.call() for _ in range(3)]
+ assert mock_backend.timeout.call_args_list == [
+ mocker.call(math.inf),
+ mocker.call(math.inf),
+ mocker.call(math.inf),
+ ]
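The mocked time.perf_counter sequence pins down the intended semantics of iter_received_packets(timeout=...): each wait gets the remaining budget, the budget shrinks by the time the receive consumed (clamped at zero), and timeout=None maps to math.inf. The deadline arithmetic in isolation:

    import math
    import time

    def timeout_budget(timeout: float | None):
        """Yield the per-iteration timeout, decremented by the elapsed time (never below 0)."""
        remaining = math.inf if timeout is None else timeout
        while True:
            start = time.perf_counter()
            yield remaining
            remaining = max(remaining - (time.perf_counter() - start), 0.0)

    budget = timeout_budget(10)
    assert next(budget) == 10  # the first wait gets the full budget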
diff --git a/tests/unit_test/test_async/test_api/test_client/test_tcp.py b/tests/unit_test/test_async/test_api/test_client/test_tcp.py
index fdc43187..a1ed94c7 100644
--- a/tests/unit_test/test_async/test_api/test_client/test_tcp.py
+++ b/tests/unit_test/test_async/test_api/test_client/test_tcp.py
@@ -962,52 +962,6 @@ async def test____get_remote_address____error_connection_not_performed(
# Assert
mock_stream_socket_adapter.get_remote_address.assert_not_called()
- async def test____fileno____default(
- self,
- client_connected: AsyncTCPNetworkClient[Any, Any],
- mock_tcp_socket: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_tcp_socket.fileno.return_value = mocker.sentinel.fileno
-
- # Act
- fd = client_connected.fileno()
-
- # Assert
- mock_tcp_socket.fileno.assert_called_once_with()
- assert fd is mocker.sentinel.fileno
-
- async def test____fileno____connection_not_performed(
- self,
- client_not_connected: AsyncTCPNetworkClient[Any, Any],
- mock_tcp_socket: MagicMock,
- ) -> None:
- # Arrange
-
- # Act
- fd = client_not_connected.fileno()
-
- # Assert
- mock_tcp_socket.fileno.assert_not_called()
- assert fd == -1
-
- async def test____fileno____closed_client(
- self,
- client_connected_or_not: AsyncTCPNetworkClient[Any, Any],
- mock_tcp_socket: MagicMock,
- ) -> None:
- # Arrange
- await client_connected_or_not.aclose()
- assert client_connected_or_not.is_closing()
-
- # Act
- fd = client_connected_or_not.fileno()
-
- # Assert
- mock_tcp_socket.fileno.assert_not_called()
- assert fd == -1
-
@pytest.mark.usefixtures("setup_producer_mock")
async def test____send_packet____send_bytes_to_socket(
self,
@@ -1284,7 +1238,7 @@ async def test____recv_packet____partial_data(
packet: Any = await client_connected_or_not.recv_packet()
# Assert
- mock_backend.coro_yield.assert_awaited_once()
+ mock_backend.coro_yield.assert_not_awaited()
assert mock_stream_socket_adapter.recv.mock_calls == [mocker.call(MAX_STREAM_BUFSIZE) for _ in range(2)]
assert mock_stream_data_consumer.feed.mock_calls == [mocker.call(b"pac"), mocker.call(b"ket\n")]
assert packet is mocker.sentinel.packet
diff --git a/tests/unit_test/test_async/test_api/test_client/test_udp.py b/tests/unit_test/test_async/test_api/test_client/test_udp.py
index 22e88802..3815dd7e 100644
--- a/tests/unit_test/test_async/test_api/test_client/test_udp.py
+++ b/tests/unit_test/test_async/test_api/test_client/test_udp.py
@@ -1,7 +1,8 @@
from __future__ import annotations
+import math
from socket import AF_INET6
-from typing import TYPE_CHECKING, Any, cast
+from typing import TYPE_CHECKING, Any, Literal, cast
from easynetwork.api_async.client.udp import AsyncUDPNetworkClient, AsyncUDPNetworkEndpoint
from easynetwork.exceptions import ClientClosedError, DeserializeError
@@ -542,52 +543,6 @@ async def test____get_remote_address____error_connection_not_performed(
# Assert
mock_datagram_socket_adapter.get_remote_address.assert_not_called()
- async def test____fileno____default(
- self,
- client_bound: AsyncUDPNetworkClient[Any, Any],
- mock_udp_socket: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_udp_socket.fileno.return_value = mocker.sentinel.fileno
-
- # Act
- fd = client_bound.fileno()
-
- # Assert
- mock_udp_socket.fileno.assert_called_once_with()
- assert fd is mocker.sentinel.fileno
-
- async def test____fileno____connection_not_performed(
- self,
- client_not_bound: AsyncUDPNetworkEndpoint[Any, Any],
- mock_udp_socket: MagicMock,
- ) -> None:
- # Arrange
-
- # Act
- fd = client_not_bound.fileno()
-
- # Assert
- mock_udp_socket.fileno.assert_not_called()
- assert fd == -1
-
- async def test____fileno____closed_client(
- self,
- client_bound: AsyncUDPNetworkClient[Any, Any],
- mock_udp_socket: MagicMock,
- ) -> None:
- # Arrange
- await client_bound.aclose()
- assert client_bound.is_closing()
-
- # Act
- fd = client_bound.fileno()
-
- # Assert
- mock_udp_socket.fileno.assert_not_called()
- assert fd == -1
-
@pytest.mark.parametrize("remote_address", [False], indirect=True)
@pytest.mark.usefixtures("setup_protocol_mock")
async def test____send_packet_to____send_bytes_to_socket____without_remote____default(
@@ -908,6 +863,80 @@ async def test____iter_received_packets_from____closed_client_during_iteration(
with pytest.raises(StopAsyncIteration):
_ = await anext(iterator)
+ @pytest.mark.usefixtures("setup_protocol_mock")
+ async def test____iter_received_packets_from____timeout_decrement(
+ self,
+ client_bound_or_not: AsyncUDPNetworkEndpoint[Any, Any],
+ sender_address: tuple[str, int],
+ mock_backend: MagicMock,
+ mock_datagram_socket_adapter: MagicMock,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ mock_datagram_socket_adapter.recvfrom.return_value = (b"packet_1", sender_address)
+ async_iterator = client_bound_or_not.iter_received_packets_from(timeout=10)
+ now = 798546132
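+ # Elapsed time between paired clock reads is 6s, 5s then 0s, matching
+ # the asserted timeout decrement 10 -> 4 -> 0.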
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ await anext(async_iterator)
+ await anext(async_iterator)
+ await anext(async_iterator)
+
+ # Assert
+ assert mock_backend.timeout.call_args_list == [
+ mocker.call(10),
+ mocker.call(4),
+ mocker.call(0),
+ ]
+
+ @pytest.mark.usefixtures("setup_protocol_mock")
+ async def test____iter_received_packets_from____infinite_timeout(
+ self,
+ client_bound_or_not: AsyncUDPNetworkEndpoint[Any, Any],
+ sender_address: tuple[str, int],
+ mock_backend: MagicMock,
+ mock_datagram_socket_adapter: MagicMock,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ mock_datagram_socket_adapter.recvfrom.return_value = (b"packet_1", sender_address)
+ async_iterator = client_bound_or_not.iter_received_packets_from(timeout=None)
+ now = 798546132
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ await anext(async_iterator)
+ await anext(async_iterator)
+ await anext(async_iterator)
+
+ # Assert
+ assert mock_backend.timeout.call_args_list == [
+ mocker.call(math.inf),
+ mocker.call(math.inf),
+ mocker.call(math.inf),
+ ]
+
async def test____get_backend____default(
self,
client_bound_or_not: AsyncUDPNetworkEndpoint[Any, Any],
@@ -1167,14 +1196,16 @@ async def test____recv_packet____default(
mock_udp_endpoint.recv_packet_from.assert_awaited_once_with()
assert packet is mocker.sentinel.packet
+ @pytest.mark.parametrize("timeout", ["default", 0, 123456.789, None])
async def test____iter_received_packets____default(
self,
+ timeout: float | None | Literal["default"],
client: AsyncUDPNetworkClient[Any, Any],
mock_udp_endpoint: MagicMock,
mocker: MockerFixture,
) -> None:
# Arrange
- async def side_effect() -> Any:
+ async def side_effect(*, timeout: float | None = 0) -> Any:
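+ # The keyword-only ``timeout`` mirrors the endpoint method being
+ # stubbed; the forwarded value is asserted at the end of the test.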
yield (mocker.sentinel.packet_1, ("remote_address", 5000))
yield (mocker.sentinel.packet_2, ("remote_address", 5000))
yield (mocker.sentinel.packet_3, ("remote_address", 5000))
@@ -1182,28 +1213,18 @@ async def side_effect() -> Any:
mock_udp_endpoint.iter_received_packets_from.side_effect = side_effect
# Act
- packets = [p async for p in client.iter_received_packets()]
+ if timeout == "default":
+ packets = [p async for p in client.iter_received_packets()]
+ else:
+ packets = [p async for p in client.iter_received_packets(timeout=timeout)]
# Assert
- mock_udp_endpoint.iter_received_packets_from.assert_called_once_with()
+ if timeout == "default":
+ mock_udp_endpoint.iter_received_packets_from.assert_called_once_with(timeout=0)
+ else:
+ mock_udp_endpoint.iter_received_packets_from.assert_called_once_with(timeout=timeout)
assert packets == [mocker.sentinel.packet_1, mocker.sentinel.packet_2, mocker.sentinel.packet_3]
- async def test____fileno____default(
- self,
- client: AsyncUDPNetworkClient[Any, Any],
- mock_udp_endpoint: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_udp_endpoint.fileno.return_value = mocker.sentinel.fd
-
- # Act
- fd = client.fileno()
-
- # Assert
- mock_udp_endpoint.fileno.assert_called_once_with()
- assert fd is mocker.sentinel.fd
-
async def test____get_backend____default(
self,
client: AsyncUDPNetworkClient[Any, Any],
diff --git a/tests/unit_test/test_async/test_api/test_server/test_handler.py b/tests/unit_test/test_async/test_api/test_server/test_handler.py
index 7bc68ac2..d804ca33 100644
--- a/tests/unit_test/test_async/test_api/test_server/test_handler.py
+++ b/tests/unit_test/test_async/test_api/test_server/test_handler.py
@@ -2,17 +2,14 @@
from __future__ import annotations
+import contextlib
import inspect
from collections.abc import AsyncGenerator
from typing import TYPE_CHECKING, Any
-from easynetwork.api_async.server.handler import (
- AsyncBaseClientInterface,
- AsyncBaseRequestHandler,
- AsyncDatagramRequestHandler,
- AsyncStreamRequestHandler,
-)
-from easynetwork.exceptions import BaseProtocolParseError
+from easynetwork.api_async.server.handler import AsyncBaseClientInterface, AsyncDatagramRequestHandler, AsyncStreamRequestHandler
+from easynetwork.api_async.server.tcp import AsyncTCPNetworkServer
+from easynetwork.api_async.server.udp import AsyncUDPNetworkServer
import pytest
@@ -22,15 +19,12 @@
from pytest_mock import MockerFixture
-class BaseFakeHandler(AsyncBaseRequestHandler):
+class BaseFakeHandler:
__slots__ = ()
def handle(self, client: AsyncBaseClientInterface[Any]) -> AsyncGenerator[None, Any]:
raise NotImplementedError
- async def bad_request(self, client: AsyncBaseClientInterface[Any], exc: BaseProtocolParseError, /) -> None:
- pass
-
class FakeStreamHandler(BaseFakeHandler, AsyncStreamRequestHandler[Any, Any]):
__slots__ = ()
@@ -41,67 +35,49 @@ class FakeDatagramHandler(BaseFakeHandler, AsyncDatagramRequestHandler[Any, Any]
@pytest.mark.asyncio
-class BaseCommonTestsForRequestHandler:
- async def test____set_async_backend____return_None(
- self,
- mock_backend: MagicMock,
- request_handler: AsyncBaseRequestHandler,
- ) -> None:
- # Arrange
+class TestAsyncDatagramRequestHandler:
+ @pytest.fixture
+ @staticmethod
+ def request_handler() -> AsyncDatagramRequestHandler[Any, Any]:
+ return FakeDatagramHandler()
- # Act & Assert
- assert request_handler.set_async_backend(mock_backend) is None
+ @pytest.fixture
+ @staticmethod
+ def mock_server(mocker: MockerFixture) -> MagicMock:
+ return mocker.NonCallableMagicMock(spec=AsyncUDPNetworkServer)
async def test____service_init____return_None(
self,
- request_handler: AsyncBaseRequestHandler,
- ) -> None:
- # Arrange
-
- # Act & Assert
- assert (await request_handler.service_init()) is None
-
- async def test____service_quit____return_None(
- self,
- request_handler: AsyncBaseRequestHandler,
- ) -> None:
- # Arrange
-
- # Act & Assert
- assert (await request_handler.service_quit()) is None
-
- async def test____service_actions____return_None(
- self,
- request_handler: AsyncBaseRequestHandler,
+ request_handler: AsyncDatagramRequestHandler[Any, Any],
+ mock_server: MagicMock,
) -> None:
# Arrange
# Act & Assert
- assert (await request_handler.service_actions()) is None
+ assert (await request_handler.service_init(contextlib.AsyncExitStack(), mock_server)) is None
-class TestAsyncDatagramRequestHandler(BaseCommonTestsForRequestHandler):
+@pytest.mark.asyncio
+class TestAsyncStreamRequestHandler:
@pytest.fixture
@staticmethod
- def request_handler() -> AsyncDatagramRequestHandler[Any, Any]:
- return FakeDatagramHandler()
-
+ def request_handler() -> AsyncStreamRequestHandler[Any, Any]:
+ return FakeStreamHandler()
-class TestAsyncStreamRequestHandler(BaseCommonTestsForRequestHandler):
@pytest.fixture
@staticmethod
- def request_handler() -> AsyncStreamRequestHandler[Any, Any]:
- return FakeStreamHandler()
+ def mock_server(mocker: MockerFixture) -> MagicMock:
+ return mocker.NonCallableMagicMock(spec=AsyncTCPNetworkServer)
- async def test____set_stop_listening_callback____return_None(
+ async def test____service_init____return_None(
self,
request_handler: AsyncStreamRequestHandler[Any, Any],
- mocker: MockerFixture,
+ mock_server: MagicMock,
) -> None:
# Arrange
# Act & Assert
- assert request_handler.set_stop_listening_callback(mocker.stub()) is None
+ assert (await request_handler.service_init(contextlib.AsyncExitStack(), mock_server)) is None
async def test____on_connection____return_None(
self,
diff --git a/tests/unit_test/test_async/test_asyncio_backend/test_backend.py b/tests/unit_test/test_async/test_asyncio_backend/test_backend.py
index b3428fcf..18d2e19d 100644
--- a/tests/unit_test/test_async/test_asyncio_backend/test_backend.py
+++ b/tests/unit_test/test_async/test_asyncio_backend/test_backend.py
@@ -7,7 +7,7 @@
from socket import AF_INET
from typing import TYPE_CHECKING, Any, cast
-from easynetwork.api_async.backend.abc import AbstractAsyncStreamSocketAdapter
+from easynetwork.api_async.backend.abc import AsyncStreamSocketAdapter
from easynetwork_asyncio import AsyncioBackend
import pytest
@@ -156,7 +156,7 @@ async def test____create_tcp_connection____use_asyncio_open_connection(
}
# Act
- socket: AbstractAsyncStreamSocketAdapter
+ socket: AsyncStreamSocketAdapter
if ssl:
socket = await backend.create_ssl_over_tcp_connection(
*remote_address,
diff --git a/tests/unit_test/test_async/test_asyncio_backend/test_stream.py b/tests/unit_test/test_async/test_asyncio_backend/test_stream.py
index df7543e5..df0667f3 100644
--- a/tests/unit_test/test_async/test_asyncio_backend/test_stream.py
+++ b/tests/unit_test/test_async/test_asyncio_backend/test_stream.py
@@ -10,7 +10,7 @@
from socket import SHUT_WR
from typing import TYPE_CHECKING, Any
-from easynetwork.api_async.backend.abc import AbstractAcceptedSocket
+from easynetwork.api_async.backend.abc import AcceptedSocket as AbstractAcceptedSocket
from easynetwork_asyncio.stream.listener import AcceptedSocket, AcceptedSSLSocket, ListenerSocketAdapter
from easynetwork_asyncio.stream.socket import (
AsyncioTransportHalfCloseableStreamSocketAdapter,
diff --git a/tests/unit_test/test_converter.py b/tests/unit_test/test_converter.py
index 936d883f..13d33542 100644
--- a/tests/unit_test/test_converter.py
+++ b/tests/unit_test/test_converter.py
@@ -3,7 +3,7 @@
from collections.abc import Callable
from typing import TYPE_CHECKING, Any
-from easynetwork.converter import AbstractPacketConverterComposite, PacketConverterComposite, RequestResponseConverterBuilder
+from easynetwork.converter import StapledPacketConverter
import pytest
@@ -13,113 +13,61 @@
from pytest_mock import MockerFixture
-class TestPacketConverterComposite:
+class TestStapledPacketConverter:
@pytest.fixture
@staticmethod
- def create_from_dto_stub(mocker: MockerFixture) -> MagicMock:
- return mocker.MagicMock(spec=lambda packet: None, name="create_from_dto_stub")
+ def mock_sent_packet_converter(mock_converter_factory: Callable[[], MagicMock]) -> MagicMock:
+ return mock_converter_factory()
@pytest.fixture
@staticmethod
- def convert_to_dto_stub(mocker: MockerFixture) -> MagicMock:
- return mocker.MagicMock(spec=lambda obj: None, name="convert_to_dto_stub")
+ def mock_received_packet_converter(mock_converter_factory: Callable[[], MagicMock]) -> MagicMock:
+ return mock_converter_factory()
@pytest.fixture
@staticmethod
def converter(
- create_from_dto_stub: MagicMock,
- convert_to_dto_stub: MagicMock,
- ) -> PacketConverterComposite[Any, Any, Any, Any]:
- return PacketConverterComposite(convert_to_dto_stub, create_from_dto_stub)
+ mock_sent_packet_converter: MagicMock,
+ mock_received_packet_converter: MagicMock,
+ ) -> StapledPacketConverter[Any, Any, Any]:
+ return StapledPacketConverter(mock_sent_packet_converter, mock_received_packet_converter)
- def test____create_from_dto_packet____callback(
+ def test____create_from_dto_packet____default(
self,
- converter: PacketConverterComposite[Any, Any, Any, Any],
- create_from_dto_stub: MagicMock,
+ converter: StapledPacketConverter[Any, Any, Any],
+ mock_sent_packet_converter: MagicMock,
+ mock_received_packet_converter: MagicMock,
mocker: MockerFixture,
) -> None:
# Arrange
- create_from_dto_stub.return_value = mocker.sentinel.packet
+ mock_received_packet_converter.create_from_dto_packet.return_value = mocker.sentinel.packet
# Act
packet = converter.create_from_dto_packet(mocker.sentinel.dto_packet)
# Assert
- create_from_dto_stub.assert_called_once_with(mocker.sentinel.dto_packet)
+ mock_sent_packet_converter.convert_to_dto_packet.assert_not_called()
+ mock_sent_packet_converter.create_from_dto_packet.assert_not_called()
+ mock_received_packet_converter.convert_to_dto_packet.assert_not_called()
+ mock_received_packet_converter.create_from_dto_packet.assert_called_once_with(mocker.sentinel.dto_packet)
assert packet is mocker.sentinel.packet
def test____convert_to_dto_packet____callback(
self,
- converter: PacketConverterComposite[Any, Any, Any, Any],
- convert_to_dto_stub: MagicMock,
+ converter: StapledPacketConverter[Any, Any, Any],
+ mock_sent_packet_converter: MagicMock,
+ mock_received_packet_converter: MagicMock,
mocker: MockerFixture,
) -> None:
# Arrange
- convert_to_dto_stub.return_value = mocker.sentinel.dto_packet
+ mock_sent_packet_converter.convert_to_dto_packet.return_value = mocker.sentinel.dto_packet
# Act
dto_packet = converter.convert_to_dto_packet(mocker.sentinel.packet)
# Assert
- convert_to_dto_stub.assert_called_once_with(mocker.sentinel.packet)
+ mock_sent_packet_converter.convert_to_dto_packet.assert_called_once_with(mocker.sentinel.packet)
+ mock_sent_packet_converter.create_from_dto_packet.assert_not_called()
+ mock_received_packet_converter.convert_to_dto_packet.assert_not_called()
+ mock_received_packet_converter.create_from_dto_packet.assert_not_called()
assert dto_packet is mocker.sentinel.dto_packet
-
-
-class TestRequestResponseConverterBuilder:
- @pytest.fixture
- @staticmethod
- def mock_request_converter(mock_converter_factory: Callable[[], MagicMock]) -> MagicMock:
- return mock_converter_factory()
-
- @pytest.fixture
- @staticmethod
- def mock_response_converter(mock_converter_factory: Callable[[], MagicMock]) -> MagicMock:
- return mock_converter_factory()
-
- def test____build_for_client____creates_composite_with_two_converters(
- self,
- mock_request_converter: MagicMock,
- mock_response_converter: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_request_converter.convert_to_dto_packet.return_value = mocker.sentinel.dto_request
- mock_response_converter.create_from_dto_packet.return_value = mocker.sentinel.response
-
- # Act
- converter: AbstractPacketConverterComposite[Any, Any, Any, Any]
- converter = RequestResponseConverterBuilder.build_for_client(mock_request_converter, mock_response_converter)
- dto_request = converter.convert_to_dto_packet(mocker.sentinel.request)
- response = converter.create_from_dto_packet(mocker.sentinel.dto_response)
-
- # Assert
- assert dto_request is mocker.sentinel.dto_request
- assert response is mocker.sentinel.response
- mock_request_converter.convert_to_dto_packet.assert_called_once_with(mocker.sentinel.request)
- mock_response_converter.create_from_dto_packet.assert_called_once_with(mocker.sentinel.dto_response)
- mock_request_converter.create_from_dto_packet.assert_not_called()
- mock_response_converter.convert_to_dto_packet.assert_not_called()
-
- def test____build_for_server____creates_composite_with_two_converters(
- self,
- mock_request_converter: MagicMock,
- mock_response_converter: MagicMock,
- mocker: MockerFixture,
- ) -> None:
- # Arrange
- mock_request_converter.create_from_dto_packet.return_value = mocker.sentinel.request
- mock_response_converter.convert_to_dto_packet.return_value = mocker.sentinel.dto_response
-
- # Act
- converter: AbstractPacketConverterComposite[Any, Any, Any, Any]
- converter = RequestResponseConverterBuilder.build_for_server(mock_request_converter, mock_response_converter)
- request = converter.create_from_dto_packet(mocker.sentinel.dto_request)
- dto_response = converter.convert_to_dto_packet(mocker.sentinel.response)
-
- # Assert
- assert request is mocker.sentinel.request
- assert dto_response is mocker.sentinel.dto_response
- mock_request_converter.create_from_dto_packet.assert_called_once_with(mocker.sentinel.dto_request)
- mock_response_converter.convert_to_dto_packet.assert_called_once_with(mocker.sentinel.response)
- mock_request_converter.convert_to_dto_packet.assert_not_called()
- mock_response_converter.create_from_dto_packet.assert_not_called()
diff --git a/tests/unit_test/test_serializers/test_abc.py b/tests/unit_test/test_serializers/test_abc.py
index 61014ab0..cde00f51 100644
--- a/tests/unit_test/test_serializers/test_abc.py
+++ b/tests/unit_test/test_serializers/test_abc.py
@@ -22,7 +22,7 @@
@final
-class _IncrementalPacketSerializerForTest(AbstractIncrementalPacketSerializer[Any, Any]):
+class _IncrementalPacketSerializerForTest(AbstractIncrementalPacketSerializer[Any]):
def incremental_serialize(self, packet: Any) -> Generator[bytes, None, None]:
raise NotImplementedError
@@ -116,13 +116,14 @@ def test____deserialize____raise_error_if_extra_data_is_present(
mock_consumer_generator.send.side_effect = StopIteration((mocker.sentinel.packet, b"extra"))
# Act
- with pytest.raises(DeserializeError, match=r"^Extra data caught$"):
+ with pytest.raises(DeserializeError, match=r"^Extra data caught$") as exc_info:
_ = serializer.deserialize(mocker.sentinel.data)
# Assert
mock_incremental_deserialize_func.assert_called_once_with()
mock_consumer_generator.__next__.assert_called_once_with()
mock_consumer_generator.send.assert_called_once_with(mocker.sentinel.data)
+ assert exc_info.value.error_info == {"packet": mocker.sentinel.packet, "extra": b"extra"}
def test____deserialize____consumer_did_not_yield(
self,
@@ -144,7 +145,7 @@ def test____deserialize____consumer_did_not_yield(
mock_consumer_generator.send.assert_not_called()
-class _AutoSeparatedPacketSerializerForTest(AutoSeparatedPacketSerializer[Any, Any]):
+class _AutoSeparatedPacketSerializerForTest(AutoSeparatedPacketSerializer[Any]):
def serialize(self, packet: Any) -> bytes:
raise NotImplementedError
@@ -387,7 +388,7 @@ def test____incremental_deserialize____translate_deserialize_errors(
assert exception.error_info is mocker.sentinel.error_info
-class _FixedSizePacketSerializerForTest(FixedSizePacketSerializer[Any, Any]):
+class _FixedSizePacketSerializerForTest(FixedSizePacketSerializer[Any]):
def serialize(self, packet: Any) -> bytes:
raise NotImplementedError
@@ -565,7 +566,7 @@ def test____incremental_deserialize____translate_deserialize_errors(
assert exception.error_info is mocker.sentinel.error_info
-class _FileBasedPacketSerializerForTest(FileBasedPacketSerializer[Any, Any]):
+class _FileBasedPacketSerializerForTest(FileBasedPacketSerializer[Any]):
def dump_to_file(self, packet: Any, file: IO[bytes]) -> None:
raise NotImplementedError
diff --git a/tests/unit_test/test_serializers/test_base64.py b/tests/unit_test/test_serializers/test_base64.py
index 88e3f257..45b5d799 100644
--- a/tests/unit_test/test_serializers/test_base64.py
+++ b/tests/unit_test/test_serializers/test_base64.py
@@ -53,7 +53,7 @@ def test____serialize____encode_previously_serialized_data(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: Base64EncoderSerializer[Any, Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
+ serializer: Base64EncoderSerializer[Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
mock_serializer.serialize.return_value = mocker.sentinel.data_not_encoded
mock_b64encode.return_value = mocker.sentinel.data_encoded
@@ -73,7 +73,7 @@ def test____deserialize____decode_token_then_call_subsequent_deserialize(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: Base64EncoderSerializer[Any, Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
+ serializer: Base64EncoderSerializer[Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
mock_b64decode.return_value = mocker.sentinel.data_not_encoded
mock_serializer.deserialize.return_value = mocker.sentinel.packet
@@ -95,7 +95,7 @@ def test____deserialize____translate_binascii_errors(
# Arrange
import binascii
- serializer: Base64EncoderSerializer[Any, Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
+ serializer: Base64EncoderSerializer[Any] = Base64EncoderSerializer(mock_serializer, alphabet=alphabet)
mock_b64decode.side_effect = binascii.Error()
# Act
@@ -118,7 +118,7 @@ def test____alphabet____urlsafe_by_default(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: Base64EncoderSerializer[Any, Any] = Base64EncoderSerializer(mock_serializer)
+ serializer: Base64EncoderSerializer[Any] = Base64EncoderSerializer(mock_serializer)
mock_serializer.serialize.return_value = mocker.sentinel.data_not_encoded
mock_b64encode.return_value = mocker.sentinel.data_encoded
mock_b64decode.return_value = mocker.sentinel.data_not_encoded
diff --git a/tests/unit_test/test_serializers/test_cbor.py b/tests/unit_test/test_serializers/test_cbor.py
index 23c79558..fcb68899 100644
--- a/tests/unit_test/test_serializers/test_cbor.py
+++ b/tests/unit_test/test_serializers/test_cbor.py
@@ -19,7 +19,7 @@
class TestCBORSerializer(BaseSerializerConfigInstanceCheck):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[CBORSerializer[Any, Any]]:
+ def serializer_cls() -> type[CBORSerializer]:
return CBORSerializer
@pytest.fixture(params=["encoder", "decoder"])
@@ -104,7 +104,7 @@ def test____dump_to_file____with_config(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: CBORSerializer[Any, Any] = CBORSerializer(encoder_config=encoder_config)
+ serializer: CBORSerializer = CBORSerializer(encoder_config=encoder_config)
mock_encoder.encode.return_value = None
# Act
@@ -132,7 +132,7 @@ def test____load_from_file____with_config(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: CBORSerializer[Any, Any] = CBORSerializer(decoder_config=decoder_config)
+ serializer: CBORSerializer = CBORSerializer(decoder_config=decoder_config)
mock_decoder.decode.return_value = mocker.sentinel.packet
# Act
diff --git a/tests/unit_test/test_serializers/test_compressor.py b/tests/unit_test/test_serializers/test_compressor.py
index 7b032a1e..260ba413 100644
--- a/tests/unit_test/test_serializers/test_compressor.py
+++ b/tests/unit_test/test_serializers/test_compressor.py
@@ -18,7 +18,7 @@
from pytest_mock import MockerFixture
-class _CompressorSerializerForTest(AbstractCompressorSerializer[Any, Any]):
+class _CompressorSerializerForTest(AbstractCompressorSerializer[Any]):
def new_compressor_stream(self) -> Any:
raise NotImplementedError
@@ -366,7 +366,7 @@ class BaseTestCompressorSerializerImplementation:
@pytest.mark.parametrize("method", ["serialize", "incremental_serialize", "deserialize", "incremental_deserialize"])
def test____base_class____implements_default_methods(
self,
- serializer_cls: type[AbstractCompressorSerializer[Any, Any]],
+ serializer_cls: type[AbstractCompressorSerializer[Any]],
method: str,
) -> None:
# Arrange
@@ -378,7 +378,7 @@ def test____base_class____implements_default_methods(
class TestBZ2CompressorSerializer(BaseTestCompressorSerializerImplementation):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[BZ2CompressorSerializer[Any, Any]]:
+ def serializer_cls() -> type[BZ2CompressorSerializer[Any]]:
return BZ2CompressorSerializer
@pytest.fixture(autouse=True)
@@ -406,7 +406,7 @@ def test____new_compressor_stream____returns_bz2_compressor(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: BZ2CompressorSerializer[Any, Any]
+ serializer: BZ2CompressorSerializer[Any]
if with_compress_level:
serializer = BZ2CompressorSerializer(mock_serializer, compress_level=mocker.sentinel.compresslevel)
else:
@@ -430,7 +430,7 @@ def test____new_decompressor_stream____returns_bz2_decompressor(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: BZ2CompressorSerializer[Any, Any] = BZ2CompressorSerializer(mock_serializer)
+ serializer: BZ2CompressorSerializer[Any] = BZ2CompressorSerializer(mock_serializer)
mock_bz2_decompressor_cls.return_value = mocker.sentinel.stream
# Act
@@ -444,7 +444,7 @@ def test____new_decompressor_stream____returns_bz2_decompressor(
class TestZlibCompressorSerializer(BaseTestCompressorSerializerImplementation):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[ZlibCompressorSerializer[Any, Any]]:
+ def serializer_cls() -> type[ZlibCompressorSerializer[Any]]:
return ZlibCompressorSerializer
@pytest.fixture
@@ -472,7 +472,7 @@ def test____new_compressor_stream____returns_bz2_compressor(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: ZlibCompressorSerializer[Any, Any]
+ serializer: ZlibCompressorSerializer[Any]
if with_compress_level:
serializer = ZlibCompressorSerializer(mock_serializer, compress_level=mocker.sentinel.compresslevel)
else:
@@ -496,7 +496,7 @@ def test____new_decompressor_stream____returns_bz2_decompressor(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: ZlibCompressorSerializer[Any, Any] = ZlibCompressorSerializer(mock_serializer)
+ serializer: ZlibCompressorSerializer[Any] = ZlibCompressorSerializer(mock_serializer)
mock_zlib_decompressor_cls.return_value = mocker.sentinel.stream
# Act
diff --git a/tests/unit_test/test_serializers/test_encryptor.py b/tests/unit_test/test_serializers/test_encryptor.py
index e1b7d9cb..e4bbc02e 100644
--- a/tests/unit_test/test_serializers/test_encryptor.py
+++ b/tests/unit_test/test_serializers/test_encryptor.py
@@ -64,7 +64,7 @@ def test____serialize____encrypt_data(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: EncryptorSerializer[Any, Any] = EncryptorSerializer(mock_serializer, key=mocker.sentinel.key)
+ serializer: EncryptorSerializer[Any] = EncryptorSerializer(mock_serializer, key=mocker.sentinel.key)
mock_serializer.serialize.return_value = mocker.sentinel.data_before_encryption
mock_fernet.encrypt.return_value = mocker.sentinel.encrypted_data
@@ -83,7 +83,7 @@ def test____deserialize____decrypt_data(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: EncryptorSerializer[Any, Any] = EncryptorSerializer(
+ serializer: EncryptorSerializer[Any] = EncryptorSerializer(
mock_serializer,
key=mocker.sentinel.key,
token_ttl=mocker.sentinel.token_ttl,
@@ -108,7 +108,7 @@ def test____deserialize____translate_fernet_errors(
# Arrange
from cryptography.fernet import InvalidToken
- serializer: EncryptorSerializer[Any, Any] = EncryptorSerializer(
+ serializer: EncryptorSerializer[Any] = EncryptorSerializer(
mock_serializer,
key=mocker.sentinel.key,
token_ttl=mocker.sentinel.token_ttl,
diff --git a/tests/unit_test/test_serializers/test_json.py b/tests/unit_test/test_serializers/test_json.py
index fad04baa..0fc0eaf0 100644
--- a/tests/unit_test/test_serializers/test_json.py
+++ b/tests/unit_test/test_serializers/test_json.py
@@ -20,7 +20,7 @@
class TestJSONSerializer(BaseSerializerConfigInstanceCheck):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[JSONSerializer[Any, Any]]:
+ def serializer_cls() -> type[JSONSerializer]:
return JSONSerializer
@pytest.fixture(params=["encoder", "decoder"])
@@ -140,7 +140,7 @@ def test____serialize____encode_packet(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
@@ -161,7 +161,7 @@ def test____incremental_serialize____encode_packet(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
@@ -182,7 +182,7 @@ def test____deserialize____decode_data(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
@@ -204,7 +204,7 @@ def test____deserialize____translate_unicode_decode_errors(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: JSONSerializer[Any, Any] = JSONSerializer()
+ serializer: JSONSerializer = JSONSerializer()
mock_bytes = mocker.NonCallableMagicMock()
mock_bytes.decode.side_effect = UnicodeDecodeError("some encoding", b"invalid data", 0, 2, "Bad encoding ?")
@@ -227,7 +227,7 @@ def test____deserialize____translate_json_decode_errors(
# Arrange
from json import JSONDecodeError
- serializer: JSONSerializer[Any, Any] = JSONSerializer()
+ serializer: JSONSerializer = JSONSerializer()
mock_bytes = mocker.NonCallableMagicMock()
mock_decoder.decode.side_effect = JSONDecodeError("Invalid payload", "invalid\ndocument", 8)
@@ -259,7 +259,7 @@ def raw_parse_side_effect() -> Generator[None, bytes, tuple[bytes, bytes]]:
assert data is mocker.sentinel.data
return mock_bytes, b"Hello World !"
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
@@ -292,7 +292,7 @@ def raw_parse_side_effect() -> Generator[None, bytes, tuple[bytes, bytes]]:
assert data is mocker.sentinel.data
return mock_bytes, mocker.sentinel.remaining_data
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
@@ -328,7 +328,7 @@ def raw_parse_side_effect() -> Generator[None, bytes, tuple[bytes, bytes]]:
assert data is mocker.sentinel.data
return mock_bytes, mocker.sentinel.remaining_data
- serializer: JSONSerializer[Any, Any] = JSONSerializer(
+ serializer: JSONSerializer = JSONSerializer(
encoding=mocker.sentinel.encoding,
unicode_errors=mocker.sentinel.str_errors,
)
diff --git a/tests/unit_test/test_serializers/test_msgpack.py b/tests/unit_test/test_serializers/test_msgpack.py
index 7524113d..58eeded3 100644
--- a/tests/unit_test/test_serializers/test_msgpack.py
+++ b/tests/unit_test/test_serializers/test_msgpack.py
@@ -20,7 +20,7 @@
class TestMessagePackSerializer(BaseSerializerConfigInstanceCheck):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[MessagePackSerializer[Any, Any]]:
+ def serializer_cls() -> type[MessagePackSerializer]:
return MessagePackSerializer
@pytest.fixture(params=["packer", "unpacker"])
@@ -78,7 +78,7 @@ def test____serialize____with_config(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: MessagePackSerializer[Any, Any] = MessagePackSerializer(packer_config=packer_config)
+ serializer: MessagePackSerializer = MessagePackSerializer(packer_config=packer_config)
mock_packb.return_value = mocker.sentinel.result
# Act
@@ -106,7 +106,7 @@ def test____deserialize____with_config(
# Arrange
import msgpack
- serializer: MessagePackSerializer[Any, Any] = MessagePackSerializer(unpacker_config=unpacker_config)
+ serializer: MessagePackSerializer = MessagePackSerializer(unpacker_config=unpacker_config)
mock_unpackb.return_value = mocker.sentinel.packet
# Act
@@ -134,7 +134,7 @@ def test____deserialize____missing_data(
# Arrange
import msgpack
- serializer: MessagePackSerializer[Any, Any] = MessagePackSerializer()
+ serializer: MessagePackSerializer = MessagePackSerializer()
mock_unpackb.side_effect = msgpack.OutOfData
# Act & Assert
@@ -153,7 +153,7 @@ def test____deserialize____extra_data(
# Arrange
import msgpack
- serializer: MessagePackSerializer[Any, Any] = MessagePackSerializer()
+ serializer: MessagePackSerializer = MessagePackSerializer()
mock_unpackb.side_effect = msgpack.ExtraData(mocker.sentinel.packet, b"extra")
# Act & Assert
@@ -170,7 +170,7 @@ def test____deserialize____any_exception_occurs(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: MessagePackSerializer[Any, Any] = MessagePackSerializer()
+ serializer: MessagePackSerializer = MessagePackSerializer()
mock_unpackb.side_effect = Exception
# Act & Assert
diff --git a/tests/unit_test/test_serializers/test_pickle.py b/tests/unit_test/test_serializers/test_pickle.py
index 1574044e..0ee17ea5 100644
--- a/tests/unit_test/test_serializers/test_pickle.py
+++ b/tests/unit_test/test_serializers/test_pickle.py
@@ -22,7 +22,7 @@
class TestPickleSerializer(BaseSerializerConfigInstanceCheck):
@pytest.fixture(scope="class")
@staticmethod
- def serializer_cls() -> type[PickleSerializer[Any, Any]]:
+ def serializer_cls() -> type[PickleSerializer]:
return PickleSerializer
@pytest.fixture(params=["pickler", "unpickler"])
@@ -123,7 +123,7 @@ def test____serialize____with_config(
) -> None:
# Arrange
mock_pickletools_optimize.return_value = b"optimized pickle"
- serializer: PickleSerializer[Any, Any] = PickleSerializer(
+ serializer: PickleSerializer = PickleSerializer(
pickler_config=pickler_config,
pickler_optimize=pickler_optimize,
)
@@ -159,7 +159,7 @@ def test____serialize____custom_pickler_cls(
# Arrange
mock_other_pickler_cls: MagicMock = mocker.stub()
mock_other_pickler: MagicMock = mock_other_pickler_cls.return_value
- serializer: PickleSerializer[Any, Any] = PickleSerializer(
+ serializer: PickleSerializer = PickleSerializer(
pickler_cls=mock_other_pickler_cls,
pickler_optimize=pickler_optimize,
)
@@ -187,7 +187,7 @@ def test____deserialize____with_config(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: PickleSerializer[Any, Any] = PickleSerializer(unpickler_config=unpickler_config)
+ serializer: PickleSerializer = PickleSerializer(unpickler_config=unpickler_config)
mock_unpickler.load.return_value = mocker.sentinel.packet
# Act
@@ -213,7 +213,7 @@ def test____deserialize____custom_unpickler_cls(
# Arrange
mock_other_unpickler_cls: MagicMock = mocker.stub()
mock_other_unpickler: MagicMock = mock_other_unpickler_cls.return_value
- serializer: PickleSerializer[Any, Any] = PickleSerializer(unpickler_cls=mock_other_unpickler_cls)
+ serializer: PickleSerializer = PickleSerializer(unpickler_cls=mock_other_unpickler_cls)
mock_other_unpickler.load.return_value = mocker.sentinel.packet
del mock_unpickler.load
@@ -241,7 +241,7 @@ def test____deserialize____deserialize_error(
mock_unpickler: MagicMock,
) -> None:
# Arrange
- serializer: PickleSerializer[Any, Any] = PickleSerializer(unpickler_config=unpickler_config)
+ serializer: PickleSerializer = PickleSerializer(unpickler_config=unpickler_config)
mock_unpickler.load.side_effect = exception()
# Act
@@ -261,7 +261,7 @@ def test____deserialize____extra_data(
mocker: MockerFixture,
) -> None:
# Arrange
- serializer: PickleSerializer[Any, Any] = PickleSerializer(unpickler_config=unpickler_config)
+ serializer: PickleSerializer = PickleSerializer(unpickler_config=unpickler_config)
def unpickler_load() -> Any:
assert bytes_io.read(2) == b"da"
diff --git a/tests/unit_test/test_serializers/test_struct.py b/tests/unit_test/test_serializers/test_struct.py
index e3f87da4..40e6da28 100644
--- a/tests/unit_test/test_serializers/test_struct.py
+++ b/tests/unit_test/test_serializers/test_struct.py
@@ -16,7 +16,7 @@
@final
-class _StructSerializerForTest(AbstractStructSerializer[Any, Any]):
+class _StructSerializerForTest(AbstractStructSerializer[Any]):
def iter_values(self, packet: Any) -> Iterable[Any]:
raise NotImplementedError
@@ -211,6 +211,22 @@ def test____dunder_init____compute_right_format(
# Assert
mock_struct_cls.assert_called_once_with(expected_format)
+ @pytest.mark.parametrize("endianness", ["z", "word"], ids=repr)
+ def test____dunder_init____invalid_endianness_character(
+ self,
+ endianness: str,
+ mock_struct_cls: MagicMock,
+ ) -> None:
+ # Arrange
+ namedtuple_cls = collections.namedtuple("namedtuple_cls", ["x", "y"])
+ field_formats: dict[str, str] = {"x": "Q", "y": "I"}
+
+ # Act & Assert
+ with pytest.raises(ValueError, match=r"^Invalid endianness character$"):
+ _ = NamedTupleStructSerializer(namedtuple_cls, field_formats, format_endianness=endianness)
+
+ mock_struct_cls.assert_not_called()
+
@pytest.mark.parametrize("endianness", sorted(_ENDIANNESS_CHARACTERS), ids=repr)
@pytest.mark.parametrize("field", ["x", "y"], ids=repr)
def test____dunder_init____endianness_character_in_fields_format_error(
diff --git a/tests/unit_test/test_sync/test_client/test_abc.py b/tests/unit_test/test_sync/test_client/test_abc.py
index d4562c3b..603aace2 100644
--- a/tests/unit_test/test_sync/test_client/test_abc.py
+++ b/tests/unit_test/test_sync/test_client/test_abc.py
@@ -99,3 +99,36 @@ def test____iter_received_packets____with_given_timeout_stop_if_an_error_occurs(
# Assert
assert client.mock_recv_packet.mock_calls == [mocker.call(timeout) for _ in range(2)]
assert packets == [mocker.sentinel.packet_a]
+
+ def test____iter_received_packets____timeout_decrement(
+ self,
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ client = MockClient(mocker)
+ client.mock_recv_packet.return_value = mocker.sentinel.packet
+ iterator = client.iter_received_packets(timeout=10)
+ now: float = 798546132
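+ # The paired perf_counter reads yield elapsed times of 6s, 5s and 0s,
+ # so recv_packet should be called with 10, 4 and finally 0 (never below).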
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ next(iterator)
+ next(iterator)
+ next(iterator)
+
+ # Assert
+ assert client.mock_recv_packet.call_args_list == [
+ mocker.call(10),
+ mocker.call(4),
+ mocker.call(0),
+ ]
diff --git a/tests/unit_test/test_sync/test_client/test_udp.py b/tests/unit_test/test_sync/test_client/test_udp.py
index 3db7f1d5..ec839575 100644
--- a/tests/unit_test/test_sync/test_client/test_udp.py
+++ b/tests/unit_test/test_sync/test_client/test_udp.py
@@ -1025,6 +1025,40 @@ def test____iter_received_packets_from____closed_client_during_iteration(
with pytest.raises(StopIteration):
_ = next(iterator)
+ def test____iter_received_packets_from____timeout_decrement(
+ self,
+ client: UDPNetworkEndpoint[Any, Any],
+ sender_address: tuple[str, int],
+ mocker: MockerFixture,
+ ) -> None:
+ # Arrange
+ mock_recv_packet_from = mocker.patch.object(UDPNetworkEndpoint, "recv_packet_from", return_value=(b"packet", sender_address))
+ iterator = client.iter_received_packets_from(timeout=10)
+ now: float = 798546132
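+ # Same six-timestamp pattern: elapsed 6s/5s/0s -> timeout 10/4/0.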
+ mocker.patch(
+ "time.perf_counter",
+ side_effect=[
+ now,
+ now + 6,
+ now + 7,
+ now + 12,
+ now + 12,
+ now + 12,
+ ],
+ )
+
+ # Act
+ next(iterator)
+ next(iterator)
+ next(iterator)
+
+ # Assert
+ assert mock_recv_packet_from.call_args_list == [
+ mocker.call(timeout=10),
+ mocker.call(timeout=4),
+ mocker.call(timeout=0),
+ ]
+
class TestUDPNetworkClient:
@pytest.fixture
diff --git a/tests/unit_test/test_tools/test_socket.py b/tests/unit_test/test_tools/test_socket.py
index 854670d9..9cabda9b 100644
--- a/tests/unit_test/test_tools/test_socket.py
+++ b/tests/unit_test/test_tools/test_socket.py
@@ -75,11 +75,6 @@ def test____new_socket_address____factory(
class TestSocketProxy:
- @pytest.fixture
- @staticmethod
- def mock_new_socket_address(mocker: MockerFixture) -> MagicMock:
- return mocker.patch("easynetwork.tools.socket.new_socket_address", autospec=True)
-
@pytest.fixture(
params=[
pytest.param(False, id="without_runner"),
@@ -116,53 +111,70 @@ def test____property____access(
if runner_stub is not None:
runner_stub.assert_not_called()
- def test____fileno____sub_call(
+ @pytest.mark.parametrize("family", [int(socket.AF_INET), int(9999999)])
+ def test____family____cast(
self,
+ family: int,
mock_tcp_socket: MagicMock,
- mocker: MockerFixture,
- runner_stub: MagicMock | None,
) -> None:
# Arrange
- mock_tcp_socket.fileno.return_value = mocker.sentinel.fd
- socket_proxy = SocketProxy(mock_tcp_socket, runner=runner_stub)
+ assert type(family) is int
+ mock_tcp_socket.family = family
# Act
- fd = socket_proxy.fileno()
+ socket_proxy = SocketProxy(mock_tcp_socket)
+ socket_proxy_family = socket_proxy.family
# Assert
- mock_tcp_socket.fileno.assert_called_once_with()
- assert fd is mocker.sentinel.fd
- if runner_stub is not None:
- runner_stub.assert_called_once_with(mock_tcp_socket.fileno)
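+ # 9999999 is not a valid AddressFamily value: the proxy is then
+ # expected to expose the raw int rather than an enum member.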
+ try:
+ family = socket.AddressFamily(family)
+ except ValueError:
+ assert type(socket_proxy_family) is int
+ assert socket_proxy_family == family
+ else:
+ assert isinstance(socket_proxy_family, socket.AddressFamily)
+ assert socket_proxy_family is family
+
+ @pytest.mark.parametrize("sock_type", [int(socket.SOCK_STREAM), int(9999999)])
+ def test____type____cast(
+ self,
+ sock_type: int,
+ mock_tcp_socket: MagicMock,
+ ) -> None:
+ # Arrange
+ assert type(sock_type) is int
+ mock_tcp_socket.type = sock_type
+
+ # Act
+ socket_proxy = SocketProxy(mock_tcp_socket)
+ socket_proxy_type = socket_proxy.type
- def test____dup____sub_call(
+ # Assert
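+ # As for the family property: unknown kinds (9999999) stay plain ints.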
+ try:
+ sock_type = socket.SocketKind(sock_type)
+ except ValueError:
+ assert type(socket_proxy_type) is int
+ assert socket_proxy_type == sock_type
+ else:
+ assert isinstance(socket_proxy_type, socket.SocketKind)
+ assert socket_proxy_type is sock_type
+
+ def test____fileno____sub_call(
self,
mock_tcp_socket: MagicMock,
mocker: MockerFixture,
runner_stub: MagicMock | None,
- mock_tcp_socket_factory: Callable[[], MagicMock],
) -> None:
# Arrange
mock_tcp_socket.fileno.return_value = mocker.sentinel.fd
- mock_new_socket = mock_tcp_socket_factory()
- mock_socket_fromfd = mocker.patch("socket.fromfd", autospec=True, return_value=mock_new_socket)
socket_proxy = SocketProxy(mock_tcp_socket, runner=runner_stub)
# Act
- socket = socket_proxy.dup()
+ fd = socket_proxy.fileno()
# Assert
- mock_tcp_socket.dup.assert_not_called()
mock_tcp_socket.fileno.assert_called_once_with()
- mock_socket_fromfd.assert_called_once_with(
- mocker.sentinel.fd,
- mock_tcp_socket.family,
- mock_tcp_socket.type,
- mock_tcp_socket.proto,
- )
- mock_new_socket.setblocking.assert_called_once_with(False)
- mock_new_socket.settimeout.assert_not_called()
- assert socket is mock_new_socket
+ assert fd is mocker.sentinel.fd
if runner_stub is not None:
runner_stub.assert_called_once_with(mock_tcp_socket.fileno)
@@ -236,22 +248,19 @@ def test____socket_address____sub_call(
self,
method: str,
mock_tcp_socket: MagicMock,
- mock_new_socket_address: MagicMock,
mocker: MockerFixture,
runner_stub: MagicMock | None,
) -> None:
# Arrange
socket_proxy = SocketProxy(mock_tcp_socket, runner=runner_stub)
- getattr(mock_tcp_socket, method).return_value = mocker.sentinel.address_to_convert
- mock_new_socket_address.return_value = mocker.sentinel.converted_address
+ getattr(mock_tcp_socket, method).return_value = mocker.sentinel.address
# Act
address = getattr(socket_proxy, method)()
# Assert
getattr(mock_tcp_socket, method).assert_called_once_with()
- mock_new_socket_address.assert_called_once_with(mocker.sentinel.address_to_convert, mock_tcp_socket.family)
- assert address is mocker.sentinel.converted_address
+ assert address is mocker.sentinel.address
if runner_stub is not None:
runner_stub.assert_called_once_with(getattr(mock_tcp_socket, method))
diff --git a/tests/unit_test/test_tools/test_utils.py b/tests/unit_test/test_tools/test_utils.py
index 3e12fd61..2c8e003e 100644
--- a/tests/unit_test/test_tools/test_utils.py
+++ b/tests/unit_test/test_tools/test_utils.py
@@ -20,7 +20,6 @@
iter_bytes,
lock_with_timeout,
make_callback,
- recursively_clear_exception_traceback_frames,
remove_traceback_frames_in_place,
replace_kwargs,
set_reuseport,
@@ -548,106 +547,6 @@ def test____transform_future_exception____make_cancelled_error_from_exception(ex
assert new_exception.__suppress_context__
-def test____recursively_clear_exception_traceback_frames____exception_without_context_nor_cause(
- mocker: MockerFixture,
-) -> None:
- # Arrange
- mock_clear_frames = mocker.patch("traceback.clear_frames", autospec=True)
-
- def func() -> None:
- raise Exception()
-
- # Act
- exception = pytest.raises(Exception, func).value
- assert exception.__context__ is None and exception.__cause__ is None
- recursively_clear_exception_traceback_frames(exception)
-
- # Assert
- mock_clear_frames.assert_called_once_with(exception.__traceback__)
-
-
-def test____recursively_clear_exception_traceback_frames____exception_with_context_but_no_explicit_cause(
- mocker: MockerFixture,
-) -> None:
- # Arrange
- mock_clear_frames = mocker.patch("traceback.clear_frames", autospec=True)
-
- def func() -> None:
- try:
- 1 / 0
- except ZeroDivisionError:
- raise Exception()
-
- # Act
- exception = pytest.raises(Exception, func).value
- assert isinstance(exception.__context__, ZeroDivisionError)
- assert exception.__cause__ is None
- recursively_clear_exception_traceback_frames(exception)
-
- # Assert
- assert mock_clear_frames.mock_calls == [
- mocker.call(exception.__traceback__),
- mocker.call(exception.__context__.__traceback__),
- ]
-
-
-def test____recursively_clear_exception_traceback_frames____exception_with_explicit_cause(
- mocker: MockerFixture,
-) -> None:
- # Arrange
- mock_clear_frames = mocker.patch("traceback.clear_frames", autospec=True)
-
- def func() -> None:
- try:
- 1 / 0
- except ZeroDivisionError as exc:
- raise Exception() from exc
-
- # Act
- exception = pytest.raises(Exception, func).value
- assert isinstance(exception.__context__, ZeroDivisionError)
- assert exception.__cause__ is exception.__context__
- recursively_clear_exception_traceback_frames(exception)
-
- # Assert
- assert mock_clear_frames.mock_calls == [
- mocker.call(exception.__traceback__),
- mocker.call(exception.__context__.__traceback__),
- ]
-
-
-def test____recursively_clear_exception_traceback_frames____exception_with_context_but_different_cause(
- mocker: MockerFixture,
-) -> None:
- # Arrange
- mock_clear_frames = mocker.patch("traceback.clear_frames", autospec=True)
-
- def func() -> None:
- try:
- 1 / 0
- except ZeroDivisionError as exc:
- try:
- raise ValueError()
- except ValueError:
- raise Exception() from exc
-
- # Act
- exception = pytest.raises(Exception, func).value
- assert isinstance(exception.__context__, ValueError)
- assert isinstance(exception.__context__.__context__, ZeroDivisionError)
- assert exception.__context__.__cause__ is None
- assert isinstance(exception.__cause__, ZeroDivisionError)
- recursively_clear_exception_traceback_frames(exception)
-
- # Assert
- assert mock_clear_frames.mock_calls == [
- mocker.call(exception.__traceback__),
- mocker.call(exception.__context__.__traceback__),
- mocker.call(exception.__context__.__context__.__traceback__),
- # mocker.call(exception.__cause__.__traceback__), # Already cleared because of the context nesting
- ]
-
-
@pytest.mark.parametrize("n", [-1, 0, 2, 2000])
def test____remove_traceback_frames_in_place____remove_n_first_traceback(n: int) -> None:
# Arrange
diff --git a/tox.ini b/tox.ini
index a039cd3e..dee189d9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,13 +1,13 @@
[tox]
minversion = 4.0
envlist =
+ pre-commit
+ mypy-{full,test,docs}
build
- py311-other
+ py311-other-{tests,docstrings}
py311-{unit,functional}-{__standard__,cbor,msgpack,encryption,sniffio}
py311-functional-{asyncio_proactor,uvloop}
coverage
- mypy-{full,test}
- pre-commit
skip_missing_interpreters = true
[base]
@@ -20,10 +20,17 @@ all_extras =
msgpack
encryption
sniffio
- ; uvloop is expressly not added
allowlist_externals =
pdm
+[docs]
+root_dir = {toxinidir}{/}docs
+source_dir = {[docs]root_dir}{/}source
+examples_dir = {[docs]source_dir}{/}_include{/}examples
+
+[pdm]
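+; shared sync invocation, referenced below as {[pdm]sync}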
+sync = pdm sync --no-self
+
[base-pytest]
setenv =
PYTHONASYNCIODEBUG = 1
@@ -32,18 +39,21 @@ unit_tests_rootdir = tests{/}unit_test
functional_tests_rootdir = tests{/}functional_test
xdist_dist = worksteal
-[testenv:py311-other]
+[testenv:py311-other-{tests,docstrings}]
package = wheel
allowlist_externals =
{[base]allowlist_externals}
setenv =
{[base]setenv}
{[base-pytest]setenv}
- PYTEST_ADDOPTS = {[base-pytest]addopts} {posargs}
+ PYTEST_ADDOPTS = {[base-pytest]addopts} --no-cov {posargs}
commands_pre =
- pdm sync --no-self --dev --group=test
+ {[pdm]sync} --dev --group=test
commands =
- pytest -m "not unit and not functional" --no-cov
+ tests: pytest -m "not unit and not functional"
+ docstrings: pytest --doctest-modules {toxinidir}{/}src
+ docstrings: pytest --doctest-modules {[docs]examples_dir}{/}tutorials{/}ftp_server
+ docstrings: pytest --doctest-glob="*.rst" {[docs]source_dir}
[testenv:py311-{unit,functional}-{__standard__,cbor,msgpack,encryption,sniffio}]
package = wheel
@@ -64,7 +74,7 @@ setenv =
passenv =
PYTEST_MAX_WORKERS
commands_pre =
- pdm sync --no-self --dev --group=test
+ {[pdm]sync} --dev --group=test
commands =
__standard__: pytest -n "{env:PYTEST_MAX_WORKERS:auto}" --dist={[base-pytest]xdist_dist} -m "not feature" {env:TESTS_ROOTDIR}
cbor: pytest -m "feature_cbor" {env:TESTS_ROOTDIR}
@@ -77,8 +87,6 @@ package = wheel
platform =
asyncio_proactor: win32
uvloop: linux|darwin
-extras =
- uvloop: uvloop
allowlist_externals =
{[base]allowlist_externals}
setenv =
@@ -92,7 +100,8 @@ setenv =
passenv =
PYTEST_MAX_WORKERS
commands_pre =
- pdm sync --no-self --dev --group=test
+ {[pdm]sync} --dev --group=test
+ uvloop: {[pdm]sync} --dev --group=uvloop
commands =
pytest -n "{env:PYTEST_MAX_WORKERS:auto}" --dist={[base-pytest]xdist_dist} --asyncio-event-loop="{env:ASYNCIO_EVENTLOOP}" -m "asyncio and not feature" {env:TESTS_ROOTDIR}
@@ -131,7 +140,7 @@ commands =
cp -a {envtmpdir}{/}dist{/}. {toxinidir}{/}dist
rm -rf {envtmpdir}{/}dist
-[testenv:mypy-{full,test}]
+[testenv:mypy-{full,test,docs}]
package = wheel
extras =
{[base]all_extras}
@@ -142,11 +151,21 @@ setenv =
MYPY_CACHE_DIR = {envtmpdir}{/}.mypy_cache
MYPY_OPTS = --no-incremental --config-file {toxinidir}{/}pyproject.toml
commands_pre =
- pdm sync --no-self --dev --group=mypy
- test: pdm sync --no-self --dev --group=test
+ {[pdm]sync} --dev --group=mypy
+ test: {[pdm]sync} --dev --group=test
commands =
+ # package
full: mypy {env:MYPY_OPTS} -p easynetwork -p easynetwork_asyncio
+ # tests
test: mypy {env:MYPY_OPTS} {toxinidir}{/}tests
+ # documentation
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}tutorials{/}echo_client_server_tcp
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}tutorials{/}ftp_server
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}tutorials{/}udp_endpoint
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}howto{/}protocols
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}howto{/}serializers
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}howto{/}tcp_clients
+ docs: mypy {env:MYPY_OPTS} {[docs]examples_dir}{/}howto{/}tcp_servers
[testenv:pre-commit]
skip_install = true