Skip to content

Commit

Permalink
Added BufferedIncrementalPacketSerializer base class (#174)
Browse files Browse the repository at this point in the history
  • Loading branch information
francis-clairicia authored Dec 2, 2023
1 parent 781739e commit 3beb7c4
Show file tree
Hide file tree
Showing 63 changed files with 3,753 additions and 439 deletions.
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
from __future__ import annotations

import json
from collections.abc import Generator
from typing import TYPE_CHECKING, Any

from easynetwork.exceptions import DeserializeError, IncrementalDeserializeError
from easynetwork.serializers.abc import BufferedIncrementalPacketSerializer

if TYPE_CHECKING:
from _typeshed import ReadableBuffer


class MyJSONSerializer(BufferedIncrementalPacketSerializer[Any, bytearray]):
    """JSON serializer emitting one CRLF-terminated document per packet.

    Implements the one-shot (`serialize`/`deserialize`), incremental, and
    buffered incremental serializer APIs.
    """

    def __init__(self, *, ensure_ascii: bool = True) -> None:
        """Initialize the serializer.

        Parameters:
            ensure_ascii: forwarded to :func:`json.dumps`; when true the
                output is guaranteed to be pure ASCII.
        """
        self._ensure_ascii: bool = ensure_ascii
        # ensure_ascii=True guarantees ASCII-only output, so the stricter
        # codec is enough; otherwise fall back to UTF-8.
        self._encoding: str = "ascii" if ensure_ascii else "utf-8"

    def _dump(self, packet: Any) -> bytes:
        # Serialize to text first, then encode with the matching codec.
        document = json.dumps(packet, ensure_ascii=self._ensure_ascii)
        return document.encode(self._encoding)

    def _load(self, data: bytes | bytearray) -> Any:
        # May raise UnicodeError or json.JSONDecodeError; callers wrap these.
        return json.loads(data.decode(self._encoding))

    def serialize(self, packet: Any) -> bytes:
        """Return the JSON representation of *packet* as bytes."""
        return self._dump(packet)

    def deserialize(self, data: bytes) -> Any:
        """Parse *data* as a JSON document.

        Raises:
            DeserializeError: on malformed bytes or invalid JSON.
        """
        try:
            return self._load(data)
        except (UnicodeError, json.JSONDecodeError) as exc:
            raise DeserializeError("JSON decode error") from exc

    def incremental_serialize(self, packet: Any) -> Generator[bytes, None, None]:
        """Yield the serialized packet as a single CRLF-terminated chunk."""
        yield self._dump(packet) + b"\r\n"

    def incremental_deserialize(self) -> Generator[None, bytes, tuple[Any, bytes]]:
        """Accumulate received chunks until a CRLF separator appears.

        Returns the parsed packet and the bytes received after the separator.

        Raises:
            IncrementalDeserializeError: on invalid JSON; carries the
                unconsumed remainder so it is not lost.
        """
        separator = b"\r\n"
        received = yield
        while True:
            position = received.find(separator)
            if position >= 0:
                break
            received += yield

        extra = received[position + len(separator) :]
        document_bytes = received[:position]

        try:
            packet = self._load(document_bytes)
        except (UnicodeError, json.JSONDecodeError) as exc:
            raise IncrementalDeserializeError("JSON decode error", extra) from exc

        return packet, extra

    def create_deserializer_buffer(self, sizehint: int) -> bytearray:
        """Allocate the reception buffer: honor *sizehint*, minimum 64 KiB."""
        return bytearray(max(sizehint, 65536))

    def buffered_incremental_deserialize(
        self,
        buffer: bytearray,
    ) -> Generator[int | None, int, tuple[Any, ReadableBuffer]]:
        """Zero-allocation variant of :meth:`incremental_deserialize`.

        Yields the offset at which the endpoint must write the next chunk
        (``None`` meaning the buffer start) and receives the number of bytes
        written. Returns the parsed packet and the unconsumed remainder.

        Raises:
            IncrementalDeserializeError: if the buffer fills up without a
                separator ("Too long line") or the line is invalid JSON.
        """
        separator = b"\r\n"
        capacity = len(buffer)

        # First round: ask the endpoint to fill from the buffer start.
        filled: int = (yield None)

        while True:
            position = buffer.find(separator, 0, filled)
            if position >= 0:
                break
            if filled > capacity - len(separator):
                # Not enough room left for a full separator: the line can
                # never terminate inside this buffer.
                raise IncrementalDeserializeError("Too long line", remaining_data=b"")
            # Request more bytes, appended right after the current data.
            filled += yield filled

        extra: bytearray = buffer[position + len(separator) : filled]
        document_bytes: bytearray = buffer[:position]

        try:
            packet = self._load(document_bytes)
        except (UnicodeError, json.JSONDecodeError) as exc:
            raise IncrementalDeserializeError("JSON decode error", extra) from exc

        return packet, extra
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from __future__ import annotations

import io
from collections.abc import Generator
from typing import Any

from easynetwork.serializers.abc import BufferedIncrementalPacketSerializer


class MySerializer(BufferedIncrementalPacketSerializer[Any, memoryview]):
    """Skeleton serializer showing how a buffered implementation can delegate
    to the byte-based incremental one, sharing a single parsing loop.
    """

    ...

    # It can receive either 'bytes' from endpoint or 'memoryviews' from buffered_incremental_deserialize()
    def incremental_deserialize(self) -> Generator[None, bytes | memoryview, tuple[Any, bytes]]:
        """Accumulate chunks in a BytesIO until a full packet can be parsed.

        Returns the parsed packet and any trailing bytes left in the stream.
        """
        initial_bytes = yield
        with io.BytesIO(initial_bytes) as buffer:
            while True:
                try:
                    packet = self._load_from_file(buffer)
                except EOFError:
                    # Incomplete packet: fall through to wait for more data.
                    pass
                else:
                    break
                # Append the next chunk at the current write position, then
                # rewind to retry a full parse from the start.
                # NOTE(review): assumes _load_from_file() left the stream
                # position at the end of the data on EOFError — confirm.
                buffer.write((yield))
                buffer.seek(0)

            # Bytes after the successfully parsed packet belong to the next one.
            remainder = buffer.read()
        return packet, remainder

    def _load_from_file(self, file: io.IOBase) -> Any:
        # Placeholder: parse one packet from `file`; presumably raises
        # EOFError when the stream does not yet hold a complete packet.
        ...

    def create_deserializer_buffer(self, sizehint: int) -> memoryview:
        # Don't care about buffer size
        buffer = bytearray(sizehint)
        return memoryview(buffer)

    def buffered_incremental_deserialize(self, buffer: memoryview) -> Generator[None, int, tuple[Any, bytes]]:
        """Drive incremental_deserialize() with slices of the shared buffer.

        Receives the number of bytes written into `buffer` at each step and
        returns whatever incremental_deserialize() returns.
        """
        incremental_deserialize = self.incremental_deserialize()
        # Start the generator
        next(incremental_deserialize)

        while True:
            nb_bytes_written: int = yield
            try:
                # Forward only the freshly written prefix; BytesIO.write()
                # copies it, so reusing `buffer` on the next round is safe.
                incremental_deserialize.send(buffer[:nb_bytes_written])
            except StopIteration as exc:
                # incremental_deserialize() returned
                return exc.value
3 changes: 3 additions & 0 deletions docs/source/_static/css/details.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
details {
margin-bottom: 1rem;
}
17 changes: 2 additions & 15 deletions docs/source/api/serializers/abc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,6 @@ Top-Level Base Classes

.. automodule:: easynetwork.serializers.abc
:no-docstring:

.. autoclass:: AbstractPacketSerializer
:members:

.. autoclass:: AbstractIncrementalPacketSerializer
:members:


Expand All @@ -25,16 +20,8 @@ Top-Level Base Classes
Stream Base Classes
===================

.. automodule:: easynetwork.serializers.base_stream
:no-docstring:

Here are abstract classes that implement common stream protocol patterns.

.. autoclass:: AutoSeparatedPacketSerializer
:members:

.. autoclass:: FixedSizePacketSerializer
:members:

.. autoclass:: FileBasedPacketSerializer
.. automodule:: easynetwork.serializers.base_stream
:no-docstring:
:members:
13 changes: 12 additions & 1 deletion docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
"enum_tools.autoenum",
"sphinx_rtd_theme",
"sphinx_tabs.tabs",
"sphinx_toolbox.collapse",
"sphinx_toolbox.github",
"sphinx_toolbox.sidebar_links",
"sphinx_toolbox.more_autodoc.genericalias",
Expand Down Expand Up @@ -79,8 +80,13 @@
"_socket._RetAddress": "typing.Any",
"_socket.socket": "socket.socket",
"contextvars.Context": "contextvars.Context",
"ReadableBuffer": "bytes | bytearray | memoryview",
"WriteableBuffer": "bytearray | memoryview",
}
autodoc_inherit_docstrings = False
autodoc_mock_imports = [
"_typesched",
]

# -- sphinx.ext.intersphinx configuration ------------------------------------
# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html
Expand Down Expand Up @@ -130,7 +136,12 @@
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "sphinx_rtd_theme"
html_static_path = []
html_static_path = [
"_static",
]
html_css_files = [
"css/details.css",
]

# -- sphinx-rtd-theme configuration ------------------------------------------
# https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html
Expand Down
10 changes: 9 additions & 1 deletion docs/source/glossary.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@ Glossary
.. glossary::
:sorted:

buffered serializer
See :term:`serializer`.

communication protocol
A set of formal rules describing how to transmit or exchange data, especially across a network.

Expand Down Expand Up @@ -57,7 +60,7 @@ Glossary

Ideally, a serializer should only handle :ref:`primitive types <bltin-types>` and :ref:`constants <built-in-consts>`.

There are 2 types of serializers:
There are 3 types of serializers:

* one-shot serializers

Expand All @@ -72,6 +75,11 @@ Glossary
During deserialization, they have the ability **to know when the** :term:`packet` **is complete** (and wait if incomplete)
and which bytes are not part of the initial :term:`packet`.

* buffered serializers

An incremental serializer specialization that allows the use of a custom in-memory byte buffer,
if supported by the underlying transport layer.

serializer wrapper
A :term:`serializer` that transforms data coming from another :term:`serializer`.

Expand Down
Loading

0 comments on commit 3beb7c4

Please sign in to comment.