convert unpythonic.net.test tests to unpythonic.test.fixtures
Doesn't work yet due to a MacroPy issue with `bytes` literals,
which are needed by these tests. See:
    azazel75/macropy#26
Technologicat committed Aug 19, 2020
1 parent f66d553 commit 488b09b
Showing 2 changed files with 107 additions and 95 deletions.
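
For orientation, the pattern applied throughout the diffs below replaces bare assert statements with unpythonic's test[] expression macro, grouped under named testset blocks so failures are recorded per set instead of aborting the run. A minimal sketch of the pattern (illustrative only, not an excerpt from either file; like the real tests, it needs the MacroPy expander active to actually run):

from unpythonic.syntax import macros, test  # noqa: F401 -- the test[] expression macro
from unpythonic.test.fixtures import testset

def runtests():
    # Before the conversion, a failing bare assert killed the whole run:
    #     assert 1 + 1 == 2
    # After it, each check is a test[] expression inside a named testset:
    with testset("example"):
        test[1 + 1 == 2]

if __name__ == '__main__':
    runtests()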
185 changes: 96 additions & 89 deletions unpythonic/net/test/test_msg.py
@@ -1,5 +1,8 @@
 # -*- coding: utf-8; -*-
 
+from ...syntax import macros, test # noqa: F401
+from ...test.fixtures import testset
+
 from io import BytesIO, SEEK_SET
 
 from .fixtures import nettest
@@ -8,102 +11,106 @@
 from ..util import bytessource, streamsource, socketsource
 
 def runtests():
-    # sans-IO
-
-    # Basic use.
-    # Encode a message:
-    rawdata = b"hello world"
-    message = encodemsg(rawdata)
-    # Decode a message:
-    decoder = MessageDecoder(bytessource(message))
-    assert decoder.decode() == b"hello world"
-    assert decoder.decode() is None # The message should have been consumed by the first decode.
-
-    # Decoding a message gets a whole message and only that message.
-    bio = BytesIO()
-    bio.write(message)
-    bio.write(b"junk junk junk")
-    bio.seek(0, SEEK_SET)
-    decoder = MessageDecoder(streamsource(bio))
-    assert decoder.decode() == b"hello world"
-    assert decoder.decode() is None
-
-    # - Messages are received in order.
-    # - Any leftover bytes already read into the receive buffer by the previous decode
-    #   are consumed *from the buffer* by the next decode. This guarantees it doesn't
-    #   matter if the transport does not honor message boundaries (which is indeed the
-    #   whole point of having this protocol).
-    # - Note this means that should you wish to stop receiving messages on a particular
-    #   source, and resume reading a raw stream from it instead, you must manually prepend
-    #   the final contents of the receive buffer (`decoder.get_buffered_data()`) to whatever
-    #   data you later receive from that source (since that data has already been placed
-    #   into the receive buffer, so it is no longer available at the source).
-    # - So it's recommended to have a dedicated channel to communicate using messages,
-    #   e.g. a dedicated TCP connection on which all communication is done with messages.
-    #   This way you don't need to care about the receive buffer.
-    bio = BytesIO()
-    bio.write(encodemsg(b"hello world"))
-    bio.write(encodemsg(b"hello again"))
-    bio.seek(0, SEEK_SET)
-    decoder = MessageDecoder(streamsource(bio))
-    assert decoder.decode() == b"hello world"
-    assert decoder.decode() == b"hello again"
-    assert decoder.decode() is None
+    # TODO: As of MacroPy 1.1.0b2, this test module crashes at macro expansion time
+    # TODO: due to a MacroPy bug involving `bytes` literals.
+    # TODO: https://github.com/azazel75/macropy/issues/26
+    with testset("unpythonic.net.msg"):
+        with testset("sans-IO"):
+            with testset("basic usage"):
+                # Encode a message:
+                rawdata = b"hello world"
+                message = encodemsg(rawdata)
+                # Decode a message:
+                decoder = MessageDecoder(bytessource(message))
+                test[decoder.decode() == b"hello world"]
+                test[decoder.decode() is None] # The message should have been consumed by the first decode.
 
-    # Synchronization to message start is performed upon decode.
-    # It doesn't matter if there is junk between messages (the junk is discarded).
-    bio = BytesIO()
-    bio.write(encodemsg(b"hello world"))
-    bio.write(b"junk junk junk")
-    bio.write(encodemsg(b"hello again"))
-    bio.seek(0, SEEK_SET)
-    decoder = MessageDecoder(streamsource(bio))
-    assert decoder.decode() == b"hello world"
-    assert decoder.decode() == b"hello again"
-    assert decoder.decode() is None
+            # Decoding a message gets a whole message and only that message.
+            with testset("decode robustness"):
+                bio = BytesIO()
+                bio.write(message)
+                bio.write(b"junk junk junk")
+                bio.seek(0, SEEK_SET)
+                decoder = MessageDecoder(streamsource(bio))
+                test[decoder.decode() == b"hello world"]
+                test[decoder.decode() is None]
 
-    # Junk containing sync bytes (0xFF) does not confuse or hang the decoder.
-    bio = BytesIO()
-    bio.write(encodemsg(b"hello world"))
-    bio.write(b"\xff" * 10)
-    bio.write(encodemsg(b"hello again"))
-    bio.seek(0, SEEK_SET)
-    decoder = MessageDecoder(streamsource(bio))
-    assert decoder.decode() == b"hello world"
-    assert decoder.decode() == b"hello again"
-    assert decoder.decode() is None
+            # - Messages are received in order.
+            # - Any leftover bytes already read into the receive buffer by the previous decode
+            #   are consumed *from the buffer* by the next decode. This guarantees it doesn't
+            #   matter if the transport does not honor message boundaries (which is indeed the
+            #   whole point of having this protocol).
+            # - Note this means that should you wish to stop receiving messages on a particular
+            #   source, and resume reading a raw stream from it instead, you must manually prepend
+            #   the final contents of the receive buffer (`decoder.get_buffered_data()`) to whatever
+            #   data you later receive from that source (since that data has already been placed
+            #   into the receive buffer, so it is no longer available at the source).
+            # - So it's recommended to have a dedicated channel to communicate using messages,
+            #   e.g. a dedicated TCP connection on which all communication is done with messages.
+            #   This way you don't need to care about the receive buffer.
+            with testset("message ordering"):
+                bio = BytesIO()
+                bio.write(encodemsg(b"hello world"))
+                bio.write(encodemsg(b"hello again"))
+                bio.seek(0, SEEK_SET)
+                decoder = MessageDecoder(streamsource(bio))
+                test[decoder.decode() == b"hello world"]
+                test[decoder.decode() == b"hello again"]
+                test[decoder.decode() is None]
 
-    # Use with TCP sockets.
+            # Synchronization to message start is performed upon decode.
+            # It doesn't matter if there is junk between messages (the junk is discarded).
+            with testset("stream synchronization"):
+                bio = BytesIO()
+                bio.write(encodemsg(b"hello world"))
+                bio.write(b"junk junk junk")
+                bio.write(encodemsg(b"hello again"))
+                bio.seek(0, SEEK_SET)
+                decoder = MessageDecoder(streamsource(bio))
+                test[decoder.decode() == b"hello world"]
+                test[decoder.decode() == b"hello again"]
+                test[decoder.decode() is None]
 
-    def server1(sock):
-        decoder = MessageDecoder(socketsource(sock))
-        data = decoder.decode()
-        return data
-    def client1(sock):
-        sock.sendall(encodemsg(b"hello world"))
-    assert nettest(server1, client1) == b"hello world"
+            # Junk containing sync bytes (0xFF) does not confuse or hang the decoder.
+            with testset("junk containing sync bytes"):
+                bio = BytesIO()
+                bio.write(encodemsg(b"hello world"))
+                bio.write(b"\xff" * 10)
+                bio.write(encodemsg(b"hello again"))
+                bio.seek(0, SEEK_SET)
+                decoder = MessageDecoder(streamsource(bio))
+                test[decoder.decode() == b"hello world"]
+                test[decoder.decode() == b"hello again"]
+                test[decoder.decode() is None]
 
-    def server2(sock):
-        decoder = MessageDecoder(socketsource(sock))
-        data = decoder.decode()
-        return data
-    def client2(sock):
-        sock.sendall(encodemsg(b"hello world"))
-        sock.sendall(encodemsg(b"hello again"))
-    assert nettest(server2, client2) == b"hello world"
+        with testset("with TCP sockets"):
+            def server1(sock):
+                decoder = MessageDecoder(socketsource(sock))
+                data = decoder.decode()
+                return data
+            def client1(sock):
+                sock.sendall(encodemsg(b"hello world"))
+            test[nettest(server1, client1) == b"hello world"]
 
-    def server3(sock):
-        decoder = MessageDecoder(socketsource(sock))
-        data = []
-        data.append(decoder.decode())
-        data.append(decoder.decode())
-        return data
-    def client3(sock):
-        sock.sendall(encodemsg(b"hello world"))
-        sock.sendall(encodemsg(b"hello again"))
-    assert nettest(server3, client3) == [b"hello world", b"hello again"]
+            def server2(sock):
+                decoder = MessageDecoder(socketsource(sock))
+                data = decoder.decode()
+                return data
+            def client2(sock):
+                sock.sendall(encodemsg(b"hello world"))
+                sock.sendall(encodemsg(b"hello again"))
+            test[nettest(server2, client2) == b"hello world"]
 
-    print("All tests PASSED")
+            def server3(sock):
+                decoder = MessageDecoder(socketsource(sock))
+                data = []
+                data.append(decoder.decode())
+                data.append(decoder.decode())
+                return data
+            def client3(sock):
+                sock.sendall(encodemsg(b"hello world"))
+                sock.sendall(encodemsg(b"hello again"))
+            test[nettest(server3, client3) == [b"hello world", b"hello again"]]
 
 if __name__ == '__main__':
     runtests()
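
In short, the sans-IO testsets above exercise a small framing protocol: encodemsg wraps a bytes payload into a message, and MessageDecoder pulls whole messages back out of any byte source, skipping junk between frames and buffering any extra bytes it has already read. A rough usage sketch distilled from those tests (the unpythonic.net.msg import path is inferred from the test file's relative imports, so treat it as an assumption):

from io import BytesIO, SEEK_SET

from unpythonic.net.msg import encodemsg, MessageDecoder  # path inferred, see lead-in
from unpythonic.net.util import streamsource

bio = BytesIO()
bio.write(encodemsg(b"first"))
bio.write(b"junk junk junk")       # garbage between frames is discarded on decode
bio.write(encodemsg(b"second"))
bio.seek(0, SEEK_SET)

decoder = MessageDecoder(streamsource(bio))
assert decoder.decode() == b"first"    # one whole message per decode() call
assert decoder.decode() == b"second"   # messages come out in order
assert decoder.decode() is None        # source exhausted

As the comments in the diff point out, if you want to switch a source back to raw-stream reading after receiving messages on it, first prepend decoder.get_buffered_data() to whatever you read next, because the decoder may already hold bytes that arrived after the last complete message.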
17 changes: 11 additions & 6 deletions unpythonic/net/test/test_util.py
@@ -1,17 +1,22 @@
 # -*- coding: utf-8; -*-
 
+from ...syntax import macros, test # noqa: F401
+from ...test.fixtures import testset
+
 from .fixtures import nettest
 
 from ..util import recvall, netstringify
 
 def runtests():
-    assert netstringify(b"hello world") == b"11:hello world,"
-
-    server = lambda sock: recvall(1024, sock)
-    client = lambda sock: [sock.sendall(b"x" * 512), sock.sendall(b"x" * 512)]
-    assert len(nettest(server, client)) == 1024
+    # TODO: As of MacroPy 1.1.0b2, this test module crashes at macro expansion time
+    # TODO: due to a MacroPy bug involving `bytes` literals.
+    # TODO: https://github.com/azazel75/macropy/issues/26
+    with testset("unpythonic.net.util"):
+        test[netstringify(b"hello world") == b"11:hello world,"]
 
-    print("All tests PASSED")
+        server = lambda sock: recvall(1024, sock)
+        client = lambda sock: [sock.sendall(b"x" * 512), sock.sendall(b"x" * 512)]
+        test[len(nettest(server, client)) == 1024]
 
 if __name__ == '__main__':
     runtests()
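
The netstringify check above pins down the classic netstring framing, <decimal length>:<payload>, and the recvall check confirms that recvall(1024, sock) keeps reading until exactly 1024 bytes have arrived even though the client sends them as two 512-byte chunks. For illustration, a minimal netstring encoder that satisfies the same check (a sketch, not unpythonic's actual implementation):

def netstringify(data: bytes) -> bytes:
    # Netstring framing: decimal length, ':', payload, ','  -> b"11:hello world,"
    length = str(len(data)).encode("ascii")
    return length + b":" + data + b","

assert netstringify(b"hello world") == b"11:hello world,"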
