
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Dec 20, 2024
Parent: be7538e, commit: af9e396
Showing 7 changed files with 91 additions and 143 deletions.
84 changes: 29 additions & 55 deletions integration_tests/base_routes.py
@@ -750,14 +750,9 @@ def handle_exception(error):
# Create a response with proper error handling
response = Response(
status_code=500,
headers=Headers({
"Content-Type": "text/plain",
"X-Error-Response": "true",
"global_after": "global_after_request",
"server": "robyn"
}),
headers=Headers({"Content-Type": "text/plain", "X-Error-Response": "true", "global_after": "global_after_request", "server": "robyn"}),
description=f"error msg: {error}".encode(),
streaming=False
streaming=False,
)
return response

@@ -1103,30 +1098,25 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam

# --- Streaming responses ---


@app.get("/stream/sync", streaming=True)
async def sync_stream():
def generator():
for i in range(5):
yield f"Chunk {i}\n".encode()

headers = Headers({"Content-Type": "text/plain"})
return Response(
status_code=200,
description=generator(),
headers=headers
)
return Response(status_code=200, description=generator(), headers=headers)


@app.get("/stream/async", streaming=True)
async def async_stream():
async def generator():
for i in range(5):
yield f"Async Chunk {i}\n".encode()

return Response(
status_code=200,
headers={"Content-Type": "text/plain"},
description=generator()
)

return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())


@app.get("/stream/mixed", streaming=True)
async def mixed_stream():
@@ -1135,86 +1125,70 @@ async def generator():
yield "String chunk\n".encode()
yield str(42).encode() + b"\n"
yield json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"

return Response(
status_code=200,
headers={"Content-Type": "text/plain"},
description=generator()
)

return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())


@app.get("/stream/events", streaming=True)
async def server_sent_events():
async def event_generator():
import asyncio
import json
import time

# Regular event
yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
await asyncio.sleep(1)

# Event with ID
yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
await asyncio.sleep(1)

# Multiple data lines
data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
data = json.dumps({"status": "complete", "results": [1, 2, 3]}, indent=2)
yield f"event: complete\ndata: {data}\n\n".encode()

return Response(
status_code=200,
headers={
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Connection": "keep-alive"
},
description=event_generator()
status_code=200, headers={"Content-Type": "text/event-stream", "Cache-Control": "no-cache", "Connection": "keep-alive"}, description=event_generator()
)


@app.get("/stream/large-file", streaming=True)
async def stream_large_file():
async def file_generator():
# Simulate streaming a large file in chunks
chunk_size = 1024 # 1KB chunks
total_size = 10 * chunk_size # 10KB total

for offset in range(0, total_size, chunk_size):
# Simulate reading file chunk
chunk = b"X" * min(chunk_size, total_size - offset)
yield chunk

return Response(
status_code=200,
headers={
"Content-Type": "application/octet-stream",
"Content-Disposition": "attachment; filename=large-file.bin"
},
description=file_generator()
headers={"Content-Type": "application/octet-stream", "Content-Disposition": "attachment; filename=large-file.bin"},
description=file_generator(),
)


@app.get("/stream/csv", streaming=True)
async def stream_csv():
async def csv_generator():
# CSV header
yield "id,name,value\n".encode()

import asyncio
import random

# Generate rows
for i in range(5):
await asyncio.sleep(0.5) # Simulate data processing
row = f"{i},item-{i},{random.randint(1, 100)}\n"
yield row.encode()

return Response(
status_code=200,
headers={
"Content-Type": "text/csv",
"Content-Disposition": "attachment; filename=data.csv"
},
description=csv_generator()
)

return Response(status_code=200, headers={"Content-Type": "text/csv", "Content-Disposition": "attachment; filename=data.csv"}, description=csv_generator())


def main():
app.set_response_header("server", "robyn")
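As an aside on the streaming routes above: a minimal client-side sketch, assuming the integration server from base_routes.py is running on http://127.0.0.1:8080 (the address used by the tests below) and using aiohttp, which the test suite already depends on. The function name and destination filename are illustrative, not part of the repository.

import asyncio

import aiohttp


async def download_large_file(url: str = "http://127.0.0.1:8080/stream/large-file", dest: str = "large-file.bin") -> int:
    """Save the streamed response to disk without buffering the whole body."""
    total = 0
    async with aiohttp.ClientSession() as client:
        async with client.get(url) as response:
            assert response.status == 200
            with open(dest, "wb") as fh:
                # Consume the body in 1 KB chunks as they arrive.
                async for chunk in response.content.iter_chunked(1024):
                    fh.write(chunk)
                    total += len(chunk)
    return total  # the route above yields 10 * 1024 bytes in total


if __name__ == "__main__":
    print(asyncio.run(download_large_file()))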
5 changes: 1 addition & 4 deletions integration_tests/conftest.py
@@ -8,12 +8,10 @@
from typing import List

import pytest
import pytest_asyncio
from robyn import Robyn
from integration_tests.base_routes import app

from integration_tests.helpers.network_helpers import get_network_host


def spawn_process(command: List[str]) -> subprocess.Popen:
if platform.system() == "Windows":
command[0] = "python"
@@ -129,4 +127,3 @@ def env_file():
env_path.unlink()
del os.environ["ROBYN_PORT"]
del os.environ["ROBYN_HOST"]

19 changes: 10 additions & 9 deletions integration_tests/test_streaming_responses.py
@@ -19,6 +19,7 @@
# Mark all tests in this module as async
pytestmark = pytest.mark.asyncio


async def test_sync_stream():
"""Test basic synchronous streaming response."""
async with aiohttp.ClientSession() as client:
@@ -34,6 +35,7 @@ async def test_sync_stream():
for i, chunk in enumerate(chunks):
assert chunk == f"Chunk {i}\n"


async def test_async_stream():
"""Test asynchronous streaming response."""
async with aiohttp.ClientSession() as client:
@@ -49,19 +51,15 @@ async def test_async_stream():
for i, chunk in enumerate(chunks):
assert chunk == f"Async Chunk {i}\n"


async def test_mixed_stream():
"""Test streaming of mixed content types."""
async with aiohttp.ClientSession() as client:
async with client.get("http://127.0.0.1:8080/stream/mixed") as response:
assert response.status == 200
assert response.headers["Content-Type"] == "text/plain"

expected = [
b"Binary chunk\n",
b"String chunk\n",
b"42\n",
json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
]
expected = [b"Binary chunk\n", b"String chunk\n", b"42\n", json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"]

chunks = []
async for chunk in response.content:
@@ -71,6 +69,7 @@ async def test_mixed_stream():
for chunk, expected_chunk in zip(chunks, expected):
assert chunk == expected_chunk


async def test_server_sent_events():
"""Test Server-Sent Events (SSE) streaming."""
async with aiohttp.ClientSession() as client:
@@ -103,6 +102,7 @@ async def test_server_sent_events():
assert event_data["status"] == "complete"
assert event_data["results"] == [1, 2, 3]


async def test_large_file_stream():
"""Test streaming of large files in chunks."""
async with aiohttp.ClientSession() as client:
@@ -118,6 +118,7 @@ async def test_large_file_stream():

assert total_size == 10 * 1024 # 10KB total


async def test_csv_stream():
"""Test streaming of CSV data."""
async with aiohttp.ClientSession() as client:
@@ -132,11 +133,11 @@ async def test_csv_stream():

# Verify header
assert lines[0] == "id,name,value"

# Verify data rows
assert len(lines) == 6 # Header + 5 data rows
for i, line in enumerate(lines[1:], 0):
id_, name, value = line.split(',')
id_, name, value = line.split(",")
assert int(id_) == i
assert name == f"item-{i}"
assert 1 <= int(value) <= 100
assert 1 <= int(value) <= 100
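
For completeness, a hedged sketch of consuming the /stream/events endpoint exercised above. It assumes the same 127.0.0.1:8080 server and standard SSE framing (field: value lines, events separated by a blank line); the helper name is made up for illustration and is not part of the test suite.

import asyncio
import json

import aiohttp


async def read_sse_events(url: str = "http://127.0.0.1:8080/stream/events"):
    """Yield each Server-Sent Event as a dict of its fields."""
    buffer = b""
    async with aiohttp.ClientSession() as client:
        async with client.get(url) as response:
            assert response.headers["Content-Type"] == "text/event-stream"
            async for chunk in response.content:
                buffer += chunk
                # A blank line terminates one SSE event.
                while b"\n\n" in buffer:
                    raw, buffer = buffer.split(b"\n\n", 1)
                    event = {}
                    for line in raw.decode().splitlines():
                        field, _, value = line.partition(": ")
                        event[field] = value
                    if "data" in event:
                        try:
                            event["data"] = json.loads(event["data"])
                        except json.JSONDecodeError:
                            pass  # leave non-JSON payloads (e.g. multi-line data) as raw text
                    yield event


async def main():
    async for event in read_sse_events():
        print(event)


if __name__ == "__main__":
    asyncio.run(main())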