From cd5f16cfc6cdaea8bd7acd4f5ffbc0b67fdada5e Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sun, 15 Dec 2024 07:11:32 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 integration_tests/base_routes.py              | 84 ++++++++-----------
 integration_tests/test_streaming_responses.py | 27 +++---
 2 files changed, 43 insertions(+), 68 deletions(-)

diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index e441d12f..297f0949 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -1084,58 +1084,53 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
 
 # --- Streaming responses ---
 
+
 @app.get("/stream/sync")
 def sync_stream():
     def number_generator():
         for i in range(5):
             yield f"Chunk {i}\n".encode()
-
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=number_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=number_generator())
+
 
 @app.get("/stream/async")
 async def async_stream():
     async def async_generator():
         import asyncio
+
         for i in range(5):
             await asyncio.sleep(1)  # Simulate async work
             yield f"Async Chunk {i}\n".encode()
-
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=async_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=async_generator())
+
 
 @app.get("/stream/mixed")
 async def mixed_stream():
     async def mixed_generator():
         import asyncio
+
         # Binary data
         yield b"Binary chunk\n"
         await asyncio.sleep(0.5)
-
+
         # String data
         yield "String chunk\n".encode()
         await asyncio.sleep(0.5)
-
+
         # Integer data
         yield str(42).encode() + b"\n"
         await asyncio.sleep(0.5)
-
+
         # JSON data
         import json
+
         data = {"message": "JSON chunk", "number": 123}
         yield json.dumps(data).encode() + b"\n"
-
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=mixed_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=mixed_generator())
+
 
 @app.get("/stream/events")
 async def server_sent_events():
@@ -1143,73 +1138,60 @@ async def event_generator():
         import asyncio
         import json
         import time
-
+
         # Regular event
         yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
         await asyncio.sleep(1)
-
+
         # Event with ID
         yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
         await asyncio.sleep(1)
-
+
         # Multiple data lines
-        data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
+        data = json.dumps({"status": "complete", "results": [1, 2, 3]}, indent=2)
        yield f"event: complete\ndata: {data}\n\n".encode()
-
+
     return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/event-stream",
-            "Cache-Control": "no-cache",
-            "Connection": "keep-alive"
-        },
-        description=event_generator()
+        status_code=200, headers={"Content-Type": "text/event-stream", "Cache-Control": "no-cache", "Connection": "keep-alive"}, description=event_generator()
     )
+
 
 @app.get("/stream/large-file")
 async def stream_large_file():
     async def file_generator():
         # Simulate streaming a large file in chunks
         chunk_size = 1024  # 1KB chunks
         total_size = 10 * chunk_size  # 10KB total
-
+
         for offset in range(0, total_size, chunk_size):
             # Simulate reading file chunk
             chunk = b"X" * min(chunk_size, total_size - offset)
             yield chunk
-
+
     return Response(
         status_code=200,
-        headers={
-            "Content-Type": "application/octet-stream",
-            "Content-Disposition": "attachment; filename=large-file.bin"
-        },
-        description=file_generator()
+        headers={"Content-Type": "application/octet-stream", "Content-Disposition": "attachment; filename=large-file.bin"},
+        description=file_generator(),
     )
+
 
 @app.get("/stream/csv")
 async def stream_csv():
     async def csv_generator():
         # CSV header
         yield "id,name,value\n".encode()
-
+
         import asyncio
         import random
-
+
         # Generate rows
         for i in range(5):
             await asyncio.sleep(0.5)  # Simulate data processing
             row = f"{i},item-{i},{random.randint(1, 100)}\n"
             yield row.encode()
-
-    return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/csv",
-            "Content-Disposition": "attachment; filename=data.csv"
-        },
-        description=csv_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/csv", "Content-Disposition": "attachment; filename=data.csv"}, description=csv_generator())
+
 
 def main():
     app.set_response_header("server", "robyn")
diff --git a/integration_tests/test_streaming_responses.py b/integration_tests/test_streaming_responses.py
index 3ba872a5..ac93d81d 100644
--- a/integration_tests/test_streaming_responses.py
+++ b/integration_tests/test_streaming_responses.py
@@ -14,15 +14,13 @@
 import json
 
 import pytest
-from robyn import Robyn
-from robyn.robyn import Request
 from integration_tests.base_routes import app
 
 
 @pytest.mark.asyncio
 async def test_sync_stream():
     """Test basic synchronous streaming response.
-
+
     Verifies that:
     1. Response has correct content type
     2. Chunks are received in correct order
@@ -45,7 +43,7 @@
 @pytest.mark.asyncio
 async def test_async_stream():
     """Test asynchronous streaming response.
-
+
     Verifies that:
     1. Response has correct content type
     2. Chunks are received in correct order with delays
@@ -68,7 +66,7 @@
 @pytest.mark.asyncio
 async def test_mixed_stream():
     """Test streaming of mixed content types.
-
+
     Verifies that:
     1. Response handles different content types:
        - Binary data
@@ -82,12 +80,7 @@ async def test_mixed_stream():
     assert response.status_code == 200
     assert response.headers["Content-Type"] == "text/plain"
 
-    expected = [
-        b"Binary chunk\n",
-        b"String chunk\n",
-        b"42\n",
-        json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-    ]
+    expected = [b"Binary chunk\n", b"String chunk\n", b"42\n", json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"]
 
     chunks = []
     async for chunk in response.content:
@@ -101,7 +94,7 @@
 @pytest.mark.asyncio
 async def test_server_sent_events():
     """Test Server-Sent Events (SSE) streaming.
-
+
     Verifies that:
     1. Response has correct SSE headers
     2. Events are properly formatted with:
@@ -143,7 +136,7 @@
 @pytest.mark.asyncio
 async def test_large_file_stream():
     """Test streaming of large files in chunks.
-
+
     Verifies that:
     1. Response has correct headers for file download
     2. Content is streamed in correct chunk sizes
@@ -166,7 +159,7 @@
 @pytest.mark.asyncio
 async def test_csv_stream():
     """Test streaming of CSV data.
-
+
     Verifies that:
     1. Response has correct CSV headers
     2. CSV content is properly formatted
@@ -184,11 +177,11 @@ async def test_csv_stream():
 
     # Verify header
     assert lines[0] == "id,name,value"
-
+
     # Verify data rows
     assert len(lines) == 6  # Header + 5 data rows
     for i, line in enumerate(lines[1:], 0):
-        id_, name, value = line.split(',')
+        id_, name, value = line.split(",")
         assert int(id_) == i
         assert name == f"item-{i}"
-        assert 1 <= int(value) <= 100
\ No newline at end of file
+        assert 1 <= int(value) <= 100
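
Note: the routes touched by this patch all stream by handing a bytes-producing (sync or async) generator to the Response as its description, and the tests consume the body chunk by chunk. The sketch below shows one way such a route could be exercised end to end from an ordinary HTTP client. It is illustrative only and not part of the patch: it assumes the app from base_routes.py is already serving on http://localhost:8080, and it uses aiohttp as a stand-in client rather than the project's own test harness; the consume_stream helper is hypothetical.

# Illustrative sketch, not part of the repository.
# Assumptions: the Robyn app above is running on http://localhost:8080
# and aiohttp is installed.
import asyncio

import aiohttp


async def consume_stream(path: str = "/stream/sync") -> bytes:
    """Collect a streamed response body chunk by chunk."""
    collected = b""
    async with aiohttp.ClientSession() as session:
        async with session.get(f"http://localhost:8080{path}") as resp:
            assert resp.status == 200
            # Chunks arrive incrementally, as the route's generator yields them.
            async for chunk in resp.content.iter_chunked(1024):
                collected += chunk
    return collected


if __name__ == "__main__":
    body = asyncio.run(consume_stream())
    print(body.decode())

Run against /stream/sync this should print the five "Chunk i" lines; pointing it at /stream/events or /stream/csv would show the SSE frames or CSV rows arriving the same way.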