From 16ec256468a48834ac20be2ac23fc0aa3e84baa9 Mon Sep 17 00:00:00 2001
From: dmitry krokhin
Date: Wed, 6 Dec 2023 15:32:35 +0300
Subject: [PATCH] batch size optional limit check

---
 sharded_queue/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sharded_queue/__init__.py b/sharded_queue/__init__.py
index 22c8a32..634a51a 100644
--- a/sharded_queue/__init__.py
+++ b/sharded_queue/__init__.py
@@ -234,7 +234,8 @@ async def process(self, tube: Tube, limit: Optional[int] = None) -> int:
         batch_size = instance.batch_size()
         if batch_size is None:
             batch_size = self.settings.batch_size
-        batch_size = min(limit, batch_size)
+        if limit is not None:
+            batch_size = min(limit, batch_size)
         processed = False
         for pipe in pipes:
             msgs = await storage.range(pipe, batch_size)
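
Not part of the patch: a minimal, self-contained sketch of the batch size selection this hunk changes. The helper name effective_batch_size and the example sizes are hypothetical stand-ins for instance.batch_size(), self.settings.batch_size, and limit. It illustrates why the guard is needed: without it, calling process() with the default limit=None makes min(limit, batch_size) raise a TypeError.

    from typing import Optional

    def effective_batch_size(
        instance_batch_size: Optional[int],
        settings_batch_size: int,
        limit: Optional[int],
    ) -> int:
        """Sketch of the batch size selection from the hunk above."""
        batch_size = instance_batch_size
        if batch_size is None:
            batch_size = settings_batch_size
        # The guard this patch adds: only clamp when a limit is given,
        # otherwise min(None, batch_size) would raise a TypeError.
        if limit is not None:
            batch_size = min(limit, batch_size)
        return batch_size

    assert effective_batch_size(None, 128, None) == 128  # no limit: settings default
    assert effective_batch_size(None, 128, 10) == 10     # limit clamps the batch
    assert effective_batch_size(64, 128, None) == 64     # handler-provided size wins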