Skip to content

Commit

Permalink
default batch size fix
Browse files Browse the repository at this point in the history
  • Loading branch information
nekufa committed Dec 6, 2023
1 parent 1e268d1 commit 947b421
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 4 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ class NotifyHandler(Handler):
# users = await UserRepository.find_all([r.user_id for r in requests])
# await mailer.send_all([construct_message(user) for user in users])

def batch_size(self) -> None:
def batch_size(self) -> Optional[int]:
'''
Optionally, you can override global batch size per handler
'''
Expand Down
9 changes: 6 additions & 3 deletions sharded_queue/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,8 @@ async def handle(self, *requests: T) -> None:
async def stop(self) -> None:
pass

def batch_size(self) -> int:
return self.settings.batch_size
def batch_size(self) -> Optional[int]:
pass


class Tube(NamedTuple):
Expand Down Expand Up @@ -231,7 +231,10 @@ async def process(self, tube: Tube, limit: Optional[int] = None) -> int:
if tube.handler is RecurrentHandler:
batch_size = self.settings.recurrent_tasks_limit
else:
batch_size = min(limit, instance.batch_size())
batch_size = instance.batch_size()
if not batch_size:
batch_size = self.settings.batch_size
batch_size = min(limit, batch_size)
processed = False
for pipe in pipes:
msgs = await storage.range(pipe, batch_size)
Expand Down

0 comments on commit 947b421

Please sign in to comment.