Skip to content

Commit

Permalink
optional handler batch size
Browse files Browse the repository at this point in the history
  • Loading branch information
nekufa committed Dec 6, 2023
1 parent 1b0b600 commit 1e268d1
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 9 deletions.
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,12 @@ class NotifyHandler(Handler):
'''
# users = await UserRepository.find_all([r.user_id for r in requests])
# await mailer.send_all([construct_message(user) for user in users])

def batch_size(self) -> int:
'''
Optionally, you can override the global batch size per handler
'''
return 128
```

## Usage example
Expand Down
15 changes: 6 additions & 9 deletions sharded_queue/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ async def handle(self, *requests: T) -> None:
async def stop(self) -> None:
pass

def batch_size(self) -> int:
return self.settings.batch_size


class Tube(NamedTuple):
handler: type[Handler]
Expand Down Expand Up @@ -180,12 +183,6 @@ async def acquire_tube(

return None

def page_size(self, limit: Optional[int] = None) -> int:
if limit is None:
return self.settings.batch_size

return min(limit, self.settings.batch_size)

async def loop(
self,
limit: Optional[int] = None,
Expand Down Expand Up @@ -232,12 +229,12 @@ async def process(self, tube: Tube, limit: Optional[int] = None) -> int:
limit is None or limit > processed_counter
):
if tube.handler is RecurrentHandler:
page_size = self.settings.recurrent_tasks_limit
batch_size = self.settings.recurrent_tasks_limit
else:
page_size = self.page_size(limit)
batch_size = min(limit, instance.batch_size())
processed = False
for pipe in pipes:
msgs = await storage.range(pipe, page_size)
msgs = await storage.range(pipe, batch_size)
if not len(msgs):
continue

Expand Down

0 comments on commit 1e268d1

Please sign in to comment.