From f7c3e0f87dda1943903ba43e4e43b4eb1604f596 Mon Sep 17 00:00:00 2001
From: parkervg
Date: Sun, 1 Sep 2024 18:35:09 -0400
Subject: [PATCH] max_tokens and stop_at should default to None

---
 blendsql/ingredients/generate.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/blendsql/ingredients/generate.py b/blendsql/ingredients/generate.py
index 3c9d672e..1ec5b9fc 100644
--- a/blendsql/ingredients/generate.py
+++ b/blendsql/ingredients/generate.py
@@ -15,7 +15,11 @@ def generate(model: Model, *args, **kwargs) -> str:
 
 @generate.register(OpenaiLLM)
 def generate_openai(
-    model: OpenaiLLM, prompt, max_tokens: Optional[int], stop_at: List[str], **kwargs
+    model: OpenaiLLM,
+    prompt,
+    max_tokens: Optional[int] = None,
+    stop_at: Optional[List[str]] = None,
+    **kwargs,
 ) -> str:
     """This function only exists because of a bug in guidance
     https://github.com/guidance-ai/guidance/issues/881
@@ -36,7 +40,11 @@ def generate_openai(
 
 @generate.register(AnthropicLLM)
 def generate_anthropic(
-    model: AnthropicLLM, prompt, max_tokens: Optional[int], stop_at: List[str], **kwargs
+    model: AnthropicLLM,
+    prompt,
+    max_tokens: Optional[int] = None,
+    stop_at: Optional[List[str]] = None,
+    **kwargs,
 ):
     client = model.model_obj.engine.anthropic
     return (
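
Below is a minimal usage sketch (not part of the patch) of how callers might invoke the dispatching generate() after this change, omitting max_tokens and stop_at so they fall back to None. The import path for OpenaiLLM, its constructor arguments, and the prompt strings are assumptions for illustration; only generate(), generate_openai(), and the new defaults come from the diff itself.

# Sketch only; assumes blendsql is installed and OpenaiLLM lives under blendsql.models.
from blendsql.ingredients.generate import generate
from blendsql.models import OpenaiLLM  # assumed import path

model = OpenaiLLM("gpt-4o-mini")  # constructor arguments assumed

# After this patch, max_tokens and stop_at can be omitted (both default to None),
# and dispatch on the model type routes the call to generate_openai().
text = generate(model, prompt="Return the capital of France.")

# Both parameters remain available as keyword arguments when limits are needed.
text = generate(model, prompt="List three colors.", max_tokens=16, stop_at=["\n"])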