From c23e406f2db6451184787b3bbe39daf0102d1d66 Mon Sep 17 00:00:00 2001
From: kingbri <8082010+bdashore3@users.noreply.github.com>
Date: Fri, 13 Dec 2024 01:02:37 -0500
Subject: [PATCH] Sampling: Add max_completion_tokens

Conforms with OAI's updated spec

Signed-off-by: kingbri <8082010+bdashore3@users.noreply.github.com>
---
 common/sampling.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/common/sampling.py b/common/sampling.py
index c2b4c3c..7e5ded4 100644
--- a/common/sampling.py
+++ b/common/sampling.py
@@ -25,7 +25,9 @@ class BaseSamplerRequest(BaseModel):
     max_tokens: Optional[int] = Field(
         default_factory=lambda: get_default_sampler_value("max_tokens"),
-        validation_alias=AliasChoices("max_tokens", "max_length"),
+        validation_alias=AliasChoices(
+            "max_tokens", "max_completion_tokens", "max_length"
+        ),
         description="Aliases: max_length",
         examples=[150],
         ge=0,
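
For context (not part of the patch): below is a minimal, self-contained Pydantic v2 sketch of what the change does, showing that any of the aliases in AliasChoices populates the same max_tokens field during validation. The SamplerSketch model name and its plain default are simplifications for illustration; the real BaseSamplerRequest uses default_factory with get_default_sampler_value, as in the hunk above.

# Minimal sketch (assumes pydantic v2); SamplerSketch is a hypothetical
# stand-in for BaseSamplerRequest with a simplified default.
from typing import Optional

from pydantic import AliasChoices, BaseModel, Field


class SamplerSketch(BaseModel):
    max_tokens: Optional[int] = Field(
        default=None,
        validation_alias=AliasChoices(
            "max_tokens", "max_completion_tokens", "max_length"
        ),
        ge=0,
    )


# Each alias resolves onto the same field:
print(SamplerSketch.model_validate({"max_completion_tokens": 150}).max_tokens)  # 150
print(SamplerSketch.model_validate({"max_length": 200}).max_tokens)             # 200

Because "max_tokens" is itself listed in AliasChoices, existing clients that send max_tokens keep working while OpenAI-style clients can send max_completion_tokens instead.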