From 502c048b77a050d6d69029fc9bb31c49714f5296 Mon Sep 17 00:00:00 2001
From: Sam Stoelinga
Date: Fri, 25 Oct 2024 22:05:47 -0700
Subject: [PATCH] [Bugfix] Streaming continuous_usage_stats default to False
 (#9709)

Signed-off-by: Sam Stoelinga
Signed-off-by: Maxime Fournioux <55544262+mfournioux@users.noreply.github.com>
---
 vllm/entrypoints/openai/protocol.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vllm/entrypoints/openai/protocol.py b/vllm/entrypoints/openai/protocol.py
index 733decf80a711..a212c0d608ddb 100644
--- a/vllm/entrypoints/openai/protocol.py
+++ b/vllm/entrypoints/openai/protocol.py
@@ -127,7 +127,7 @@ class ResponseFormat(OpenAIBaseModel):
 
 class StreamOptions(OpenAIBaseModel):
     include_usage: Optional[bool] = True
-    continuous_usage_stats: Optional[bool] = True
+    continuous_usage_stats: Optional[bool] = False
 
 
 class FunctionDefinition(OpenAIBaseModel):
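
For reference, below is a minimal sketch of how the changed default behaves, using plain pydantic as a stand-in for vLLM's OpenAIBaseModel (which wraps a pydantic model); the sketch is illustrative only and not part of the patch. The point of the change is that per-chunk usage stats are now opt-in: a client that sets stream_options without continuous_usage_stats no longer receives usage on every streamed chunk.

    # Minimal sketch, assuming plain pydantic in place of vLLM's OpenAIBaseModel.
    from typing import Optional

    from pydantic import BaseModel


    class StreamOptions(BaseModel):
        include_usage: Optional[bool] = True
        # After this patch, per-chunk usage reporting defaults to off.
        continuous_usage_stats: Optional[bool] = False


    # A client that only asks for usage in the final chunk keeps the new default.
    opts = StreamOptions(include_usage=True)
    print(opts.continuous_usage_stats)  # False (was True before this change)

    # Per-chunk usage must now be requested explicitly.
    opts = StreamOptions(include_usage=True, continuous_usage_stats=True)
    print(opts.continuous_usage_stats)  # True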