From c20eb85c34d1c375a164cfcfcae73291e7cfbd05 Mon Sep 17 00:00:00 2001
From: Fei
Date: Wed, 31 Jul 2024 01:16:01 -0700
Subject: [PATCH] [Bugfix] fix logit processor exceed vocab size issue (#6927)

---
 vllm/entrypoints/openai/logits_processors.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/vllm/entrypoints/openai/logits_processors.py b/vllm/entrypoints/openai/logits_processors.py
index 31eb5aa628c52..f8e04e7f18e0f 100644
--- a/vllm/entrypoints/openai/logits_processors.py
+++ b/vllm/entrypoints/openai/logits_processors.py
@@ -58,6 +58,12 @@ def get_logits_processors(
                 "Found token_id in logit_bias that is not "
                 "an integer or string representing an integer") from exc
 
+        # Check if token_id is within the vocab size
+        for token_id, bias in clamped_logit_bias.items():
+            if token_id < 0 or token_id >= tokenizer.vocab_size:
+                raise ValueError("token_id in logit_bias contains "
+                                 "out-of-vocab token id")
+
     def logit_bias_logits_processor(token_ids: List[int],
                                     logits: torch.Tensor) -> torch.Tensor:
         for token_id, bias in clamped_logit_bias.items():
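
Below is a minimal standalone sketch of the validation this patch introduces, useful for seeing the intended behavior in isolation. The FakeTokenizer class and the vocab size of 32000 are hypothetical stand-ins for a real vLLM tokenizer; only the body of validate_logit_bias mirrors the patched code.

from typing import Dict


class FakeTokenizer:
    # Hypothetical stand-in for a tokenizer that exposes vocab_size.
    vocab_size = 32000


def validate_logit_bias(clamped_logit_bias: Dict[int, float],
                        tokenizer: FakeTokenizer) -> None:
    # Mirrors the patched check: reject any id outside [0, vocab_size).
    for token_id in clamped_logit_bias:
        if token_id < 0 or token_id >= tokenizer.vocab_size:
            raise ValueError("token_id in logit_bias contains "
                             "out-of-vocab token id")


if __name__ == "__main__":
    tok = FakeTokenizer()
    validate_logit_bias({100: 1.5, 200: -2.0}, tok)  # within range: passes
    try:
        validate_logit_bias({999999: 1.0}, tok)      # out of range: raises
    except ValueError as exc:
        print(f"rejected as expected: {exc}")

One design consideration worth noting: for Hugging Face tokenizers, vocab_size typically reports the base vocabulary without added tokens (len(tokenizer) includes them), so a bias on an added token could be rejected by a check written this way; whether that matters depends on the model in use.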