[Bugfix] fix logit processor exceed vocab size issue (#6927)
FeiDeng authored Jul 31, 2024
1 parent 533d193 commit c0644cf
Showing 1 changed file with 6 additions and 0 deletions.
6 changes: 6 additions & 0 deletions vllm/entrypoints/openai/logits_processors.py
@@ -58,6 +58,12 @@ def get_logits_processors(
                 "Found token_id in logit_bias that is not "
                 "an integer or string representing an integer") from exc
 
+        # Check if token_id is within the vocab size
+        for token_id, bias in clamped_logit_bias.items():
+            if token_id < 0 or token_id >= tokenizer.vocab_size:
+                raise ValueError("token_id in logit_bias contains "
+                                 "out-of-vocab token id")
+
         def logit_bias_logits_processor(token_ids: List[int],
                                         logits: torch.Tensor) -> torch.Tensor:
             for token_id, bias in clamped_logit_bias.items():
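
For illustration, a minimal standalone sketch of the behavior this check adds. The StubTokenizer and validate_logit_bias names are hypothetical stand-ins, not part of the commit; the real code reads vocab_size off the tokenizer passed to get_logits_processors:

    from typing import Dict


    class StubTokenizer:
        # Hypothetical stand-in; the added check only reads tokenizer.vocab_size.
        vocab_size = 32000


    def validate_logit_bias(logit_bias: Dict[int, float],
                            tokenizer: StubTokenizer) -> None:
        # Mirrors the added check: every biased token id must lie in
        # [0, vocab_size), otherwise the request is rejected up front.
        for token_id in logit_bias:
            if token_id < 0 or token_id >= tokenizer.vocab_size:
                raise ValueError("token_id in logit_bias contains "
                                 "out-of-vocab token id")


    tokenizer = StubTokenizer()
    validate_logit_bias({100: 5.0}, tokenizer)        # in range: passes
    try:
        validate_logit_bias({32000: 5.0}, tokenizer)  # one past the end
    except ValueError as err:
        print(err)  # -> token_id in logit_bias contains out-of-vocab token id

Failing fast at request-validation time gives the client a descriptive error, rather than a less obvious indexing error once logit_bias_logits_processor later applies the bias to the logits tensor.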
