From 360802762c80c43fba027e7e9818f8d9f1dc5145 Mon Sep 17 00:00:00 2001
From: kingbri
Date: Thu, 22 Feb 2024 21:44:15 -0500
Subject: [PATCH] Model: Fix logit bias token checks

Accidentally checked on the token bias tensor which didn't contain the
token IDs. Check if the index exists on the id_to_piece list instead.

Signed-off-by: kingbri
---
 backends/exllamav2/model.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/backends/exllamav2/model.py b/backends/exllamav2/model.py
index 16293927..5575931a 100644
--- a/backends/exllamav2/model.py
+++ b/backends/exllamav2/model.py
@@ -749,12 +749,12 @@ def generate_gen(self, prompt: str, **kwargs):
             )
 
             # Map logits to the tensor with their biases
-            for token, bias in logit_bias.items():
-                if token in gen_settings.token_bias:
-                    gen_settings.token_bias[token] = bias
+            for token_id, bias in logit_bias.items():
+                if 0 <= token_id < len(self.tokenizer.id_to_piece):
+                    gen_settings.token_bias[token_id] = bias
                 else:
                     logger.warning(
-                        f"Logit bias: Token {token} not present "
+                        f"Logit bias: Token {token_id} not present "
                         "in the model's vocab. Skipping."
                     )