Skip to content

Commit

Permalink
[Bugfix] Fix xgrammar failing to read a vocab_size from LlavaConfig on PixtralHF. (vllm-project#11043)
Browse files Browse the repository at this point in the history
  • Loading branch information
sjuxax authored Dec 10, 2024
1 parent ebf7780 commit e35879c
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions vllm/model_executor/guided_decoding/xgrammar_decoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def from_guided_params(cls,
else:
json_str = guided_params.json
return cls(json_str=json_str,
vocab_size=model_config.hf_config.vocab_size,
vocab_size=model_config.hf_text_config.vocab_size,
encoded_vocab=encoded_vocab,
stop_token_ids=stop_token_ids,
backend_str=backend_str,
Expand All @@ -168,15 +168,15 @@ def from_guided_params(cls,
else:
grammar_str = guided_params.grammar
return cls(grammar_str=grammar_str,
vocab_size=model_config.hf_config.vocab_size,
vocab_size=model_config.hf_text_config.vocab_size,
encoded_vocab=encoded_vocab,
stop_token_ids=stop_token_ids,
backend_str=backend_str,
tokenizer_hash=tokenizer_hash,
max_threads=max_threads)
elif guided_params.json_object:
return cls(json_object=True,
vocab_size=model_config.hf_config.vocab_size,
vocab_size=model_config.hf_text_config.vocab_size,
encoded_vocab=encoded_vocab,
stop_token_ids=stop_token_ids,
backend_str=backend_str,
Expand Down

0 comments on commit e35879c

Please sign in to comment.