[CI/Build] Fix lint errors in mistral tokenizer (vllm-project#9504)
Signed-off-by: charlifu <[email protected]>
DarkLight1337 authored and charlifu committed Oct 23, 2024
1 parent 45f3000 commit 93333e7
1 changed file: vllm/transformers_utils/tokenizers/mistral.py (3 additions, 3 deletions)
@@ -2,11 +2,11 @@
 import re
 from dataclasses import dataclass
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast
 
 from huggingface_hub import HfApi, hf_hub_download
+from mistral_common.protocol.instruct.request import ChatCompletionRequest
 # yapf: disable
-from mistral_common.tokens.tokenizers.mistral import ChatCompletionRequest
 from mistral_common.tokens.tokenizers.mistral import (
     MistralTokenizer as PublicMistralTokenizer)
 # yapf: enable
@@ -166,7 +166,7 @@ def apply_chat_template(self,
                             tools: Optional[Dict[str, Any]] = None,
                             **kwargs) -> List[int]:
 
-        last_message = messages[-1]
+        last_message = cast(Dict[str, Any], messages[-1])
         if last_message["role"] == "assistant":
             last_message["prefix"] = True
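
For context, here is a minimal, self-contained sketch (not part of the commit) of why the cast silences the type checker: mypy treats each chat message as a TypedDict-like entry, so assigning a key such as "prefix" that the declared type does not contain is rejected until the entry is cast to Dict[str, Any]. The AssistantMessage type and mark_assistant_prefix function below are hypothetical stand-ins, not vLLM code.

from typing import Any, Dict, List, TypedDict, cast


class AssistantMessage(TypedDict):
    # Hypothetical stand-in for the TypedDict-style message entries
    # that the type checker sees in apply_chat_template.
    role: str
    content: str


def mark_assistant_prefix(messages: List[AssistantMessage]) -> None:
    # Without the cast, mypy rejects last_message["prefix"] = True because
    # "prefix" is not a declared key of AssistantMessage. Casting to
    # Dict[str, Any] permits arbitrary keys, mirroring the commit's fix.
    last_message = cast(Dict[str, Any], messages[-1])
    if last_message["role"] == "assistant":
        last_message["prefix"] = True

At runtime the cast is a no-op; it only changes what the type checker assumes about last_message.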
