From f54c406d2ac7b1b5e7f9a79cc925b3510c2b3b65 Mon Sep 17 00:00:00 2001
From: nunjunj
Date: Thu, 15 Aug 2024 21:52:31 +0700
Subject: [PATCH] fix lint

---
 vllm/entrypoints/llm.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/vllm/entrypoints/llm.py b/vllm/entrypoints/llm.py
index 2ba37be8026aa..c199e5a844aa3 100644
--- a/vllm/entrypoints/llm.py
+++ b/vllm/entrypoints/llm.py
@@ -1,6 +1,5 @@
 from contextlib import contextmanager
-from typing import (ClassVar, List, Optional, Sequence, Union, cast,
-                    overload)
+from typing import ClassVar, List, Optional, Sequence, Union, cast, overload
 
 from tqdm import tqdm
 from transformers import PreTrainedTokenizer, PreTrainedTokenizerFast
@@ -8,10 +7,9 @@
 from vllm.engine.arg_utils import EngineArgs
 from vllm.engine.llm_engine import LLMEngine
 from vllm.entrypoints.chat_utils import (ChatCompletionMessageParam,
-                                         parse_chat_messages,
-                                         apply_chat_template)
-from vllm.inputs import (PromptInputs, TextPrompt, TokensPrompt,
-                         parse_and_batch_prompt)
+                                         apply_chat_template,
+                                         parse_chat_messages)
+from vllm.inputs import (PromptInputs, TextPrompt, TokensPrompt)
 from vllm.inputs.parse import parse_and_batch_prompt
 from vllm.logger import init_logger
 from vllm.lora.request import LoRARequest