From 7a9946eb256cd69fe6180851143fcc76b1634e89 Mon Sep 17 00:00:00 2001
From: Woosuk Kwon
Date: Wed, 4 Sep 2024 13:05:50 -0700
Subject: [PATCH] [CI] Change test input in Gemma LoRA test (#8163)

---
 tests/lora/test_gemma.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/tests/lora/test_gemma.py b/tests/lora/test_gemma.py
index 58cac3156c9c1..f7c1d4f041c12 100644
--- a/tests/lora/test_gemma.py
+++ b/tests/lora/test_gemma.py
@@ -13,7 +13,7 @@ def do_sample(llm: vllm.LLM, lora_path: str, lora_id: int) -> List[str]:
     prompts = [
         "Quote: Imagination is",
         "Quote: Be yourself;",
-        "Quote: So many books,",
+        "Quote: Painting is poetry that is seen rather than felt,",
     ]
     sampling_params = vllm.SamplingParams(temperature=0, max_tokens=32)
     outputs = llm.generate(
@@ -41,7 +41,8 @@ def test_gemma_lora(gemma_lora_files):
     expected_lora_output = [
         "more important than knowledge.\nAuthor: Albert Einstein\n",
         "everyone else is already taken.\nAuthor: Oscar Wilde\n",
-        "so little time\nAuthor: Frank Zappa\n",
+        "and poetry is painting that is felt rather than seen.\n"
+        "Author: Leonardo da Vinci\n",
     ]
 
     output1 = do_sample(llm, gemma_lora_files, lora_id=1)
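
For context, below is a minimal sketch of how the patched `do_sample` helper presumably drives the LoRA adapter end to end. The `MODEL_PATH` and `LORA_PATH` constants and the exact `vllm.LLM(...)` construction arguments are illustrative assumptions and are not part of this patch; only the prompts, the sampling parameters, and the `do_sample(llm, lora_path, lora_id)` signature come from the diff above.

# Hedged sketch: exercise a LoRA adapter on the updated Gemma test prompts.
# MODEL_PATH and LORA_PATH are assumed placeholders, not part of the patch.
from typing import List

import vllm
from vllm.lora.request import LoRARequest

MODEL_PATH = "google/gemma-7b"      # assumed base model
LORA_PATH = "/path/to/gemma-lora"   # assumed adapter checkpoint


def do_sample(llm: vllm.LLM, lora_path: str, lora_id: int) -> List[str]:
    prompts = [
        "Quote: Imagination is",
        "Quote: Be yourself;",
        "Quote: Painting is poetry that is seen rather than felt,",
    ]
    sampling_params = vllm.SamplingParams(temperature=0, max_tokens=32)
    # Attach the LoRA adapter per request; a lora_id of 0 disables it.
    outputs = llm.generate(
        prompts,
        sampling_params,
        lora_request=LoRARequest(str(lora_id), lora_id, lora_path)
        if lora_id else None)
    return [output.outputs[0].text.strip() for output in outputs]


if __name__ == "__main__":
    llm = vllm.LLM(MODEL_PATH, enable_lora=True)
    print(do_sample(llm, LORA_PATH, lora_id=1))

Because sampling is greedy (temperature=0), the generated continuations are deterministic and the test can compare them prefix-wise against the expected strings, e.g. the new quote should continue with "and poetry is painting that is felt rather than seen.\nAuthor: Leonardo da Vinci\n".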