From a363fef23b1d5070e9a784010daf5651c1eaea95 Mon Sep 17 00:00:00 2001 From: piEsposito Date: Sun, 7 Jul 2024 03:01:16 -0300 Subject: [PATCH] fix bug: build assistant chat-history message from chunks gathered during the single streaming pass, instead of calling the client wrapper's stream a second time --- tiny_ai_client/models.py | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/tiny_ai_client/models.py b/tiny_ai_client/models.py index f4b318e..73c566a 100644 --- a/tiny_ai_client/models.py +++ b/tiny_ai_client/models.py @@ -85,25 +85,19 @@ def stream( if isinstance(images, PIL_Image.Image): images = [images] self.chat.append(Message(text=message, role="user", images=images)) + text = "" for chunk in self.client_wrapper.stream( max_new_tokens=max_new_tokens, temperature=temperature, chat=self.chat, timeout=timeout, ): + text += chunk yield chunk # After streaming, update the chat history self.chat.append( Message( - text="".join( - chunk - for chunk in self.client_wrapper.stream( - max_new_tokens=max_new_tokens, - temperature=temperature, - chat=self.chat, - timeout=timeout, - ) - ), + text=text, role="assistant", ) ) @@ -196,23 +190,16 @@ async def astream( if isinstance(images, PIL_Image.Image): images = [images] self.chat.append(Message(text=message, role="user", images=images)) + text = "" async for chunk in self.client_wrapper.astream( max_new_tokens=max_new_tokens, temperature=temperature, chat=self.chat, timeout=timeout, ): + text += chunk yield chunk - # After streaming, update the chat history - full_response = "" - async for chunk in self.client_wrapper.astream( - max_new_tokens=max_new_tokens, - temperature=temperature, - chat=self.chat, - timeout=timeout, - ): - full_response += chunk - self.chat.append(Message(text=full_response, role="assistant")) + self.chat.append(Message(text=text, role="assistant")) class ToolCall(BaseModel):