Skip to content

Commit

Permalink
feat: Run AWEL flow with interactive mode
Browse files Browse the repository at this point in the history
  • Loading branch information
fangyinc committed Mar 27, 2024
1 parent f176d37 commit 0cd477a
Show file tree
Hide file tree
Showing 8 changed files with 171 additions and 92 deletions.
123 changes: 87 additions & 36 deletions dbgpt/client/_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import functools
import json
import time
import uuid
from typing import Any, Dict

import click
Expand Down Expand Up @@ -107,6 +108,15 @@ def add_chat_options(func):
required=False,
help=_("The extra json data to run AWEL flow."),
)
@click.option(
"-i",
"--interactive",
type=bool,
default=False,
required=False,
is_flag=True,
help=_("Whether use interactive mode to run AWEL flow"),
)
@functools.wraps(func)
def _wrapper(*args, **kwargs):
return func(*args, **kwargs)
Expand All @@ -117,7 +127,7 @@ def _wrapper(*args, **kwargs):
@click.command(name="flow")
@add_base_flow_options
@add_chat_options
def run_flow(name: str, uid: str, data: str, **kwargs):
def run_flow(name: str, uid: str, data: str, interactive: bool, **kwargs):
"""Run a AWEL flow."""
client = Client()

Expand All @@ -134,9 +144,9 @@ def run_flow(name: str, uid: str, data: str, **kwargs):
json_data["chat_mode"] = "chat_flow"
stream = "stream" in json_data and str(json_data["stream"]).lower() in ["true", "1"]
if stream:
loop.run_until_complete(_chat_stream(client, json_data))
loop.run_until_complete(_chat_stream(client, interactive, json_data))
else:
loop.run_until_complete(_chat(client, json_data))
loop.run_until_complete(_chat(client, interactive, json_data))


def _parse_json_data(data: str, **kwargs):
Expand All @@ -160,36 +170,77 @@ def _parse_json_data(data: str, **kwargs):
return json_data


async def _chat_stream(client: Client, json_data: Dict[str, Any]):
start_time = time.time()
try:
cl.info("Chat stream started")
cl.info(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}")
full_text = ""
async for out in client.chat_stream(**json_data):
if out.choices:
text = out.choices[0].delta.content
if text:
full_text += text
cl.print(text, end="")
end_time = time.time()
time_cost = round(end_time - start_time, 2)

cl.success(f"\n:tada: Chat stream finished, timecost: {time_cost} s")
except Exception as e:
cl.error(f"Chat stream failed: {e}", exit_code=1)


async def _chat(client: Client, json_data: Dict[str, Any]):
start_time = time.time()
try:
cl.info("Chat started")
cl.info(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}")
res = await client.chat(**json_data)
if res.choices:
text = res.choices[0].message.content
cl.markdown(text)
time_cost = round(time.time() - start_time, 2)
cl.success(f"\n:tada: Chat stream finished, timecost: {time_cost} s")
except Exception as e:
cl.error(f"Chat failed: {e}", exit_code=1)
async def _chat_stream(client: Client, interactive: bool, json_data: Dict[str, Any]):
    """Run a streaming chat against the AWEL flow, printing tokens as they arrive.

    Args:
        client: The DB-GPT client used to talk to the server.
        interactive: If True, keep prompting the user for new messages until
            they type an exit command ("exit", "quit" or "q"); if False, send
            the single message already in ``json_data`` and return.
        json_data: The request payload; ``messages`` is overwritten with the
            current user input on every turn.
    """
    user_input = json_data.get("messages", "")
    if "conv_uid" not in json_data and interactive:
        # Reuse one conversation id across all interactive turns so the
        # server can keep the chat history for this session.
        json_data["conv_uid"] = str(uuid.uuid4())
    first_message = True
    while True:
        try:
            if interactive and not user_input:
                cl.print("Type 'exit' or 'quit' to exit.")
                while not user_input:
                    user_input = cl.ask("You")
            if user_input.lower() in ["exit", "quit", "q"]:
                break
            start_time = time.time()
            json_data["messages"] = user_input
            if first_message:
                # Echo the opening message once; later turns are already
                # visible from the interactive prompt.
                cl.info("You: " + user_input)
            cl.info("Chat stream started")
            cl.debug(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}")
            cl.print("Bot: ")
            async for out in client.chat_stream(**json_data):
                if out.choices:
                    text = out.choices[0].delta.content
                    if text:
                        # Print each delta immediately, without a newline,
                        # so the reply streams in place.
                        cl.print(text, end="")
            time_cost = round(time.time() - start_time, 2)
            cl.success(f"\n:tada: Chat stream finished, timecost: {time_cost} s")
        except Exception as e:
            cl.error(f"Chat stream failed: {e}", exit_code=1)
        finally:
            # Reset per-turn state: prompt again in interactive mode,
            # otherwise one round has completed and we are done.
            first_message = False
            if interactive:
                user_input = ""
            else:
                break


async def _chat(client: Client, interactive: bool, json_data: Dict[str, Any]):
    """Run a blocking (non-streaming) chat against the AWEL flow.

    The full response is awaited and then rendered as Markdown.

    Args:
        client: The DB-GPT client used to talk to the server.
        interactive: If True, keep prompting the user for new messages until
            they type an exit command ("exit", "quit" or "q"); if False, send
            the single message already in ``json_data`` and return.
        json_data: The request payload; ``messages`` is overwritten with the
            current user input on every turn.
    """
    user_input = json_data.get("messages", "")
    if "conv_uid" not in json_data and interactive:
        # Reuse one conversation id across all interactive turns so the
        # server can keep the chat history for this session.
        json_data["conv_uid"] = str(uuid.uuid4())
    first_message = True
    while True:
        try:
            if interactive and not user_input:
                cl.print("Type 'exit' or 'quit' to exit.")
                while not user_input:
                    user_input = cl.ask("You")
            if user_input.lower() in ["exit", "quit", "q"]:
                break
            start_time = time.time()
            json_data["messages"] = user_input
            if first_message:
                # Echo the opening message once; later turns are already
                # visible from the interactive prompt.
                cl.info("You: " + user_input)

            cl.info("Chat started")
            cl.debug(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}")
            res = await client.chat(**json_data)
            if res.choices:
                text = res.choices[0].message.content
                cl.markdown(text)
            time_cost = round(time.time() - start_time, 2)
            # Message fixed: it previously said "Chat stream finished"
            # (copy-paste from the streaming variant).
            cl.success(f"\n:tada: Chat finished, timecost: {time_cost} s")
        except Exception as e:
            cl.error(f"Chat failed: {e}", exit_code=1)
        finally:
            # Reset per-turn state: prompt again in interactive mode,
            # otherwise one round has completed and we are done.
            first_message = False
            if interactive:
                user_input = ""
            else:
                break
4 changes: 4 additions & 0 deletions dbgpt/util/console/console.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

from rich.console import Console
from rich.markdown import Markdown
from rich.prompt import Prompt
from rich.theme import Theme


Expand Down Expand Up @@ -65,3 +66,6 @@ def print(self, *objects: Any, sep: str = " ", end: str = "\n", **kwargs):
def markdown(self, msg: str, **kwargs):
md = Markdown(msg)
self.console.print(md, **kwargs)

    def ask(self, msg: str, **kwargs):
        """Prompt the user with ``msg`` and return the typed response.

        Thin wrapper around ``rich.prompt.Prompt.ask``; extra keyword
        arguments are forwarded unchanged.
        """
        return Prompt.ask(msg, **kwargs)
27 changes: 16 additions & 11 deletions i18n/locales/fr/LC_MESSAGES/dbgpt_client.po
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-27 03:37+0800\n"
"POT-Creation-Date: 2024-03-27 09:43+0800\n"
"PO-Revision-Date: 2024-03-27 03:21+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
Expand All @@ -18,54 +18,59 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"

#: ../dbgpt/client/_cli.py:29
#: ../dbgpt/client/_cli.py:30
#, fuzzy
msgid "The name of the AWEL flow"
msgstr "Le nom du flux"

#: ../dbgpt/client/_cli.py:36
#: ../dbgpt/client/_cli.py:37
#, fuzzy
msgid "The uid of the AWEL flow"
msgstr "L'UID du flux"

#: ../dbgpt/client/_cli.py:54
#: ../dbgpt/client/_cli.py:55
#, fuzzy
msgid "The messages to run AWEL flow"
msgstr "Les messages du flux"

#: ../dbgpt/client/_cli.py:61
#: ../dbgpt/client/_cli.py:62
#, fuzzy
msgid "The model name of AWEL flow"
msgstr "Le modèle du flux"

#: ../dbgpt/client/_cli.py:70
#: ../dbgpt/client/_cli.py:71
msgid "Whether use stream mode to run AWEL flow"
msgstr ""

#: ../dbgpt/client/_cli.py:78
#: ../dbgpt/client/_cli.py:79
#, fuzzy
msgid "The temperature to run AWEL flow"
msgstr "La température du flux"

#: ../dbgpt/client/_cli.py:85
#: ../dbgpt/client/_cli.py:86
#, fuzzy
msgid "The max new tokens to run AWEL flow"
msgstr "Le nombre maximal de nouveaux tokens du flux"

#: ../dbgpt/client/_cli.py:92
#: ../dbgpt/client/_cli.py:93
#, fuzzy
msgid "The conversation id of the AWEL flow"
msgstr "L'identifiant de conversation du flux"

#: ../dbgpt/client/_cli.py:100
#: ../dbgpt/client/_cli.py:101
msgid "The json data to run AWEL flow, if set, will overwrite other options"
msgstr ""

#: ../dbgpt/client/_cli.py:108
#: ../dbgpt/client/_cli.py:109
#, fuzzy
msgid "The extra json data to run AWEL flow."
msgstr "Les données JSON supplémentaires du flux"

#: ../dbgpt/client/_cli.py:118
msgid "Whether use interactive mode to run AWEL flow"
msgstr "Indique s'il faut utiliser le mode interactif pour exécuter le flux AWEL"

#~ msgid "Whether to stream the flow, default is False"
#~ msgstr "Indique si le flux doit être diffusé en continu, par défaut False"

Expand Down
27 changes: 16 additions & 11 deletions i18n/locales/ja/LC_MESSAGES/dbgpt_client.po
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-03-27 03:37+0800\n"
"POT-Creation-Date: 2024-03-27 09:43+0800\n"
"PO-Revision-Date: 2024-03-27 03:21+0800\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
Expand All @@ -18,54 +18,59 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=1; plural=0;\n"

#: ../dbgpt/client/_cli.py:29
#: ../dbgpt/client/_cli.py:30
#, fuzzy
msgid "The name of the AWEL flow"
msgstr "フローの名前"

#: ../dbgpt/client/_cli.py:36
#: ../dbgpt/client/_cli.py:37
#, fuzzy
msgid "The uid of the AWEL flow"
msgstr "フローのUID"

#: ../dbgpt/client/_cli.py:54
#: ../dbgpt/client/_cli.py:55
#, fuzzy
msgid "The messages to run AWEL flow"
msgstr "フローのメッセージ"

#: ../dbgpt/client/_cli.py:61
#: ../dbgpt/client/_cli.py:62
#, fuzzy
msgid "The model name of AWEL flow"
msgstr "フローのモデル"

#: ../dbgpt/client/_cli.py:70
#: ../dbgpt/client/_cli.py:71
msgid "Whether use stream mode to run AWEL flow"
msgstr ""

#: ../dbgpt/client/_cli.py:78
#: ../dbgpt/client/_cli.py:79
#, fuzzy
msgid "The temperature to run AWEL flow"
msgstr "フローの温度"

#: ../dbgpt/client/_cli.py:85
#: ../dbgpt/client/_cli.py:86
#, fuzzy
msgid "The max new tokens to run AWEL flow"
msgstr "フローの最大新トークン数"

#: ../dbgpt/client/_cli.py:92
#: ../dbgpt/client/_cli.py:93
#, fuzzy
msgid "The conversation id of the AWEL flow"
msgstr "フローの会話ID"

#: ../dbgpt/client/_cli.py:100
#: ../dbgpt/client/_cli.py:101
msgid "The json data to run AWEL flow, if set, will overwrite other options"
msgstr ""

#: ../dbgpt/client/_cli.py:108
#: ../dbgpt/client/_cli.py:109
#, fuzzy
msgid "The extra json data to run AWEL flow."
msgstr "フローの追加のJSONデータ"

#: ../dbgpt/client/_cli.py:118
msgid "Whether use interactive mode to run AWEL flow"
msgstr "AWELフローをインタラクティブモードで実行するかどうか"

#~ msgid "Whether to stream the flow, default is False"
#~ msgstr "フローをストリーミングするかどうか、デフォルトはFalse"

Expand Down
Loading

0 comments on commit 0cd477a

Please sign in to comment.