webui.py
# How to run:
# 1. Install the required packages: pip install streamlit-option-menu streamlit-chatbox>=1.1.6
# 2. Start the local fastchat service: python server\llm_api.py, or run the corresponding sh file
# 3. Start the API server: python server/api.py. If using api = ApiRequest(no_remote_api=True),
#    this step can be skipped (see the commented sketch below these steps).
# 4. Start the Web UI: streamlit run webui.py --server.port 7860
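#
# A minimal sketch of the local-mode alternative mentioned in step 3, assuming
# ApiRequest accepts a no_remote_api flag as that comment suggests; with it the
# WebUI would call the server functions in-process instead of over HTTP:
#
#     api = ApiRequest(base_url=api_address(), no_remote_api=True)
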
import streamlit as st
from webui_pages.utils import *
from streamlit_option_menu import option_menu
from webui_pages import *
import os
from configs import VERSION
from server.utils import api_address
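
# Shared API client handed to every page function below; api_address() from
# server.utils presumably returns the base URL of the API server started in step 3.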
api = ApiRequest(base_url=api_address())

if __name__ == "__main__":
    st.set_page_config(
        "Langchain-Chatchat WebUI",
        os.path.join("img", "chatchat_icon_blue_square_v2.png"),
        initial_sidebar_state="expanded",
        menu_items={
            'Get Help': 'https://github.com/chatchat-space/Langchain-Chatchat',
            'Report a bug': "https://github.com/chatchat-space/Langchain-Chatchat/issues",
            'About': f"""欢迎使用 Langchain-Chatchat WebUI {VERSION}!"""
        }
    )

    if not chat_box.chat_inited:
        st.toast(
            f"欢迎使用 [`Langchain-Chatchat`](https://github.com/chatchat-space/Langchain-Chatchat) ! \n\n"
            f"当前使用模型`{LLM_MODEL}`, 您可以开始提问了."
        )

    pages = {
        "对话": {
            "icon": "chat",
            "func": dialogue_page,
        },
        "知识库管理": {
            "icon": "hdd-stack",
            "func": knowledge_base_page,
        },
    }
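    # The keys of `pages` above are the sidebar menu labels ("对话" = Dialogue,
    # "知识库管理" = Knowledge Base Management); each "func" is called with the
    # shared ApiRequest instance once its entry is selected in the menu below.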

    with st.sidebar:
        st.image(
            os.path.join(
                "img",
                "logo-long-chatchat-trans-v2.png"
            ),
            use_column_width=True
        )
        st.caption(
            f"""<p align="right">当前版本:{VERSION}</p>""",
            unsafe_allow_html=True,
        )
        options = list(pages)
        icons = [x["icon"] for x in pages.values()]

        default_index = 0
        selected_page = option_menu(
            "",
            options=options,
            icons=icons,
            # menu_icon="chat-quote",
            default_index=default_index,
        )

    if selected_page in pages:
        pages[selected_page]["func"](api)
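
# A hedged sketch of how an extra page could be registered with the same dispatch
# pattern, assuming a hypothetical `my_custom_page(api: ApiRequest)` function were
# exported from webui_pages (label and icon are illustrative only):
#
#     pages["Custom Page"] = {
#         "icon": "gear",
#         "func": my_custom_page,
#     }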