The dialogue feature in the WEB UI is complete. Many parameters are not yet supported; they will be added once the API interface is finalized.
This commit is contained in:
parent dc74bdab41
commit 4651b50176
webui.py | 6
@@ -1,3 +1,9 @@
+# How to run:
+# 1. Install the required packages: pip install streamlit-option-menu streamlit-chatbox>=1.1.3
+# 2. Start the local fastchat service: python server\llm_api.py, or run the corresponding sh script
+# 3. Start the API server: python server/api.py. This step can be skipped if api = ApiRequest(no_remote_api=True) is used.
+# 4. Start the WEB UI: streamlit run webui.py --server.port 7860
+
 import streamlit as st
 from webui_pages.utils import *
 from streamlit_option_menu import option_menu
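The run steps added in the hunk above boil down to: install the two Streamlit add-ons, start the local fastchat service, optionally start the API server, and launch the web UI with Streamlit. As a quick illustration of step 3, here is a minimal, hypothetical sketch (not part of this commit) that drives the chat API directly with the remote server skipped; it assumes ApiRequest is exported by webui_pages.utils (the diff only shows a wildcard import) and that chat_chat(prompt, no_remote_api=True) yields text chunks, exactly as dialogue_page() consumes them in the next hunk.

from webui_pages.utils import ApiRequest  # assumed export; the diff only uses `from webui_pages.utils import *`

api = ApiRequest(no_remote_api=True)  # per step 3: skip the separate API server

text = ""
for chunk in api.chat_chat("你好", no_remote_api=True):  # same accumulate-as-you-stream loop as dialogue_page()
    text += chunk
print(text)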
@@ -2,37 +2,85 @@ import streamlit as st
 from webui_pages.utils import *
 from streamlit_chatbox import *
 
-chat_box = ChatBox()
-
 
 def dialogue_page(api: ApiRequest):
+    chat_box = ChatBox(
+        greetings=[
+            f"欢迎使用 [`Langchain-Chatglm`](https://github.com/chatchat-space/langchain-chatglm) ! 当前使用模型`{LLM_MODEL}`, 您可以开始提问了.",
+        ]
+    )
+
     with st.sidebar:
+        def on_mode_change():
+            mode = st.session_state.dialogue_mode
+            text = f"已切换到 {mode} 模式。"
+            if mode == "知识库问答":
+                cur_kb = st.session_state.get("selected_kb")
+                if cur_kb:
+                    text = f"{text} 当前知识库: `{cur_kb}`。"
+            chat_box.ai_say(text, not_render=True)
+
         dialogue_mode = st.radio("请选择对话模式",
                                  ["LLM 对话",
                                   "知识库问答",
-                                  "Bing 搜索问答"])
-        history_len = st.slider("历史对话轮数:", 1, 10, 1)
+                                  "Bing 搜索问答",
+                                  "Duck 搜索问答",
+                                  ],
+                                 on_change=on_mode_change,
+                                 key="dialogue_mode",
+                                 )
+        history_len = st.slider("历史对话轮数:", 1, 10, 1, disabled=True)
+        # todo: support history len
+        if st.button("清除历史对话"):
+            chat_box.reset_history()
+
+        def on_kb_change():
+            chat_box.ai_say(f"已加载知识库: {st.session_state.selected_kb}", not_render=True)
+
         if dialogue_mode == "知识库问答":
-            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
-            with st.expander(f"{selected_kb} 中已存储文件"):
-                st.write(get_kb_files(selected_kb))
+            kb_list = api.list_knowledge_bases()
+            selected_kb = st.selectbox(
+                "请选择知识库:",
+                kb_list,
+                on_change=on_kb_change,
+                key="selected_kb",
+            )
+            top_k = st.slider("匹配知识条数:", 1, 20, 3, disabled=True)
+            score_threshold = st.slider("知识匹配分数阈值:", 0, 1000, 0, disabled=True)
+            chunk_content = st.checkbox("关联上下文", False, disabled=True)
+            chunk_size = st.slider("关联长度:", 0, 500, 250, disabled=True)
 
     # Display chat messages from history on app rerun
     chat_box.output_messages()
 
     if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
         chat_box.user_say(prompt)
-        chat_box.ai_say("正在思考...")
-        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r:
-        # todo: support history len
-        text = ""
-        r = api.chat_chat(prompt, no_remote_api=True)
-        for t in r:
-            text += t
-            chat_box.update_msg(text)
-        chat_box.update_msg(text, streaming=False)
-        # with api.chat_chat(prompt) as r:
-        #     for t in r.iter_text(None):
-        #         text += t
-        #         chat_box.update_msg(text)
-        #     chat_box.update_msg(text, streaming=False)
+        if dialogue_mode == "LLM 对话":
+            chat_box.ai_say("正在思考...")
+            text = ""
+            r = api.chat_chat(prompt, no_remote_api=True)
+            for t in r:
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)  # final update of the string: remove the streaming cursor
+        elif dialogue_mode == "知识库问答":
+            chat_box.ai_say(f"正在查询知识库: `{selected_kb}` ...")
+            text = ""
+            for t in api.knowledge_base_chat(prompt, selected_kb):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
+        elif dialogue_mode == "Bing 搜索问答":
+            chat_box.ai_say("正在执行Bing搜索...")
+            text = ""
+            for t in api.bing_search_chat(prompt):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
+        elif dialogue_mode == "Duck 搜索问答":
+            chat_box.ai_say("正在执行Duckduck搜索...")
+            text = ""
+            for t in api.duckduckgo_search_chat(prompt):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
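All four dialogue branches in dialogue_page() repeat the same accumulate-and-update loop: seed an AI message with ai_say(), append each streamed chunk via update_msg(text), then finish with update_msg(text, streaming=False) to drop the cursor. Below is a hedged sketch of how that pattern could be factored into a helper; chat_box and the api.*_chat generators are taken from the diff above, while the helper name stream_to_chat_box is hypothetical and not part of this commit.

def stream_to_chat_box(chunks, chat_box) -> str:
    """Accumulate streamed text chunks into the current AI message of a ChatBox."""
    text = ""
    for t in chunks:
        text += t
        chat_box.update_msg(text)               # streaming update (cursor still shown)
    chat_box.update_msg(text, streaming=False)  # final update: remove the cursor
    return text

# e.g. the knowledge-base branch above could become:
#     chat_box.ai_say(f"正在查询知识库: `{selected_kb}` ...")
#     stream_to_chat_box(api.knowledge_base_chat(prompt, selected_kb), chat_box)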