update streamlit ui: support latest knowledge base and search engine chat api; export messages to markdown.
parent 27d49be706
commit 3318cef751
@@ -18,5 +18,6 @@ unstructured[local-inference]
 streamlit>=1.25.0
 streamlit-option-menu
-streamlit-chatbox>=1.1.0
+streamlit-antd-components
+streamlit-chatbox>=1.1.6
 httpx

webui.py (9 changed lines)
@@ -1,5 +1,5 @@
 # How to run:
-# 1. Install the required packages: pip install streamlit-option-menu streamlit-chatbox>=1.1.4
+# 1. Install the required packages: pip install streamlit-option-menu streamlit-chatbox>=1.1.6
 # 2. Start the local fastchat service: python server\llm_api.py, or run the corresponding sh script
 # 3. Start the API server: python server/api.py. This step can be skipped if api = ApiRequest(no_remote_api=True) is used.
 # 4. Start the web UI: streamlit run webui.py --server.port 7860
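Step 3 above is optional because the UI's ApiRequest wrapper can either call the HTTP API server or handle the same requests in-process. A minimal sketch of the two configurations, restricted to the constructor arguments and calls that actually appear in this diff (base_url, no_remote_api, list_knowledge_bases); it is illustrative rather than a description of the wrapper's full interface:

from webui_pages.utils import ApiRequest

# Remote mode: the web UI talks to the API server from step 3 over HTTP
# (this commit points it at http://127.0.0.1:7861).
api = ApiRequest(base_url="http://127.0.0.1:7861", no_remote_api=False)

# Local mode: step 3 can be skipped; requests are presumably handled in-process.
local_api = ApiRequest(no_remote_api=True)

# Individual calls can also override the flag, as the dialogue page below does.
kb_list = api.list_knowledge_bases(no_remote_api=True)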
@@ -9,11 +9,16 @@ from webui_pages.utils import *
 from streamlit_option_menu import option_menu
 from webui_pages import *
 
-api = ApiRequest()
+
+api = ApiRequest(base_url="http://127.0.0.1:7861", no_remote_api=False)
 
 if __name__ == "__main__":
     st.set_page_config("langchain-chatglm WebUI", layout="wide")
 
+    if not chat_box.chat_inited:
+        st.toast(f"欢迎使用 [`Langchain-Chatglm`](https://github.com/chatchat-space/langchain-chatglm) ! \n\n当前使用模型`{LLM_MODEL}`, 您可以开始提问了.")
+        st.toast(" ")
+
     # pages = {"对话1": {"icon": "chat",
     #                    "func": dialogue_page,
     #                    },

@@ -1,3 +1,3 @@
-from .dialogue import dialogue_page
+from .dialogue import dialogue_page, chat_box
 from .knowledge_base import knowledge_base_page
 from .model_config import model_config_page

@@ -1 +1 @@
-from .dialogue import dialogue_page
+from .dialogue import dialogue_page, chat_box

@@ -1,41 +1,18 @@
 import streamlit as st
 from webui_pages.utils import *
 from streamlit_chatbox import *
 from datetime import datetime
+import streamlit_antd_components as sac
+from server.chat.search_engine_chat import SEARCH_ENGINES
 
 
-chat_box = ChatBox()
+chat_box = ChatBox(
+    greetings=[
+        f"欢迎使用 [`Langchain-Chatglm`](https://github.com/chatchat-space/langchain-chatglm) ! 当前使用模型`{LLM_MODEL}`, 您可以开始提问了.",
+    ]
+)
 
 def dialogue_page(api: ApiRequest):
     chat_box.init_session()
 
     with st.sidebar:
-        def on_mode_change():
-            mode = st.session_state.dialogue_mode
-            text = f"已切换到 {mode} 模式。"
-            if mode == "知识库问答":
-                cur_kb = st.session_state.get("selected_kb")
-                if cur_kb:
-                    text = f"{text} 当前知识库: `{cur_kb}`。"
-            chat_box.ai_say(text, not_render=True)
-
-        dialogue_mode = st.radio("请选择对话模式",
-                                 ["LLM 对话",
-                                  "知识库问答",
-                                  "Bing 搜索问答",
-                                  "Duck 搜索问答",
-                                  ],
-                                 on_change=on_mode_change,
-                                 key="dialogue_mode",
-                                 )
-        history_len = st.slider("历史对话轮数:", 1, 10, 1, disabled=True)
-        # todo: support history len
-        with st.expander("会话管理", True):
-            if st.button("清除历史对话内容"):
-                chat_box.reset_history()
-
-            col_input, col_btn = st.columns(2)
-            new_chat_name = col_input.text_input(
-                "新会话名称",
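The rewritten dialogue page leans on the streamlit-chatbox>=1.1.6 API this commit pins: a module-level ChatBox carries the greeting, init_session() prepares per-session state (webui.py uses chat_box.chat_inited to show its welcome toast only once), and output_messages() replays history on every Streamlit rerun before new input is handled. A minimal sketch of that flow; st.chat_input and user_say do not appear in these hunks and are assumed, and all prompt strings are placeholders:

import streamlit as st
from streamlit_chatbox import ChatBox, Markdown

chat_box = ChatBox(greetings=["欢迎使用!"])  # greeting rendered for a fresh session

chat_box.init_session()      # per-session setup; webui.py guards its toast with chat_box.chat_inited
chat_box.output_messages()   # replay existing history on every Streamlit rerun

if prompt := st.chat_input("请输入对话内容"):   # assumed input widget; not part of this diff
    chat_box.user_say(prompt)                   # assumed counterpart to ai_say
    chat_box.ai_say([
        "正在查询...",                                         # element 0: the streamed answer
        Markdown("...", in_expander=True, title="匹配结果"),   # element 1: retrieved documents
    ])
    chat_box.update_msg("最终回答", 0, streaming=False)   # overwrite element 0 by index
    chat_box.update_msg("文档片段", 1, streaming=False)   # fill the expander element

The two-element ai_say plus indexed update_msg calls are the same pattern the knowledge-base and search-engine branches use further down.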
@@ -51,33 +28,58 @@ def dialogue_page(api: ApiRequest):
             st.session_state.new_chat_name = ""
         col_btn.button("新建会话", on_click=on_btn_new_chat)
 
-        cols = st.columns(2)
         chat_list = chat_box.get_chat_names()
-        try:
-            index = chat_list.index(chat_box.cur_chat_name)
-        except:
-            index = 0
-        cur_chat_name = cols[0].selectbox("当前会话:", chat_list, index, label_visibility="collapsed")
+        cur_chat_name = sac.buttons(chat_list, 0)
         chat_box.use_chat_name(cur_chat_name)
-        if cols[1].button("清除会话"):
-            chat_box.del_chat_name(cur_chat_name)
+
+        cols = st.columns(3)
+        export_btn = cols[0]
+        if cols[1].button("Clear"):
+            chat_box.reset_history()
+
+        if cols[2].button("Delete"):
+            chat_box.del_chat_name(cur_chat_name, disabled=len(chat_list) <= 1)
+
+        def on_mode_change():
+            mode = st.session_state.dialogue_mode
+            text = f"已切换到 {mode} 模式。"
+            if mode == "知识库问答":
+                cur_kb = st.session_state.get("selected_kb")
+                if cur_kb:
+                    text = f"{text} 当前知识库: `{cur_kb}`。"
+            st.toast(text)
+            # sac.alert(text, description="descp", type="success", closable=True, banner=True)
+
+        dialogue_mode = st.radio("请选择对话模式",
+                                 ["LLM 对话",
+                                  "知识库问答",
+                                  "搜索引擎问答",
+                                  ],
+                                 on_change=on_mode_change,
+                                 key="dialogue_mode",
+                                 )
+        history_len = st.slider("历史对话轮数:", 1, 10, 1, disabled=True)
+        # todo: support history len
 
         def on_kb_change():
-            chat_box.ai_say(f"已加载知识库: {st.session_state.selected_kb}", not_render=True)
+            st.toast(f"已加载知识库: {st.session_state.selected_kb}")
 
         if dialogue_mode == "知识库问答":
             with st.expander("知识库配置", True):
-                kb_list = api.list_knowledge_bases()
+                kb_list = api.list_knowledge_bases(no_remote_api=True)
                 selected_kb = st.selectbox(
                     "请选择知识库:",
                     kb_list,
                     on_change=on_kb_change,
                     key="selected_kb",
                 )
-                top_k = st.slider("匹配知识条数:", 1, 20, 3)
+                kb_top_k = st.slider("匹配知识条数:", 1, 20, 3)
                 score_threshold = st.slider("知识匹配分数阈值:", 0, 1000, 0, disabled=True)
                 chunk_content = st.checkbox("关联上下文", False, disabled=True)
                 chunk_size = st.slider("关联长度:", 0, 500, 250, disabled=True)
+        elif dialogue_mode == "搜索引擎问答":
+            search_engine = sac.buttons(SEARCH_ENGINES.keys(), 0)
+            se_top_k = st.slider("匹配搜索结果条数:", 1, 20, 3)
 
     # Display chat messages from history on app rerun
     chat_box.output_messages()
@@ -93,23 +95,32 @@ def dialogue_page(api: ApiRequest):
                 chat_box.update_msg(text)
             chat_box.update_msg(text, streaming=False)  # update the final string and remove the cursor
         elif dialogue_mode == "知识库问答":
-            chat_box.ai_say(f"正在查询知识库: `{selected_kb}` ...")
+            chat_box.ai_say([
+                f"正在查询知识库: `{selected_kb}` ...",
+                Markdown("...", in_expander=True, title="知识库匹配结果"),
+            ])
             text = ""
-            for t in api.knowledge_base_chat(prompt, selected_kb, top_k):
-                text += t
-                chat_box.update_msg(text)
-            chat_box.update_msg(text, streaming=False)
-        elif dialogue_mode == "Bing 搜索问答":
-            chat_box.ai_say("正在执行Bing搜索...")
-            text = ""
-            for t in api.bing_search_chat(prompt):
-                text += t
-                chat_box.update_msg(text)
-            chat_box.update_msg(text, streaming=False)
-        elif dialogue_mode == "Duck 搜索问答":
-            chat_box.ai_say("正在执行Duckduck搜索...")
-            text = ""
-            for t in api.duckduckgo_search_chat(prompt):
-                text += t
-                chat_box.update_msg(text)
-            chat_box.update_msg(text, streaming=False)
+            for d in api.knowledge_base_chat(prompt, selected_kb, kb_top_k):
+                text += d["answer"]
+                chat_box.update_msg(text, 0)
+                chat_box.update_msg("\n\n".join(d["docs"]), 1, streaming=False)
+            chat_box.update_msg(text, 0, streaming=False)
+        elif dialogue_mode == "搜索引擎问答":
+            chat_box.ai_say([
+                f"正在执行{search_engine}搜索...",
+                Markdown("...", in_expander=True, title="网络搜索结果"),
+            ])
+            text = ""
+            for d in api.bing_search_chat(prompt, search_engine, se_top_k):
+                text += d["answer"]
+                chat_box.update_msg(text, 0)
+                chat_box.update_msg("\n\n".join(d["docs"]), 1, streaming=False)
+            chat_box.update_msg(text, 0, streaming=False)
+
+    now = datetime.now()
+    cols[0].download_button(
+        "Export",
+        "".join(chat_box.export2md(cur_chat_name)),
+        file_name=f"{now:%Y-%m-%d %H.%M}_{cur_chat_name}.md",
+        mime="text/markdown",
+    )
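These loops are the UI side of the chat-API change named in the commit title: instead of yielding bare text chunks, knowledge_base_chat and bing_search_chat now yield dicts whose "answer" field carries the incremental reply and whose "docs" field carries the matched sources, which is what lets the page stream element 0 while filling the expander in element 1. A minimal non-Streamlit consumer, assuming only the call signatures and dict shape used above; the knowledge-base name and question are placeholders:

from webui_pages.utils import ApiRequest

api = ApiRequest(base_url="http://127.0.0.1:7861", no_remote_api=False)

answer, docs = "", []
for d in api.knowledge_base_chat("什么是知识库?", "samples", 3):  # prompt, knowledge base, top_k
    answer += d["answer"]          # incremental answer text
    docs = d["docs"]               # matched snippets accompany each chunk
    print(d["answer"], end="", flush=True)

print("\n\n--- 知识库匹配结果 ---")
print("\n\n".join(docs))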