From 4651b50176a111ab2acbb82a0c4d2d67f789b301 Mon Sep 17 00:00:00 2001
From: liunux4odoo
Date: Thu, 3 Aug 2023 13:41:31 +0800
Subject: [PATCH] Dialogue features in the WEB UI are complete. Many parameters are not yet supported; they will be added once the API interface is finalized.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 webui.py                         |  6 +++
 webui_pages/dialogue/dialogue.py | 90 ++++++++++++++++++++++++--------
 2 files changed, 75 insertions(+), 21 deletions(-)

diff --git a/webui.py b/webui.py
index bd02735..fe58240 100644
--- a/webui.py
+++ b/webui.py
@@ -1,3 +1,9 @@
+# How to run:
+# 1. Install the required packages: pip install streamlit-option-menu streamlit-chatbox>=1.1.3
+# 2. Start the local fastchat service: python server\llm_api.py, or run the corresponding sh script
+# 3. Start the API server: python server/api.py. This step can be skipped when api = ApiRequest(no_remote_api=True) is used.
+# 4. Start the WEB UI: streamlit run webui.py --server.port 7860
+
 import streamlit as st
 from webui_pages.utils import *
 from streamlit_option_menu import option_menu
diff --git a/webui_pages/dialogue/dialogue.py b/webui_pages/dialogue/dialogue.py
index f01c0ac..5f7faf9 100644
--- a/webui_pages/dialogue/dialogue.py
+++ b/webui_pages/dialogue/dialogue.py
@@ -2,37 +2,85 @@ import streamlit as st
 from webui_pages.utils import *
 from streamlit_chatbox import *
 
-chat_box = ChatBox()
-
 
 def dialogue_page(api: ApiRequest):
+    chat_box = ChatBox(
+        greetings=[
+            f"欢迎使用 [`Langchain-Chatglm`](https://github.com/chatchat-space/langchain-chatglm) ! 当前使用模型`{LLM_MODEL}`, 您可以开始提问了.",
+        ]
+    )
+
     with st.sidebar:
+        def on_mode_change():
+            mode = st.session_state.dialogue_mode
+            text = f"已切换到 {mode} 模式。"
+            if mode == "知识库问答":
+                cur_kb = st.session_state.get("selected_kb")
+                if cur_kb:
+                    text = f"{text} 当前知识库: `{cur_kb}`。"
+            chat_box.ai_say(text, not_render=True)
+
         dialogue_mode = st.radio("请选择对话模式",
                                  ["LLM 对话",
                                   "知识库问答",
-                                  "Bing 搜索问答"])
-        history_len = st.slider("历史对话轮数:", 1, 10, 1)
+                                  "Bing 搜索问答",
+                                  "Duck 搜索问答",
+                                  ],
+                                 on_change=on_mode_change,
+                                 key="dialogue_mode",
+                                 )
+        history_len = st.slider("历史对话轮数:", 1, 10, 1, disabled=True)
+        # todo: support history len
+        if st.button("清除历史对话"):
+            chat_box.reset_history()
+
+        def on_kb_change():
+            chat_box.ai_say(f"已加载知识库: {st.session_state.selected_kb}", not_render=True)
+
         if dialogue_mode == "知识库问答":
-            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
-            with st.expander(f"{selected_kb} 中已存储文件"):
-                st.write(get_kb_files(selected_kb))
+            kb_list = api.list_knowledge_bases()
+            selected_kb = st.selectbox(
+                "请选择知识库:",
+                kb_list,
+                on_change=on_kb_change,
+                key="selected_kb",
+            )
+            top_k = st.slider("匹配知识条数:", 1, 20, 3, disabled=True)
+            score_threshold = st.slider("知识匹配分数阈值:", 0, 1000, 0, disabled=True)
+            chunk_content = st.checkbox("关联上下文", False, disabled=True)
+            chunk_size = st.slider("关联长度:", 0, 500, 250, disabled=True)
 
     # Display chat messages from history on app rerun
     chat_box.output_messages()
 
     if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
         chat_box.user_say(prompt)
-        chat_box.ai_say("正在思考...")
-        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r:
-        # todo: support history len
-        text = ""
-        r = api.chat_chat(prompt, no_remote_api=True)
-        for t in r:
-            text += t
-            chat_box.update_msg(text)
-        chat_box.update_msg(text, streaming=False)
-        # with api.chat_chat(prompt) as r:
-        #     for t in r.iter_text(None):
-        #         text += t
-        #         chat_box.update_msg(text)
-        #     chat_box.update_msg(text, streaming=False)
+        if dialogue_mode == "LLM 对话":
+            chat_box.ai_say("正在思考...")
+            text = ""
+            r = api.chat_chat(prompt, no_remote_api=True)
+            for t in r:
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)  # final update with the complete string, removing the cursor
+        elif dialogue_mode == "知识库问答":
+            chat_box.ai_say(f"正在查询知识库: `{selected_kb}` ...")
+            text = ""
+            for t in api.knowledge_base_chat(prompt, selected_kb):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
+        elif dialogue_mode == "Bing 搜索问答":
+            chat_box.ai_say("正在执行Bing搜索...")
+            text = ""
+            for t in api.bing_search_chat(prompt):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
+        elif dialogue_mode == "Duck 搜索问答":
+            chat_box.ai_say("正在执行Duckduck搜索...")
+            text = ""
+            for t in api.duckduckgo_search_chat(prompt):
+                text += t
+                chat_box.update_msg(text)
+            chat_box.update_msg(text, streaming=False)
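
The mode switch relies on Streamlit's on_change/key mechanism: the radio widget writes its new value into st.session_state["dialogue_mode"] before the callback runs, so on_mode_change can read the freshly selected mode and post a notice into the chat. The following is a minimal, self-contained sketch of that mechanism only, independent of this repo's ApiRequest/ChatBox wiring; the label texts and the "mode_notice" key are illustrative.

import streamlit as st

def on_mode_change():
    # the widget value is already in session_state when the callback runs
    mode = st.session_state.dialogue_mode
    st.session_state["mode_notice"] = f"Switched to {mode} mode."

st.radio(
    "Dialogue mode",
    ["LLM chat", "Knowledge base QA", "Bing search QA", "Duck search QA"],
    on_change=on_mode_change,
    key="dialogue_mode",
)
st.write(st.session_state.get("mode_notice", ""))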
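
All four dialogue branches share the same streaming pattern: post a placeholder with ai_say, overwrite it with update_msg as chunks arrive, then finish with streaming=False. The sketch below isolates that pattern under stated assumptions: fake_token_stream is a hypothetical stand-in for the ApiRequest generators (chat_chat, knowledge_base_chat, etc.), which are assumed to yield text chunks.

import time
import streamlit as st
from streamlit_chatbox import ChatBox

chat_box = ChatBox()
chat_box.output_messages()  # re-render stored messages on every Streamlit rerun

def fake_token_stream(prompt: str):
    # hypothetical stand-in for ApiRequest.chat_chat and friends
    for word in f"You said: {prompt}".split():
        time.sleep(0.05)
        yield word + " "

if prompt := st.chat_input("Type a message"):
    chat_box.user_say(prompt)
    chat_box.ai_say("Thinking...")              # placeholder that will be overwritten
    text = ""
    for chunk in fake_token_stream(prompt):
        text += chunk
        chat_box.update_msg(text)               # overwrite the last AI message in place
    chat_box.update_msg(text, streaming=False)  # final update removes the streaming cursor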