From bcfd3f5af54eccc00ff7f8d600788c188205d0a3 Mon Sep 17 00:00:00 2001
From: imClumsyPanda
Date: Tue, 1 Aug 2023 14:47:38 +0800
Subject: [PATCH] add webui_pages

---
 configs/model_config.py                      |  4 +-
 webui.py                                     | 50 ++------------------
 webui_pages/__init__.py                      |  3 ++
 webui_pages/dialogue/__init__.py             |  1 +
 webui_pages/dialogue/dialogue.py             | 36 ++++++++++++++
 webui_pages/knowledge_base/__init__.py       |  1 +
 webui_pages/knowledge_base/knowledge_base.py |  6 +++
 webui_pages/model_config/__init__.py         |  1 +
 webui_pages/model_config/model_config.py     |  5 ++
 webui_utils.py => webui_pages/utils.py       |  0
 10 files changed, 59 insertions(+), 48 deletions(-)
 create mode 100644 webui_pages/__init__.py
 create mode 100644 webui_pages/dialogue/__init__.py
 create mode 100644 webui_pages/dialogue/dialogue.py
 create mode 100644 webui_pages/knowledge_base/__init__.py
 create mode 100644 webui_pages/knowledge_base/knowledge_base.py
 create mode 100644 webui_pages/model_config/__init__.py
 create mode 100644 webui_pages/model_config/model_config.py
 rename webui_utils.py => webui_pages/utils.py (100%)

diff --git a/configs/model_config.py b/configs/model_config.py
index eb4c4a9..b9e5206 100644
--- a/configs/model_config.py
+++ b/configs/model_config.py
@@ -21,7 +21,7 @@ embedding_model_dict = {
     "text2vec-sentence": "shibing624/text2vec-base-chinese-sentence",
     "text2vec-multilingual": "shibing624/text2vec-base-multilingual",
     "m3e-small": "moka-ai/m3e-small",
-    "m3e-base": "moka-ai/m3e-base",
+    "m3e-base": "/Users/liuqian/Downloads/ChatGLM-6B/m3e-base",
     "m3e-large": "moka-ai/m3e-large",
 }
 
@@ -46,7 +46,7 @@ llm_model_dict = {
     },
 
     "chatglm2-6b": {
-        "local_model_path": "THUDM/chatglm2-6b",
+        "local_model_path": "/Users/liuqian/Downloads/ChatGLM-6B/chatglm2-6b",  # "THUDM/chatglm2-6b",
         "api_base_url": "http://localhost:8888/v1",  # "name"修改为fastchat服务中的"api_base_url"
         "api_key": "EMPTY"
     },
diff --git a/webui.py b/webui.py
index a79f4c0..c06646e 100644
--- a/webui.py
+++ b/webui.py
@@ -1,63 +1,21 @@
 import streamlit as st
-from streamlit_chatbox import *
-from webui_utils import *
+from webui_pages.utils import *
 from streamlit_option_menu import option_menu
-
+from webui_pages import *
 
 api = ApiRequest()
 
 
-def dialogue_page():
-    with st.sidebar:
-        dialogue_mode = st.radio("请选择对话模式",
-                                 ["LLM 对话",
-                                  "知识库问答",
-                                  "Bing 搜索问答"])
-        history_len = st.slider("历史对话轮数:", 1, 10, 1)
-        if dialogue_mode == "知识库问答":
-            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
-            with st.expander(f"{selected_kb} 中已存储文件"):
-                st.write(get_kb_files(selected_kb))
-
-    # Display chat messages from history on app rerun
-    chat_box.output_messages()
-
-    if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
-        chat_box.user_say(prompt)
-        chat_box.ai_say("正在思考...")
-        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r: # todo: support history len
-        text = ""
-        r = api.chat_chat(prompt, no_remote_api=True)
-        for t in r:
-            text += t
-            chat_box.update_msg(text)
-        chat_box.update_msg(text, streaming=False)
-        # with api.chat_chat(prompt) as r:
-        #     for t in r.iter_text(None):
-        #         text += t
-        #         chat_box.update_msg(text)
-        #     chat_box.update_msg(text, streaming=False)
-
-def knowledge_base_edit_page():
-    pass
-
-
-def config_page():
-    pass
-
-
 if __name__ == "__main__":
     st.set_page_config("langchain-chatglm WebUI")
-    chat_box = ChatBox()
-
 
     pages = {"对话": {"icon": "chat",
                     "func": dialogue_page,
                     },
              "知识库管理": {"icon": "database-fill-gear",
-                         "func": knowledge_base_edit_page,
+                         "func": knowledge_base_page,
                          },
              "模型配置": {"icon": "gear",
-                      "func": config_page,
+                      "func": model_config_page,
                       }
              }
diff --git a/webui_pages/__init__.py b/webui_pages/__init__.py
new file mode 100644
index 0000000..8d3ae10
--- /dev/null
+++ b/webui_pages/__init__.py
@@ -0,0 +1,3 @@
+from .dialogue import dialogue_page
+from .knowledge_base import knowledge_base_page
+from .model_config import model_config_page
\ No newline at end of file
diff --git a/webui_pages/dialogue/__init__.py b/webui_pages/dialogue/__init__.py
new file mode 100644
index 0000000..b3aad16
--- /dev/null
+++ b/webui_pages/dialogue/__init__.py
@@ -0,0 +1 @@
+from .dialogue import dialogue_page
\ No newline at end of file
diff --git a/webui_pages/dialogue/dialogue.py b/webui_pages/dialogue/dialogue.py
new file mode 100644
index 0000000..08b348c
--- /dev/null
+++ b/webui_pages/dialogue/dialogue.py
@@ -0,0 +1,36 @@
+import streamlit as st
+from webui_pages.utils import *
+from streamlit_chatbox import *
+
+chat_box = ChatBox()
+
+def dialogue_page():
+    with st.sidebar:
+        dialogue_mode = st.radio("请选择对话模式",
+                                 ["LLM 对话",
+                                  "知识库问答",
+                                  "Bing 搜索问答"])
+        history_len = st.slider("历史对话轮数:", 1, 10, 1)
+        if dialogue_mode == "知识库问答":
+            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
+            with st.expander(f"{selected_kb} 中已存储文件"):
+                st.write(get_kb_files(selected_kb))
+
+    # Display chat messages from history on app rerun
+    chat_box.output_messages()
+
+    if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
+        chat_box.user_say(prompt)
+        chat_box.ai_say("正在思考...")
+        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r: # todo: support history len
+        text = ""
+        r = api.chat_chat(prompt, no_remote_api=True)
+        for t in r:
+            text += t
+            chat_box.update_msg(text)
+        chat_box.update_msg(text, streaming=False)
+        # with api.chat_chat(prompt) as r:
+        #     for t in r.iter_text(None):
+        #         text += t
+        #         chat_box.update_msg(text)
+        #     chat_box.update_msg(text, streaming=False)
\ No newline at end of file
diff --git a/webui_pages/knowledge_base/__init__.py b/webui_pages/knowledge_base/__init__.py
new file mode 100644
index 0000000..b7b37a0
--- /dev/null
+++ b/webui_pages/knowledge_base/__init__.py
@@ -0,0 +1 @@
+from .knowledge_base import knowledge_base_page
\ No newline at end of file
diff --git a/webui_pages/knowledge_base/knowledge_base.py b/webui_pages/knowledge_base/knowledge_base.py
new file mode 100644
index 0000000..8515b62
--- /dev/null
+++ b/webui_pages/knowledge_base/knowledge_base.py
@@ -0,0 +1,6 @@
+import streamlit as st
+from webui_pages.utils import *
+
+def knowledge_base_page():
+    st.write(123)
+    pass
\ No newline at end of file
diff --git a/webui_pages/model_config/__init__.py b/webui_pages/model_config/__init__.py
new file mode 100644
index 0000000..3cfc701
--- /dev/null
+++ b/webui_pages/model_config/__init__.py
@@ -0,0 +1 @@
+from .model_config import model_config_page
\ No newline at end of file
diff --git a/webui_pages/model_config/model_config.py b/webui_pages/model_config/model_config.py
new file mode 100644
index 0000000..cc19d93
--- /dev/null
+++ b/webui_pages/model_config/model_config.py
@@ -0,0 +1,5 @@
+import streamlit as st
+from webui_pages.utils import *
+
+def model_config_page():
+    pass
\ No newline at end of file
diff --git a/webui_utils.py b/webui_pages/utils.py
similarity index 100%
rename from webui_utils.py
rename to webui_pages/utils.py