Commit: add webui_pages

This commit is contained in:
parent c8a75ab11f → commit bcfd3f5af5
@ -21,7 +21,7 @@ embedding_model_dict = {
|
|||
"text2vec-sentence": "shibing624/text2vec-base-chinese-sentence",
|
||||
"text2vec-multilingual": "shibing624/text2vec-base-multilingual",
|
||||
"m3e-small": "moka-ai/m3e-small",
|
||||
"m3e-base": "moka-ai/m3e-base",
|
||||
"m3e-base": "/Users/liuqian/Downloads/ChatGLM-6B/m3e-base",
|
||||
"m3e-large": "moka-ai/m3e-large",
|
||||
}
|
||||
|
||||
|
|
@ -46,7 +46,7 @@ llm_model_dict = {
|
|||
},
|
||||
|
||||
"chatglm2-6b": {
|
||||
"local_model_path": "THUDM/chatglm2-6b",
|
||||
"local_model_path": "/Users/liuqian/Downloads/ChatGLM-6B/chatglm2-6b", # "THUDM/chatglm2-6b",
|
||||
"api_base_url": "http://localhost:8888/v1", # "name"修改为fastchat服务中的"api_base_url"
|
||||
"api_key": "EMPTY"
|
||||
},
|
||||
|
|
|
|||
50
webui.py
50
webui.py
|
|
@ -1,63 +1,21 @@
|
|||
import streamlit as st
|
||||
from streamlit_chatbox import *
|
||||
from webui_utils import *
|
||||
from webui_pages.utils import *
|
||||
from streamlit_option_menu import option_menu
|
||||
|
||||
from webui_pages import *
|
||||
|
||||
api = ApiRequest()
|
||||
|
||||
def dialogue_page():
    """Render the chat (dialogue) page of the Streamlit WebUI.

    Sidebar: lets the user pick a dialogue mode (plain LLM chat,
    knowledge-base QA, or Bing search QA), a history length, and — for
    knowledge-base QA — a knowledge base whose stored files are listed.
    Main area: replays chat history, reads new input, and streams the
    model's answer into the chat box.

    NOTE(review): `history_len` is collected but never passed to the
    chat call below — presumably pending the "support history len" TODO.
    """
    with st.sidebar:
        # Mode selector; only "知识库问答" adds extra sidebar widgets below.
        dialogue_mode = st.radio("请选择对话模式",
                                 ["LLM 对话",
                                  "知识库问答",
                                  "Bing 搜索问答"])
        history_len = st.slider("历史对话轮数:", 1, 10, 1)
        if dialogue_mode == "知识库问答":
            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
            with st.expander(f"{selected_kb} 中已存储文件"):
                st.write(get_kb_files(selected_kb))

    # Display chat messages from history on app rerun
    chat_box.output_messages()

    if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
        chat_box.user_say(prompt)
        # Placeholder message that is progressively overwritten below.
        chat_box.ai_say("正在思考...")
        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r:  # todo: support history len
        text = ""
        # no_remote_api=True: call the chat implementation in-process
        # rather than over HTTP — TODO confirm against ApiRequest.
        r = api.chat_chat(prompt, no_remote_api=True)
        for t in r:
            text += t
            # Streaming update: re-render the partial answer each chunk.
            chat_box.update_msg(text)
        # Final update with streaming=False to finalize the message.
        chat_box.update_msg(text, streaming=False)
        # with api.chat_chat(prompt) as r:
        #     for t in r.iter_text(None):
        #         text += t
        #         chat_box.update_msg(text)
        #     chat_box.update_msg(text, streaming=False)
|
||||
|
||||
def knowledge_base_edit_page():
    """Knowledge-base management page — placeholder, not yet implemented."""
    pass
|
||||
|
||||
|
||||
def config_page():
    """Model-configuration page — placeholder, not yet implemented."""
    pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    st.set_page_config("langchain-chatglm WebUI")

    # Shared chat container used by dialogue_page.
    chat_box = ChatBox()

    # Page registry: display name -> icon (bootstrap icon name) + render
    # function. NOTE(review): this span is a diff rendering that shows both
    # the removed and the added "func" lines; Python keeps the LAST duplicate
    # dict key, so the *_page functions below each pair are the ones that
    # take effect — confirm against the committed file.
    pages = {"对话": {"icon": "chat",
                    "func": dialogue_page,
                    },
             "知识库管理": {"icon": "database-fill-gear",
                      "func": knowledge_base_edit_page,
                      "func": knowledge_base_page,
                      },
             "模型配置": {"icon": "gear",
                      "func": config_page,
                      "func": model_config_page,
                      }
             }
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
from .dialogue import dialogue_page
|
||||
from .knowledge_base import knowledge_base_page
|
||||
from .model_config import model_config_page
|
||||
|
|
@ -0,0 +1 @@
|
|||
from .dialogue import dialogue_page
|
||||
|
|
@ -0,0 +1,36 @@
|
|||
import streamlit as st
|
||||
from webui_pages.utils import *
|
||||
from streamlit_chatbox import *
|
||||
|
||||
chat_box = ChatBox()
|
||||
|
||||
def dialogue_page():
    """Render the chat (dialogue) page of the Streamlit WebUI.

    Sidebar: lets the user pick a dialogue mode (plain LLM chat,
    knowledge-base QA, or Bing search QA), a history length, and — for
    knowledge-base QA — a knowledge base whose stored files are listed.
    Main area: replays chat history, reads new input, and streams the
    model's answer into the module-level ``chat_box``.

    NOTE(review): `api` is not defined anywhere in this module's visible
    lines (only `from webui_pages.utils import *` is imported) — confirm
    that webui_pages.utils exports an `api` / ApiRequest instance.
    NOTE(review): `history_len` is collected but never used — presumably
    pending the "support history len" TODO.
    """
    with st.sidebar:
        # Mode selector; only "知识库问答" adds extra sidebar widgets below.
        dialogue_mode = st.radio("请选择对话模式",
                                 ["LLM 对话",
                                  "知识库问答",
                                  "Bing 搜索问答"])
        history_len = st.slider("历史对话轮数:", 1, 10, 1)
        if dialogue_mode == "知识库问答":
            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
            with st.expander(f"{selected_kb} 中已存储文件"):
                st.write(get_kb_files(selected_kb))

    # Display chat messages from history on app rerun
    chat_box.output_messages()

    if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
        chat_box.user_say(prompt)
        # Placeholder message that is progressively overwritten below.
        chat_box.ai_say("正在思考...")
        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r:  # todo: support history len
        text = ""
        # no_remote_api=True: call the chat implementation in-process
        # rather than over HTTP — TODO confirm against ApiRequest.
        r = api.chat_chat(prompt, no_remote_api=True)
        for t in r:
            text += t
            # Streaming update: re-render the partial answer each chunk.
            chat_box.update_msg(text)
        # Final update with streaming=False to finalize the message.
        chat_box.update_msg(text, streaming=False)
        # with api.chat_chat(prompt) as r:
        #     for t in r.iter_text(None):
        #         text += t
        #         chat_box.update_msg(text)
        #     chat_box.update_msg(text, streaming=False)
|
||||
|
|
@ -0,0 +1 @@
|
|||
from .knowledge_base import knowledge_base_page
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
import streamlit as st
|
||||
from webui_pages.utils import *
|
||||
|
||||
def knowledge_base_page():
    """Render the knowledge-base management page.

    Currently a work-in-progress stub: it only emits a placeholder value
    to the page and has no real functionality yet.
    """
    # NOTE(review): st.write(123) looks like leftover debug output from
    # wiring up the page — confirm whether it should be removed.
    st.write(123)
    # (dropped a redundant trailing `pass`: the function body is already
    # non-empty, so `pass` was a dead statement)
|
||||
|
|
@ -0,0 +1 @@
|
|||
from .model_config import model_config_page
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
import streamlit as st
|
||||
from webui_pages.utils import *
|
||||
|
||||
def model_config_page():
    """Model-configuration page — placeholder, not yet implemented."""
    pass
|
||||
Loading…
Reference in New Issue