From 2c5b6bb0adaf0380447beca4634daa19743418f0 Mon Sep 17 00:00:00 2001
From: liunux4odoo
Date: Tue, 1 Aug 2023 14:18:30 +0800
Subject: [PATCH] =?UTF-8?q?streamlit=20ui=20=E5=AE=9E=E7=8E=B0LLM=E6=B5=81?=
 =?UTF-8?q?=E5=BC=8F=E5=AF=B9=E8=AF=9D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 requirements.txt |  3 ++-
 webui.py         | 52 ++++++++++++++++++++++--------------------
 2 files changed, 26 insertions(+), 29 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 2c57bad..89eef3d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,4 +17,5 @@ pydantic~=1.10.11
 
 unstructured[local-inference]
 streamlit>=1.25.0
-streamlit-option-menu
\ No newline at end of file
+streamlit-option-menu
+streamlit-chatbox>=1.1.0
diff --git a/webui.py b/webui.py
index 08d2d25..1976a7f 100644
--- a/webui.py
+++ b/webui.py
@@ -1,6 +1,10 @@
 import streamlit as st
+from streamlit_chatbox import *
+from webui_utils import *
 from streamlit_option_menu import option_menu
-import openai
+
+
+api = ApiRequest()
 
 def dialogue_page():
     with st.sidebar:
@@ -8,37 +12,30 @@ def dialogue_page():
                                  ["LLM 对话", "知识库问答", "Bing 搜索问答"])
+        history_len = st.slider("历史对话轮数:", 1, 10, 1)
         if dialogue_mode == "知识库问答":
-            selected_kb = st.selectbox("请选择知识库:", ["知识库1", "知识库2"])
+            selected_kb = st.selectbox("请选择知识库:", get_kb_list())
             with st.expander(f"{selected_kb} 中已存储文件"):
-                st.write("123")
+                st.write(get_kb_files(selected_kb))
 
     # Display chat messages from history on app rerun
-    for message in st.session_state.messages:
-        with st.chat_message(message["role"]):
-            st.markdown(message["content"])
+    chat_box.output_messages()
 
     if prompt := st.chat_input("What is up?"):
-        st.session_state.messages.append({"role": "user", "content": prompt})
-        with st.chat_message("user"):
-            st.markdown(prompt)
-
-        with st.chat_message("assistant"):
-            message_placeholder = st.empty()
-            full_response = ""
-            for response in openai.ChatCompletion.create(
-                model=OPENAI_MODEL,
-                messages=[
-                    {"role": m["role"], "content": m["content"]}
-                    for m in st.session_state.messages
-                ],
-                stream=True,
-            ):
-                full_response += response.choices[0].delta.get("content", "")
-                message_placeholder.markdown(full_response + "▌")
-            message_placeholder.markdown(full_response)
-            st.session_state.messages.append({"role": "assistant", "content": full_response})
-
+        chat_box.user_say(prompt)
+        chat_box.ai_say("正在思考...")
+        # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r: # todo: support history len
+        text = ""
+        r = api.chat_chat(prompt, no_remote_api=True)
+        for t in r:
+            text += t
+            chat_box.update_msg(text)
+        chat_box.update_msg(text, streaming=False)
+        # with api.chat_chat(prompt) as r:
+        #     for t in r.iter_text(None):
+        #         text += t
+        #         chat_box.update_msg(text)
+        #     chat_box.update_msg(text, streaming=False)
 
 
 def knowledge_base_edit_page():
     pass
 
@@ -51,8 +48,7 @@ def config_page():
 
 if __name__ == "__main__":
     st.set_page_config("langchain-chatglm WebUI")
-    if "messages" not in st.session_state:
-        st.session_state.messages = []
+    chat_box = ChatBox()
 
     pages = {"对话": {"icon": "chat",
                     "func": dialogue_page,
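
Note on the streaming flow in the new dialogue_page(): the loop assumes ApiRequest.chat_chat() returns a plain iterable of text chunks that streamlit-chatbox renders incrementally. Below is a minimal sketch of that pattern; fake_chat_chat() is a hypothetical stand-in for api.chat_chat(prompt, no_remote_api=True) and is not part of the patch or of webui_utils. Depending on the streamlit-chatbox release, an explicit chat_box.init_session() call may also be needed before the first message.

    # Sketch only: mirrors the streaming pattern introduced in dialogue_page() above.
    # fake_chat_chat() is a hypothetical stand-in for ApiRequest.chat_chat(), which the
    # patch iterates as a generator of text chunks.
    import time

    import streamlit as st
    from streamlit_chatbox import ChatBox

    chat_box = ChatBox()

    def fake_chat_chat(prompt: str):
        # Yield the reply word by word to mimic a streaming LLM response.
        for word in f"Echo: {prompt}".split():
            time.sleep(0.05)
            yield word + " "

    chat_box.output_messages()                      # replay chat history on each rerun

    if prompt := st.chat_input("What is up?"):
        chat_box.user_say(prompt)
        chat_box.ai_say("Thinking...")              # placeholder, overwritten below
        text = ""
        for t in fake_chat_chat(prompt):
            text += t
            chat_box.update_msg(text)               # re-render the growing answer
        chat_box.update_msg(text, streaming=False)  # mark the message as final

Saved as a script and launched with `streamlit run`, this should display the word-by-word rendering; in the patch the same loop consumes the real api.chat_chat() generator, while the commented-out block keeps an httpx-style alternative (r.iter_text(None)) for the remote API path.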