fix bug in webui.py

imClumsyPanda 2023-04-16 08:59:06 +08:00
parent dc0cdfba90
commit f87a5f59ca
2 changed files with 14 additions and 6 deletions

configs/model_config.py

@@ -11,7 +11,7 @@ import datetime
 from typing import List

 # return top-k text chunk from vector store
-VECTOR_SEARCH_TOP_K = 10
+VECTOR_SEARCH_TOP_K = 6

 # LLM input history length
 LLM_HISTORY_LEN = 3

webui.py

@@ -4,6 +4,12 @@ import shutil
 from chains.local_doc_qa import LocalDocQA
 from configs.model_config import *

+# return top-k text chunk from vector store
+VECTOR_SEARCH_TOP_K = 6
+
+# LLM input history length
+LLM_HISTORY_LEN = 3
+
 def get_file_list():
     if not os.path.exists("content"):
@@ -49,7 +55,8 @@ def init_model():
     try:
         local_doc_qa.init_cfg()
         return """模型已成功加载,请选择文件后点击"加载文件"按钮"""
-    except:
+    except Exception as e:
+        print(e)
         return """模型未成功加载,请重新选择后点击"加载模型"按钮"""
@@ -60,14 +67,15 @@ def reinit_model(llm_model, embedding_model, llm_history_len, top_k, history):
                               llm_history_len=llm_history_len,
                               top_k=top_k)
         model_status = """模型已成功重新加载,请选择文件后点击"加载文件"按钮"""
-    except:
+    except Exception as e:
+        print(e)
         model_status = """模型未成功重新加载,请重新选择后点击"加载模型"按钮"""
     return history + [[None, model_status]]


 def get_vector_store(filepath, history):
-    if local_doc_qa.llm and local_doc_qa.llm:
+    if local_doc_qa.llm and local_doc_qa.embeddings:
         vs_path = local_doc_qa.init_knowledge_vector_store(["content/" + filepath])
         if vs_path:
             file_status = "文件已成功加载,请开始提问"
@@ -123,7 +131,7 @@ with gr.Blocks(css=block_css) as demo:
                                          interactive=True)
             llm_history_len = gr.Slider(0,
                                         10,
-                                        value=3,
+                                        value=LLM_HISTORY_LEN,
                                         step=1,
                                         label="LLM history len",
                                         interactive=True)
@@ -133,7 +141,7 @@ with gr.Blocks(css=block_css) as demo:
                                        interactive=True)
             top_k = gr.Slider(1,
                               20,
-                              value=6,
+                              value=VECTOR_SEARCH_TOP_K,
                               step=1,
                               label="向量匹配 top k",
                               interactive=True)