From a231f92d984f32cea665ecc02a217c276ffadf4f Mon Sep 17 00:00:00 2001
From: imClumsyPanda
Date: Sat, 15 Apr 2023 20:01:36 +0800
Subject: [PATCH] Fix mismatch between the llm_history_len and vector_search_top_k values displayed in webui.py and the startup defaults
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 chains/local_doc_qa.py           | 3 +++
 embedding/text2vec-large-chinese | 1 +
 llm/chatglm-6b                   | 1 +
 webui.py                         | 8 ++++++++
 4 files changed, 13 insertions(+)
 create mode 160000 embedding/text2vec-large-chinese
 create mode 160000 llm/chatglm-6b

diff --git a/chains/local_doc_qa.py b/chains/local_doc_qa.py
index f9b7207..7554854 100644
--- a/chains/local_doc_qa.py
+++ b/chains/local_doc_qa.py
@@ -17,6 +17,7 @@ VECTOR_SEARCH_TOP_K = 6
 
 # LLM input history length
 LLM_HISTORY_LEN = 3
+<<<<<<< HEAD
 
 def load_file(filepath):
     if filepath.lower().endswith(".pdf"):
@@ -29,6 +30,8 @@
         docs = loader.load_and_split(text_splitter=textsplitter)
     return docs
 
+=======
+>>>>>>> cba44ca (Fix mismatch between the llm_history_len and vector_search_top_k values displayed in webui.py and the startup defaults)
 
 class LocalDocQA:
     llm: object = None
diff --git a/embedding/text2vec-large-chinese b/embedding/text2vec-large-chinese
new file mode 160000
index 0000000..b23825b
--- /dev/null
+++ b/embedding/text2vec-large-chinese
@@ -0,0 +1 @@
+Subproject commit b23825b5841818578dd225b5420c4b026ff58aa3
diff --git a/llm/chatglm-6b b/llm/chatglm-6b
new file mode 160000
index 0000000..4de8efe
--- /dev/null
+++ b/llm/chatglm-6b
@@ -0,0 +1 @@
+Subproject commit 4de8efebc837788ffbfc0a15663de8553da362a2
diff --git a/webui.py b/webui.py
index 9f143b8..f4eacaa 100644
--- a/webui.py
+++ b/webui.py
@@ -15,7 +15,15 @@ LLM_HISTORY_LEN = 3
 
 <<<<<<< HEAD
 =======
+<<<<<<< HEAD
 >>>>>>> f87a5f5 (fix bug in webui.py)
+=======
+# return top-k text chunk from vector store
+VECTOR_SEARCH_TOP_K = 6
+
+# LLM input history length
+LLM_HISTORY_LEN = 3
+>>>>>>> cba44ca (Fix mismatch between the llm_history_len and vector_search_top_k values displayed in webui.py and the startup defaults)
 
 def get_file_list():
     if not os.path.exists("content"):
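
The inconsistency targeted by this patch comes from webui.py carrying its own copies of these defaults, which can drift from the values used to initialize the QA chain. Below is a minimal illustrative sketch, not the project's actual code, of keeping a single source of truth: the constants are defined once and the UI controls take their initial values from them, so the displayed values and the startup settings cannot disagree. The Gradio component choices, ranges, and labels here are assumptions for illustration only.

import gradio as gr

# Single source of truth for the startup defaults; the UI controls below read
# these same names, so what is displayed always matches what the chain starts with.
VECTOR_SEARCH_TOP_K = 6   # return top-k text chunks from the vector store
LLM_HISTORY_LEN = 3       # number of past turns kept as LLM input history

with gr.Blocks() as demo:
    # Hypothetical controls: initial values come from the constants above
    # instead of hard-coded literals that could fall out of sync.
    top_k = gr.Slider(1, 10, step=1, value=VECTOR_SEARCH_TOP_K,
                      label="vector_search_top_k")
    history_len = gr.Slider(0, 10, step=1, value=LLM_HISTORY_LEN,
                            label="llm_history_len")

if __name__ == "__main__":
    demo.launch()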