From c620ab0d6c4fd66bdbfee9b63bba7b8355435f31 Mon Sep 17 00:00:00 2001
From: imClumsyPanda
Date: Wed, 26 Apr 2023 23:20:08 +0800
Subject: [PATCH] update model_config.py

---
 configs/model_config.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/configs/model_config.py b/configs/model_config.py
index 99c4ddc..c7f3649 100644
--- a/configs/model_config.py
+++ b/configs/model_config.py
@@ -31,7 +31,10 @@ USE_PTUNING_V2 = False
 # LLM running device
 LLM_DEVICE = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
 
-VS_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "vector_store")
+VS_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "vector_store", "")
 
 
-UPLOAD_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "content")
+UPLOAD_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "content", "")
+# 基于上下文的prompt模版,请务必保留"{question}"和"{context}"
+PROMPT_TEMPLATE = """基于以下已知信息,简洁和专业的来回答用户的问题,问题是"{question}"。如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。已知内容如下:
+{context} """
\ No newline at end of file
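
For reference, the added PROMPT_TEMPLATE is a Chinese prompt that tells the model to answer the user's question "{question}" concisely and professionally based on the known information, to reply "根据已知信息无法回答该问题" (the question cannot be answered from the known information) or "没有提供足够的相关信息" (not enough relevant information was provided) when no answer can be derived, not to fabricate content, and to answer in Chinese; the added comment asks that the "{question}" and "{context}" placeholders be preserved. The sketch below is a minimal illustration of what the change does, assuming the template is filled with str.format and that configs.model_config is importable as shown; neither assumption comes from this patch.

import os

from configs.model_config import PROMPT_TEMPLATE  # assumed import path, adjust to the project layout

# Effect of the extra "" argument introduced by the patch: os.path.join then
# appends a trailing path separator (output shown for POSIX systems).
print(os.path.join("/data", "vector_store"))      # /data/vector_store
print(os.path.join("/data", "vector_store", ""))  # /data/vector_store/

# Hypothetical way to fill the new template; str.format is an assumption, not
# taken from this patch. The "{question}" and "{context}" placeholders must
# stay intact in the template itself, as the added comment warns.
prompt = PROMPT_TEMPLATE.format(
    question="什么是向量库?",
    context="(retrieved document snippets)",
)
print(prompt)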