import torch.cuda
import torch.backends
import os

embedding_model_dict = {
    "ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
    "ernie-base": "nghuyong/ernie-3.0-base-zh",
    "text2vec-base": "shibing624/text2vec-base-chinese",
    "text2vec": "GanymedeNil/text2vec-large-chinese",
}

# Embedding model name
EMBEDDING_MODEL = "text2vec"

# Embedding running device
EMBEDDING_DEVICE = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
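
# Usage sketch (an assumption, not part of this config): elsewhere in the project the
# selected embedding model is presumably loaded by looking its name up in
# embedding_model_dict, e.g. via langchain's HuggingFaceEmbeddings wrapper:
#
#     from langchain.embeddings import HuggingFaceEmbeddings
#     embeddings = HuggingFaceEmbeddings(
#         model_name=embedding_model_dict[EMBEDDING_MODEL],
#         model_kwargs={"device": EMBEDDING_DEVICE},
#     )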

# Supported LLM models
llm_model_dict = {
    "chatyuan": "ClueAI/ChatYuan-large-v2",
    "chatglm-6b-int4-qe": "THUDM/chatglm-6b-int4-qe",
    "chatglm-6b-int4": "THUDM/chatglm-6b-int4",
    "chatglm-6b-int8": "THUDM/chatglm-6b-int8",
    "chatglm-6b": "THUDM/chatglm-6b",
}

# LLM model name
LLM_MODEL = "chatglm-6b"

# Use p-tuning-v2 PrefixEncoder
USE_PTUNING_V2 = False
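
# Note (an assumption about usage): when True, the model loader presumably applies a
# p-tuning-v2 checkpoint, i.e. trained PrefixEncoder weights, on top of the base
# ChatGLM model instead of using the plain pretrained weights.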

# LLM running device
LLM_DEVICE = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
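
# Usage sketch (an assumption; the actual loading code lives elsewhere in the project):
# ChatGLM checkpoints on the HuggingFace Hub ship custom modeling code, so they are
# loaded with trust_remote_code=True and moved to LLM_DEVICE:
#
#     from transformers import AutoModel, AutoTokenizer
#     tokenizer = AutoTokenizer.from_pretrained(llm_model_dict[LLM_MODEL], trust_remote_code=True)
#     model = AutoModel.from_pretrained(llm_model_dict[LLM_MODEL], trust_remote_code=True)
#     model = model.half().to(LLM_DEVICE) if LLM_DEVICE != "cpu" else model.float()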

# Root directory for generated vector stores
VS_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "vector_store", "")

# Root directory for uploaded content files
UPLOAD_ROOT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "content", "")
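
# Worked example (assuming this file lives one level below the project root, e.g.
# <repo>/configs/model_config.py): os.path.dirname(os.path.dirname(__file__)) is then
# <repo>, so VS_ROOT_PATH resolves to "<repo>/vector_store/" and UPLOAD_ROOT_PATH to
# "<repo>/content/"; the trailing "" argument keeps a trailing path separator.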

# Context-based prompt template; be sure to keep the "{question}" and "{context}" placeholders.
# The Chinese template below roughly reads: "Based on the following known information, answer the
# user's question '{question}' concisely and professionally. If no answer can be derived from it,
# reply 'the question cannot be answered from the known information' or 'not enough relevant
# information was provided'; do not add fabricated content to the answer, and answer in Chinese.
# The known content follows:"
PROMPT_TEMPLATE = """基于以下已知信息,简洁和专业的来回答用户的问题,问题是"{question}"。如果无法从中得到答案,请说 "根据已知信息无法回答该问题" 或 "没有提供足够的相关信息",不允许在答案中添加编造成分,答案请使用中文。已知内容如下:
{context} """
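
# Usage sketch (an assumption; query and related_docs_text are hypothetical names for
# the user question and the concatenated matched document text):
#
#     prompt = PROMPT_TEMPLATE.format(question=query, context=related_docs_text)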

# Length of a single context segment after matching
CHUNK_SIZE = 500
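
# Usage sketch (an assumption; matched_text is a hypothetical name): each matched
# context segment is presumably truncated to at most CHUNK_SIZE characters before
# being inserted into PROMPT_TEMPLATE:
#
#     segment = matched_text[:CHUNK_SIZE]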