import os

# Optional absolute path under which all Embedding and LLM models are stored.
# Each model may live in its own directory, or in a second-level subdirectory.
MODEL_ROOT_PATH = ""

# Edit the values in the dict below to point at local embedding model storage.
# Three ways to configure (using "text2vec" as the example):
# 1. Replace the value with the model's absolute path.
# 2. Leave the value here unchanged:
#    2.1 If any of the following subdirectories exists under
#        {MODEL_ROOT_PATH}, it is used as the local model:
#        - text2vec
#        - GanymedeNil/text2vec-large-chinese
#        - text2vec-large-chinese
#    2.2 If none of those local paths exist, the HuggingFace model is used.
MODEL_PATH = {
    "embed_model": {
        "ernie-tiny": "nghuyong/ernie-3.0-nano-zh",
        "ernie-base": "nghuyong/ernie-3.0-base-zh",
        "text2vec-base": "shibing624/text2vec-base-chinese",
        "text2vec": "GanymedeNil/text2vec-large-chinese",
        "text2vec-paraphrase": "shibing624/text2vec-base-chinese-paraphrase",
        "text2vec-sentence": "shibing624/text2vec-base-chinese-sentence",
        "text2vec-multilingual": "shibing624/text2vec-base-multilingual",
        "text2vec-bge-large-chinese": "shibing624/text2vec-bge-large-chinese",
        "m3e-small": "moka-ai/m3e-small",
        "m3e-base": "moka-ai/m3e-base",
        "m3e-large": "moka-ai/m3e-large",
        "bge-small-zh": "BAAI/bge-small-zh",
        "bge-base-zh": "BAAI/bge-base-zh",
        "bge-large-zh": "BAAI/bge-large-zh",
        "bge-large-zh-noinstruct": "BAAI/bge-large-zh-noinstruct",
        "piccolo-base-zh": "sensenova/piccolo-base-zh",
        "piccolo-large-zh": "sensenova/piccolo-large-zh",
        "text-embedding-ada-002": "your OPENAI_API_KEY",
    },
    # TODO: add all supported llm models
    "llm_model": {
        "chatglm-6b": "THUDM/chatglm-6b",
        "chatglm2-6b": "THUDM/chatglm2-6b",
        "chatglm2-6b-int4": "THUDM/chatglm2-6b-int4",
        "chatglm2-6b-32k": "THUDM/chatglm2-6b-32k",
        "baichuan-7b": "baichuan-inc/Baichuan-7B",
    },
}

# Name of the Embedding model to use (a key of MODEL_PATH["embed_model"]).
EMBEDDING_MODEL = "m3e-base"

# Device the Embedding model runs on. "auto" detects automatically;
# may also be set explicitly to one of "cuda", "mps", "cpu".
EMBEDDING_DEVICE = "auto"

# Name of the LLM to use (a key of MODEL_PATH["llm_model"]).
LLM_MODEL = "chatglm2-6b"

# Device the LLM runs on. "auto" detects automatically;
# may also be set explicitly to one of "cuda", "mps", "cpu".
LLM_DEVICE = "auto"

# Number of past conversation turns kept as history.
HISTORY_LEN = 3

# Generic LLM chat parameters.
TEMPERATURE = 0.7
# TOP_P = 0.95  # ChatOpenAI does not support this parameter yet

ONLINE_LLM_MODEL = {
    # If calling chatgpt raises:
    #   urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='api.openai.com', port=443):
    #   Max retries exceeded with url: /v1/chat/completions
    # then downgrade urllib3 to version 1.25.11.
    # If urllib3.exceptions.MaxRetryError: HTTPSConnectionPool persists,
    # change https to http. Reference: https://zhuanlan.zhihu.com/p/350015032

    # If it raises:
    #   raise NewConnectionError(
    #   urllib3.exceptions.NewConnectionError: <urllib3.connection.HTTPSConnection object at 0x000001FE4BDB85E0>:
    #   Failed to establish a new connection: [WinError 10060]
    # then mainland-China and Hong Kong IPs are blocked by OpenAI; switch to
    # an IP in Japan, Singapore, etc.

    # If you see:
    #   WARNING: Retrying langchain.chat_models.openai.acompletion_with_retry.<locals>._completion_with_retry in
    #   4.0 seconds as it raised APIConnectionError: Error communicating with OpenAI.
    # then proxy access is required (a normally running proxy app may not
    # intercept the traffic). Configure "openai_proxy" here, or set the
    # OPENAI_PROXY environment variable.
    # For example: "openai_proxy": 'http://127.0.0.1:4780'
    "gpt-3.5-turbo": {
        "api_base_url": "https://api.openai.com/v1",
        "api_key": "your OPENAI_API_KEY",
        "openai_proxy": "your OPENAI_PROXY",
    },
    # Online models. Assign a distinct port to each online API in server_config.
    # Register and obtain an api key at http://open.bigmodel.cn
    "zhipu-api": {
        "api_key": "",
        "version": "chatglm_pro",  # options include "chatglm_lite", "chatglm_std", "chatglm_pro"
        "provider": "ChatGLMWorker",
    },
    # Register and obtain an api key at https://api.minimax.chat/
    "minimax-api": {
        "group_id": "",
        "api_key": "",
        "is_pro": False,
        "provider": "MiniMaxWorker",
    },
    # Register and obtain an api key at https://xinghuo.xfyun.cn/
    "xinghuo-api": {
        "APPID": "",
        "APISecret": "",
        "api_key": "",
        "is_v2": False,
        "provider": "XingHuoWorker",
    },
    # Baidu Qianfan API; see https://cloud.baidu.com/doc/WENXINWORKSHOP/s/4lilb2lpf
    # for how to apply.
    "qianfan-api": {
        "version": "ernie-bot-turbo",  # currently supports "ernie-bot" or "ernie-bot-turbo"; see the official docs for more
        "version_url": "",  # alternatively leave version empty and fill in the API URL of the model deployed on Qianfan
        "api_key": "",
        "secret_key": "",
        "provider": "QianFanWorker",
    },
    # Volcano Ark (FangZhou) API
    "fangzhou-api": {
        "version": "chatglm-6b-model",  # currently supports "chatglm-6b-model"; see the Ark section of the supported-model list in the docs for more
        "version_url": "",  # alternatively leave version empty and fill in the API URL of the model deployed on Ark
        "api_key": "",
        "secret_key": "",
        "provider": "FangZhouWorker",
    },
}

# The settings below normally do not need to be changed.

# nltk data storage path (sibling "nltk_data" directory of this config package).
NLTK_DATA_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "nltk_data")