bug修复和提示词修改 (#2230)

* 更新Langchain依赖到0.0.343以上版本

* 更改了一些提示词和依赖,修改了openai异常的问题

* 注释data的打印日志
This commit is contained in:
zR 2023-11-30 12:09:53 +08:00 committed by GitHub
parent c4fe3393b3
commit 40918c21de
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 35 additions and 89 deletions

View File

@ -1,7 +1,6 @@
# prompt模板使用Jinja2语法简单点就是用双大括号代替f-string的单大括号
# 本配置文件支持热加载修改prompt模板后无需重启服务。
# LLM对话支持的变量
# - input: 用户输入内容
@ -76,9 +75,6 @@ Answer the following questions as best you can. If it is in order, you can use s
{tools}
Please note that the "知识库查询工具" is information about the "西交利物浦大学", and if a question is asked about it, you must answer with the knowledge base
Please note that the "天气查询工具" can only be used once since the Question began.
Use the following format:
Question: the input question you must answer
Thought: you should always think about what to do and what tools to use.

View File

@ -1,6 +1,6 @@
# API requirements
langchain>=0.0.342
langchain>=0.0.343
langchain-experimental>=0.0.42
fschat[model_worker]>=0.2.33
xformers>=0.0.22.post7

View File

@ -1,6 +1,6 @@
# API requirements
langchain>=0.0.342
langchain>=0.0.343
langchain-experimental>=0.0.42
fschat[model_worker]>=0.2.33
xformers>=0.0.22.post7

View File

@ -1,4 +1,4 @@
langchain>=0.0.342
langchain>=0.0.343
fschat>=0.2.33
openai>=1.3.5
# sentence_transformers

View File

@ -40,11 +40,9 @@ def get_ChatOpenAI(
verbose: bool = True,
**kwargs: Any,
) -> ChatOpenAI:
## 以下模型是Langchain原生支持的模型这些模型不会走Fschat封装
config_models = list_config_llm_models()
## 非Langchain原生支持的模型走Fschat封装
config = get_model_worker_config(model_name)
if config.get("openai-api"):
model_name = config.get("model_name")
model = ChatOpenAI(
streaming=streaming,
verbose=verbose,
@ -57,10 +55,7 @@ def get_ChatOpenAI(
openai_proxy=config.get("openai_proxy"),
**kwargs
)
return model
def get_OpenAI(
model_name: str,
temperature: float,
@ -71,53 +66,9 @@ def get_OpenAI(
verbose: bool = True,
**kwargs: Any,
) -> OpenAI:
## 以下模型是Langchain原生支持的模型这些模型不会走Fschat封装
config_models = list_config_llm_models()
if model_name in config_models.get("langchain", {}):
config = config_models["langchain"][model_name]
if model_name == "Azure-OpenAI":
model = AzureOpenAI(
streaming=streaming,
verbose=verbose,
callbacks=callbacks,
deployment_name=config.get("deployment_name"),
model_version=config.get("model_version"),
openai_api_type=config.get("openai_api_type"),
openai_api_base=config.get("api_base_url"),
openai_api_version=config.get("api_version"),
openai_api_key=config.get("api_key"),
openai_proxy=config.get("openai_proxy"),
temperature=temperature,
max_tokens=max_tokens,
echo=echo,
)
elif model_name == "OpenAI":
model = OpenAI(
streaming=streaming,
verbose=verbose,
callbacks=callbacks,
model_name=config.get("model_name"),
openai_api_base=config.get("api_base_url"),
openai_api_key=config.get("api_key"),
openai_proxy=config.get("openai_proxy"),
temperature=temperature,
max_tokens=max_tokens,
echo=echo,
)
elif model_name == "Anthropic":
model = Anthropic(
streaming=streaming,
verbose=verbose,
callbacks=callbacks,
model_name=config.get("model_name"),
anthropic_api_key=config.get("api_key"),
echo=echo,
)
## TODO 支持其他的Langchain原生支持的模型
else:
## 非Langchain原生支持的模型走Fschat封装
config = get_model_worker_config(model_name)
if config.get("openai-api"):
model_name = config.get("model_name")
model = OpenAI(
streaming=streaming,
verbose=verbose,
@ -131,7 +82,6 @@ def get_OpenAI(
echo=echo,
**kwargs
)
return model

View File

@ -276,8 +276,8 @@ class ApiRequest:
"max_tokens": max_tokens,
}
print(f"received input message:")
pprint(data)
# print(f"received input message:")
# pprint(data)
response = self.post(
"/chat/fastchat",