debug for fastchat_openai_llm
parent 6f967a069d
commit 90fe40b221
@@ -154,6 +154,15 @@ llm_model_dict = {
         "provides": "FastChatOpenAILLMChain",  # when using the fastchat api, "provides" must be "FastChatOpenAILLMChain"
         "api_base_url": "http://localhost:8000/v1",  # the "api_base_url" exposed by the fastchat service
         "api_key": "EMPTY"
     },
+    # models served through fastchat should follow the format below
+    "fastchat-chatglm-6b-int4": {
+        "name": "chatglm-6b-int4",  # set "name" to the "model_name" registered with the fastchat service
+        "pretrained_model_name": "chatglm-6b-int4",
+        "local_model_path": None,
+        "provides": "FastChatOpenAILLMChain",  # when using the fastchat api, "provides" must be "FastChatOpenAILLMChain"
+        "api_base_url": "http://localhost:8001/v1",  # the "api_base_url" exposed by the fastchat service
+        "api_key": "EMPTY"
+    },
     "fastchat-chatglm2-6b": {
         "name": "chatglm2-6b",  # set "name" to the "model_name" registered with the fastchat service
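For reference, a minimal sketch (not part of the commit) of how an entry in this format is meant to be consumed: "name", "api_base_url" and "api_key" are passed straight to the OpenAI-compatible endpoint that the fastchat server exposes. It assumes the legacy openai 0.x client, matching the openai.ChatCompletion.create call in the second hunk, and a fastchat server already running at the configured api_base_url.

# Illustrative only: consume a fastchat model entry via the OpenAI-compatible API.
import openai

model_entry = {
    "name": "chatglm-6b-int4",                  # must match the fastchat "model_name"
    "provides": "FastChatOpenAILLMChain",
    "api_base_url": "http://localhost:8001/v1",
    "api_key": "EMPTY",
}

# Point the openai 0.x client at the fastchat server.
openai.api_key = model_entry["api_key"]
openai.api_base = model_entry["api_base_url"]

completion = openai.ChatCompletion.create(
    model=model_entry["name"],
    messages=[{"role": "user", "content": "Hello"}],
)
print(completion.choices[0].message.content)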
|
@@ -140,7 +140,7 @@ class FastChatOpenAILLMChain(RemoteRpcModel, Chain, ABC):
         # create a chat completion
         completion = openai.ChatCompletion.create(
             model=self.model_name,
-            messages=build_message_list(prompt)
+            messages=build_message_list(prompt, history=history)
         )
         print(f"response:{completion.choices[0].message.content}")
         print(f"+++++++++++++++++++++++++++++++++++")
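The only functional change in this hunk is threading history through to build_message_list, so earlier turns reach the fastchat endpoint instead of being dropped. The helper itself is defined elsewhere in the repo and is not shown in this diff; the following is a hypothetical sketch of the shape it is assumed to return, i.e. OpenAI-style role/content dicts built from the history pairs followed by the current prompt.

# Hypothetical sketch of build_message_list; the real implementation lives
# elsewhere in the repository and may differ.
from typing import Dict, List, Optional, Tuple

def build_message_list(prompt: str,
                       history: Optional[List[Tuple[str, str]]] = None) -> List[Dict[str, str]]:
    messages: List[Dict[str, str]] = []
    # Replay prior turns as alternating user/assistant messages.
    for user_turn, assistant_turn in (history or []):
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    # Current prompt goes last, as the newest user message.
    messages.append({"role": "user", "content": prompt})
    return messages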