update chatglm_llm.py

imClumsyPanda 2023-04-22 12:20:08 +08:00
parent 2224decef3
commit 54c983f4bc
1 changed file with 9 additions and 8 deletions

chatglm_llm.py

@@ -87,7 +87,7 @@ class ChatGLM(LLM):
         response, _ = self.model.chat(
             self.tokenizer,
             prompt,
-            history=[],#self.history[-self.history_len:] if self.history_len>0 else
+            history=self.history[-self.history_len:] if self.history_len > 0 else [],
             max_length=self.max_token,
             temperature=self.temperature,
         )
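
In the hunk above, model.chat now receives the most recent self.history_len turns of the stored conversation instead of an always-empty history (the previous line commented the window out and passed []). A minimal standalone sketch of the windowing expression, using made-up values for history and history_len:

# Illustrative only: the same slicing expression as the new line above, on dummy data.
history = [("q1", "a1"), ("q2", "a2"), ("q3", "a3")]
history_len = 2

window = history[-history_len:] if history_len > 0 else []
print(window)  # [('q2', 'a2'), ('q3', 'a3')] -- only the last two turns are forwarded
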
@@ -159,7 +159,8 @@ class ChatGLM(LLM):
                         new_prefix_state_dict[k[len("transformer.prefix_encoder."):]] = v
                 self.model.transformer.prefix_encoder.load_state_dict(new_prefix_state_dict)
                 self.model.transformer.prefix_encoder.float()
-            except Exception:
+            except Exception as e:
+                print(e)
                 print("加载PrefixEncoder模型参数失败")
         self.model = self.model.eval()
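
The second hunk only changes error reporting: the caught exception is bound to e and printed before the existing failure message 加载PrefixEncoder模型参数失败 ("failed to load PrefixEncoder model parameters"), so the cause of a failed PrefixEncoder weight load is no longer swallowed. A standalone sketch of the resulting pattern, with a made-up error standing in for the real load failing:

# Illustrative only: same try/except shape as after this commit, with a stand-in error.
try:
    raise FileNotFoundError("pytorch_model.bin not found")  # stand-in for the real weight load failing
except Exception as e:
    print(e)
    print("加载PrefixEncoder模型参数失败")  # "failed to load PrefixEncoder model parameters"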