From 59aff96add54f5c2dfddccaae2c84f7edfaaab1f Mon Sep 17 00:00:00 2001
From: imClumsyPanda
Date: Sat, 22 Apr 2023 21:20:18 +0800
Subject: [PATCH] update chatglm_llm.py

---
 models/chatglm_llm.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/models/chatglm_llm.py b/models/chatglm_llm.py
index c951b78..5608cbb 100644
--- a/models/chatglm_llm.py
+++ b/models/chatglm_llm.py
@@ -115,7 +115,8 @@ class ChatGLM(LLM):
                 prefix_encoder_file.close()
                 model_config.pre_seq_len = prefix_encoder_config['pre_seq_len']
                 model_config.prefix_projection = prefix_encoder_config['prefix_projection']
-            except Exception:
+            except Exception as e:
+                print(e)
                 print("加载PrefixEncoder config.json失败")
 
         if torch.cuda.is_available() and llm_device.lower().startswith("cuda"):
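
Note: the hunk above changes only the two "+" lines. For reference, here is a minimal standalone sketch of the error-handling pattern the patch introduces; the helper name and signature are illustrative assumptions and not part of the repository, while the except behavior and the Chinese failure message come from the patch itself. Binding the exception as e and printing it surfaces the underlying error (missing file, malformed JSON, missing key) that was previously reduced to the generic failure message.

    import json

    def load_prefix_encoder_config(model_config, config_path):
        # Illustrative sketch only: mirrors the patched error handling in
        # models/chatglm_llm.py; the function name and arguments are assumptions.
        try:
            with open(config_path, "r") as prefix_encoder_file:
                prefix_encoder_config = json.loads(prefix_encoder_file.read())
            model_config.pre_seq_len = prefix_encoder_config["pre_seq_len"]
            model_config.prefix_projection = prefix_encoder_config["prefix_projection"]
        except Exception as e:
            print(e)  # e.g. FileNotFoundError or KeyError, previously swallowed silently
            print("加载PrefixEncoder config.json失败")  # "Failed to load PrefixEncoder config.json"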