Fix incorrect arg order and support zero history length
parent bd39b87f67
commit a04244cf6a
@@ -38,7 +38,7 @@ class ChatGLM(LLM):
         response, _ = self.model.chat(
             self.tokenizer,
             prompt,
-            history=self.history[-self.history_len:],
+            history=self.history[-self.history_len:] if self.history_len>0 else [],
             max_length=self.max_token,
             temperature=self.temperature,
         )
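The added guard is needed because a negative slice start of zero does not give an empty list in Python: -0 is just 0, so self.history[-0:] returns the entire history instead of none of it. A minimal standalone check of that behavior (plain Python, no project code assumed):

    history = ["turn 1", "turn 2", "turn 3"]
    history_len = 0

    # Without the guard: -0 is 0, so the slice keeps every turn.
    print(history[-history_len:])                              # ['turn 1', 'turn 2', 'turn 3']

    # With the guard from this commit: a zero history length really sends no history.
    print(history[-history_len:] if history_len > 0 else [])   # []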
webui.py (4 changed lines)
@@ -74,7 +74,7 @@ with gr.Blocks(css="""
                              label="llm model",
                              value="chatglm-6b",
                              interactive=True)
-        LLM_HISTORY_LEN = gr.Slider(1,
+        LLM_HISTORY_LEN = gr.Slider(0,
                                     10,
                                     value=3,
                                     step=1,
@@ -95,7 +95,7 @@ with gr.Blocks(css="""
                   kb.init_cfg(args[0], args[1], args[2], args[3]),
                   show_progress=True,
                   api_name="init_cfg",
-                  inputs=[llm_model, embedding_model, VECTOR_SEARCH_TOP_K, LLM_HISTORY_LEN]
+                  inputs=[llm_model, embedding_model, LLM_HISTORY_LEN,VECTOR_SEARCH_TOP_K]
                   ).then(
                   get_model_status, chatbot, chatbot
                   )
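The reorder in the inputs list matters because Gradio passes the listed components' values to the click handler positionally, in the order they appear in inputs, and the handler here forwards them one-for-one to kb.init_cfg(args[0], args[1], args[2], args[3]); listing VECTOR_SEARCH_TOP_K before LLM_HISTORY_LEN therefore fed the top-k value into the history-length parameter. A minimal sketch of that coupling, assuming an init_cfg(llm_model, embedding_model, llm_history_len, top_k) parameter order (the signature and sample values are illustrative, not taken from the repo):

    # Hypothetical stand-in for kb.init_cfg, used only to show the positional mapping.
    def init_cfg(llm_model, embedding_model, llm_history_len, top_k):
        print(f"model={llm_model}, embedding={embedding_model}, "
              f"history_len={llm_history_len}, top_k={top_k}")

    # Gradio calls the handler with one value per component listed in `inputs`, in order.
    handler = lambda *args: init_cfg(args[0], args[1], args[2], args[3])

    handler("chatglm-6b", "text2vec", 6, 3)   # old order: top-k (6) lands in history_len
    handler("chatglm-6b", "text2vec", 3, 6)   # fixed order: history_len=3, top_k=6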