remove xformers from requirements*.txt; check llm_model before changing the model in the webui
This commit is contained in: parent 0b25d7b079, commit 83e25f8011
First requirements*.txt file:

@@ -1,7 +1,6 @@
 langchain==0.0.317
 langchain-experimental==0.0.30
 fschat[model_worker]==0.2.31
-xformers==0.0.22.post4
 openai
 sentence_transformers
 transformers>=4.34

Second requirements*.txt file:

@@ -1,7 +1,6 @@
 langchain==0.0.317
 langchain-experimental==0.0.30
 fschat[model_worker]==0.2.31
-xformers==0.0.22.post4
 openai
 sentence_transformers>=2.2.2
 transformers>=4.34

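With the pin removed, xformers becomes optional rather than a hard dependency; environments that still want its memory-efficient attention kernels can install the previously pinned build by hand (pip install xformers==0.0.22.post4). A purely illustrative Python check for whether the optional package is present:

# Illustrative only: warn when the now-optional xformers package is absent.
import importlib.util

if importlib.util.find_spec("xformers") is None:
    print("xformers is not installed; memory-efficient attention kernels are unavailable")
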
webui dialogue page:

@@ -88,6 +88,7 @@ def dialogue_page(api: ApiRequest):
     )

     def on_llm_change():
+        if llm_model:
             config = api.get_model_config(llm_model)
             if not config.get("online_api"): # 只有本地model_worker可以切换模型
                 st.session_state["prev_llm_model"] = llm_model

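The added "if llm_model:" guard makes on_llm_change, the on_change callback of the model selectbox, a no-op while no model is selected, instead of calling api.get_model_config() with an empty value. The Chinese comment in the hunk reads, roughly, "only a local model_worker can switch models". A minimal sketch of the patched logic follows; the selectbox value is passed in as a parameter here, and FakeApi, its config values, and the session_state dict are stand-ins for the real ApiRequest client and st.session_state, not project code:

# Illustrative sketch only: FakeApi and session_state stand in for the real
# ApiRequest client and st.session_state.
class FakeApi:
    def get_model_config(self, name):
        # pretend "chatglm2-6b" is served by a local model_worker
        return {"online_api": name != "chatglm2-6b"}

api = FakeApi()
session_state = {}

def on_llm_change(llm_model):
    if llm_model:  # the added guard: do nothing while no model is selected
        config = api.get_model_config(llm_model)
        if not config.get("online_api"):  # only a local model_worker can switch models
            session_state["prev_llm_model"] = llm_model

on_llm_change(None)            # empty selection: no config lookup, no error
on_llm_change("chatglm2-6b")   # local model: remembered for the next switch
print(session_state)           # {'prev_llm_model': 'chatglm2-6b'}
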
Further down in the same function:

@@ -118,7 +119,8 @@ def dialogue_page(api: ApiRequest):
         on_change=on_llm_change,
         key="llm_model",
     )
-    if (st.session_state.get("prev_llm_model") != llm_model
+    if (llm_model
+            and st.session_state.get("prev_llm_model") != llm_model
             and not api.get_model_config(llm_model).get("online_api")
             and llm_model not in running_models):
         with st.spinner(f"正在加载模型: {llm_model},请勿进行操作或刷新页面"):

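Putting llm_model first relies on Python's short-circuiting "and": st.session_state.get("prev_llm_model"), api.get_model_config(llm_model), and the running_models lookup are only evaluated when a model is actually selected, so an empty selectbox can no longer trigger a config lookup or a model switch. The spinner message translates roughly to "Loading model: {llm_model}; do not interact with the page or refresh it". A hedged, self-contained sketch of the reordered check, where get_model_config, running_models, and the model names are stand-ins rather than the project's real API:

# Stand-in for the real ApiRequest.get_model_config; it raises on an empty name
# to show that the leading "llm_model" test short-circuits the rest of the check.
def get_model_config(name):
    if not name:
        raise ValueError("called with an empty model name")
    return {"online_api": False}  # pretend every named model is a local model_worker

running_models = ["chatglm2-6b"]   # illustrative values
prev_llm_model = "chatglm2-6b"

def should_switch(llm_model):
    return bool(llm_model
                and prev_llm_model != llm_model
                and not get_model_config(llm_model).get("online_api")
                and llm_model not in running_models)

print(should_switch(None))        # False -- the stub is never called
print(should_switch("qwen-7b"))   # True  -- a local model that is not yet running
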