remove xformers from requirements*.txt; check llm_model before change model in webui

This commit is contained in:
liunux4odoo 2023-10-20 15:01:39 +08:00
parent 0b25d7b079
commit 83e25f8011
3 changed files with 9 additions and 9 deletions

View File

@@ -1,7 +1,6 @@
 langchain==0.0.317
 langchain-experimental==0.0.30
 fschat[model_worker]==0.2.31
-xformers==0.0.22.post4
 openai
 sentence_transformers
 transformers>=4.34

View File

@@ -1,7 +1,6 @@
 langchain==0.0.317
 langchain-experimental==0.0.30
 fschat[model_worker]==0.2.31
-xformers==0.0.22.post4
 openai
 sentence_transformers>=2.2.2
 transformers>=4.34

View File

@@ -88,10 +88,11 @@ def dialogue_page(api: ApiRequest):
     )
     def on_llm_change():
-        config = api.get_model_config(llm_model)
-        if not config.get("online_api"): # 只有本地model_worker可以切换模型
-            st.session_state["prev_llm_model"] = llm_model
-        st.session_state["cur_llm_model"] = st.session_state.llm_model
+        if llm_model:
+            config = api.get_model_config(llm_model)
+            if not config.get("online_api"): # 只有本地model_worker可以切换模型
+                st.session_state["prev_llm_model"] = llm_model
+            st.session_state["cur_llm_model"] = st.session_state.llm_model

     def llm_model_format_func(x):
         if x in running_models:
@@ -118,9 +119,10 @@ def dialogue_page(api: ApiRequest):
         on_change=on_llm_change,
         key="llm_model",
     )
-    if (st.session_state.get("prev_llm_model") != llm_model
-        and not api.get_model_config(llm_model).get("online_api")
-        and llm_model not in running_models):
+    if (llm_model
+        and st.session_state.get("prev_llm_model") != llm_model
+        and not api.get_model_config(llm_model).get("online_api")
+        and llm_model not in running_models):
         with st.spinner(f"正在加载模型: {llm_model},请勿进行操作或刷新页面"):
             prev_model = st.session_state.get("prev_llm_model")
             r = api.change_llm_model(prev_model, llm_model)