update llm_api.py and webui.py
parent 2c5b6bb0ad
commit c8a75ab11f
llm_api.py

@@ -1,7 +1,7 @@
 from multiprocessing import Process, Queue
 import sys
 import os
 
 sys.path.append(os.path.dirname(os.path.dirname(__file__)))
 from configs.model_config import llm_model_dict, LLM_MODEL, LLM_DEVICE, LOG_PATH, logger
 import asyncio
@@ -31,7 +31,7 @@ def create_controller_app(
 
     controller = Controller(dispatch_method)
     sys.modules["fastchat.serve.controller"].controller = controller
-    #todo 替换fastchat的日志文件
+    # todo 替换fastchat的日志文件
     sys.modules["fastchat.serve.controller"].logger = logger
     logger.info(f"controller dispatch method: {dispatch_method}")
     return app
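The two sys.modules["fastchat.serve.controller"] assignments rely on the fact that an imported module's globals can be overwritten in place; the todo comment ("replace fastchat's log file") marks this as the point where the project's own logger is swapped in for fastchat's. A minimal, self-contained sketch of that pattern, using a made-up stand-in module rather than fastchat:

import logging
import sys
import types

# Stand-in for a third-party module that set up its own logger at import time.
# "fake_serve.controller" is a made-up name, not a real package.
controller_module = types.ModuleType("fake_serve.controller")
controller_module.logger = logging.getLogger("fake_serve")
sys.modules["fake_serve.controller"] = controller_module

# The application's own logger, playing the role of `logger` from configs.model_config.
logging.basicConfig(level=logging.INFO)
app_logger = logging.getLogger("llm_api")

# Same move as the hunk above: overwrite the module-level attribute through sys.modules.
sys.modules["fake_serve.controller"].logger = app_logger

controller_module.logger.info("log records now flow through the application's logger")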
@@ -199,9 +199,6 @@ def run_openai_api(q):
     uvicorn.run(app, host=host_ip, port=openai_api_port)
 
 
-
-
-
 if __name__ == "__main__":
     logger.info(llm_model_dict[LLM_MODEL])
     model_path = llm_model_dict[LLM_MODEL]["local_model_path"]
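run_openai_api(q) blocks inside uvicorn.run, so llm_api.py starts it in its own multiprocessing.Process and, as the next hunk shows, keeps the parent alive with openai_api_process.join(). A minimal sketch of that launch pattern, assuming FastAPI and uvicorn are installed; run_demo_api, the /ping route, and port 8000 are illustrative placeholders, not names taken from the file:

from multiprocessing import Process, Queue

import uvicorn
from fastapi import FastAPI


def run_demo_api(q):
    # Build a tiny app inside the child process, mirroring run_openai_api(q).
    app = FastAPI()

    @app.get("/ping")
    def ping():
        return {"status": "ok"}

    q.put("demo api starting")  # report back to the parent before blocking
    uvicorn.run(app, host="127.0.0.1", port=8000)  # placeholder host/port


if __name__ == "__main__":
    queue = Queue()
    demo_process = Process(target=run_demo_api, args=(queue,), daemon=True)
    demo_process.start()
    print(queue.get())   # wait for the child's readiness message
    demo_process.join()  # block here, as llm_api.py does on openai_api_process.join()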
@@ -243,7 +240,6 @@ if __name__ == "__main__":
     # model_worker_process.join()
     openai_api_process.join()
 
-
 # 服务启动后接口调用示例:
 # import openai
 # openai.api_key = "EMPTY" # Not support yet
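The commented lines at the end of this hunk ("example of calling the API once the service is started") can be completed into a hedged, runnable client call, assuming the pre-1.0 openai package that the comment implies; the base URL, port, and model name below are placeholders rather than values taken from this commit:

import openai

openai.api_key = "EMPTY"  # the local endpoint does not check API keys yet
openai.api_base = "http://localhost:8888/v1"  # placeholder; point this at the openai_api service

completion = openai.ChatCompletion.create(
    model="chatglm-6b",  # placeholder model name; llm_api.py takes LLM_MODEL from configs.model_config
    messages=[{"role": "user", "content": "Hello"}],
)
print(completion.choices[0].message.content)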
webui.py (2 changed lines)
@@ -21,7 +21,7 @@ def dialogue_page():
     # Display chat messages from history on app rerun
     chat_box.output_messages()
 
-    if prompt := st.chat_input("What is up?"):
+    if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
         chat_box.user_say(prompt)
         chat_box.ai_say("正在思考...")
         # with api.chat_fastchat([{"role": "user", "content": "prompt"}], stream=streaming) as r: # todo: support history len
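The webui.py change replaces the placeholder st.chat_input prompt with a Chinese one ("enter your message; use Ctrl+Enter for a line break"), and "正在思考..." ("thinking...") is shown while the answer is produced. For readers without the project's chat_box helper, here is a minimal sketch of the same chat-input pattern built only from plain Streamlit primitives; it is not the code behind dialogue_page():

import streamlit as st

if "messages" not in st.session_state:
    st.session_state.messages = []

# Replay history on every rerun, the role chat_box.output_messages() plays above.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.write(msg["content"])

# The walrus expression only enters the block when the user submits input.
if prompt := st.chat_input("请输入对话内容,换行请使用Ctrl+Enter"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)

    reply = "正在思考..."  # placeholder answer; the real page replaces it with the model reply
    st.session_state.messages.append({"role": "assistant", "content": reply})
    with st.chat_message("assistant"):
        st.write(reply)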