From 4318197ac760dd912da2ef7f8d1364cef69a1ed5 Mon Sep 17 00:00:00 2001
From: imClumsyPanda
Date: Thu, 17 Aug 2023 21:30:40 +0800
Subject: [PATCH] reformat server_config.py.example

---
 configs/server_config.py.example | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/configs/server_config.py.example b/configs/server_config.py.example
index 24ce6b4..fd83fc0 100644
--- a/configs/server_config.py.example
+++ b/configs/server_config.py.example
@@ -1,5 +1,4 @@
-from .model_config import LLM_MODEL, llm_model_dict, LLM_DEVICE
-
+from .model_config import LLM_MODEL, LLM_DEVICE
 
 # API 是否开启跨域,默认为False,如果需要开启,请设置为True
 # is open cross domain
@@ -23,7 +22,7 @@ API_SERVER = {
 # fastchat openai_api server
 FSCHAT_OPENAI_API = {
     "host": DEFAULT_BIND_HOST,
-    "port": 8888, # model_config.llm_model_dict中模型配置的api_base_url需要与这里一致。
+    "port": 8888,  # model_config.llm_model_dict中模型配置的api_base_url需要与这里一致。
 }
 
 # fastchat model_worker server
@@ -56,7 +55,6 @@ FSCHAT_MODEL_WORKERS = {
     },
 }
 
-
 # fastchat multi model worker server
FSCHAT_MULTI_MODEL_WORKERS = {
     # todo
@@ -76,12 +74,14 @@ def fschat_controller_address() -> str:
     port = FSCHAT_CONTROLLER["port"]
     return f"http://{host}:{port}"
 
+
 def fschat_model_worker_address(model_name: str = LLM_MODEL) -> str:
     if model := FSCHAT_MODEL_WORKERS.get(model_name):
         host = model["host"]
         port = model["port"]
         return f"http://{host}:{port}"
 
+
 def fschat_openai_api_address() -> str:
     host = FSCHAT_OPENAI_API["host"]
     port = FSCHAT_OPENAI_API["port"]
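
A minimal usage sketch (not part of the patch) showing how the address helpers touched by this diff can be consumed. The module path configs.server_config and the /v1/models request are assumptions for illustration only; the patch itself only reformats the config file.

# Hypothetical consumer of the helpers in server_config.py.example.
# Assumptions: the config module is importable as configs.server_config,
# and the OpenAI-compatible server exposes GET /v1/models.
import requests

from configs.server_config import (
    fschat_controller_address,
    fschat_model_worker_address,
    fschat_openai_api_address,
)

if __name__ == "__main__":
    # Must match the api_base_url configured in model_config.llm_model_dict
    # (see the comment on the port 8888 line in the patch).
    base_url = fschat_openai_api_address()

    print("controller:  ", fschat_controller_address())
    print("model worker:", fschat_model_worker_address())  # defaults to LLM_MODEL
    print("openai api:  ", base_url)

    # List models through the OpenAI-compatible endpoint (path is an assumption).
    resp = requests.get(f"{base_url}/v1/models", timeout=5)
    print(resp.json())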