In the *_chat endpoints, set max_tokens to None when it is 0, so that requests sent from the Swagger UI (where the field's default value is 0) no longer fail.

liunux4odoo 2023-11-26 16:47:58 +08:00
parent 75a2cba078
commit 8b695dba03
6 changed files with 17 additions and 1 deletion
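
For context, the same guard is added to every changed endpoint: Swagger UI pre-fills optional integer fields with 0, so a request submitted from the docs page arrives with max_tokens=0 and the backend is asked for a zero-token completion. Below is a minimal sketch of the pattern in isolation, using a hypothetical /chat/demo route and plain FastAPI Body parameters rather than the project's real *_chat signatures or its get_ChatOpenAI wrapper.

    from typing import Optional

    from fastapi import Body, FastAPI

    app = FastAPI()

    @app.post("/chat/demo")  # hypothetical route, not part of the repository
    async def demo_chat(
        query: str = Body(..., description="user input"),
        # Swagger UI pre-fills this optional int with 0, so requests sent from
        # the docs page arrive with max_tokens=0 instead of omitting the field.
        max_tokens: Optional[int] = Body(None, description="token limit for the LLM"),
    ):
        # Same guard as the commit: treat 0 (or any non-positive value) as
        # "not set" so the downstream client can fall back to the model default.
        if isinstance(max_tokens, int) and max_tokens <= 0:
            max_tokens = None
        return {"query": query, "max_tokens": max_tokens}

With the guard in place, a value of 0 is normalized to None before the model client is constructed, so the model's own default token limit can apply instead.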

View File

@@ -44,6 +44,9 @@ async def agent_chat(query: str = Body(..., description="用户输入", examples
             prompt_name: str = prompt_name,
     ) -> AsyncIterable[str]:
         callback = CustomAsyncIteratorCallbackHandler()
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
+
         model = get_ChatOpenAI(
             model_name=model_name,
             temperature=temperature,

View File

@@ -45,7 +45,8 @@ async def chat(query: str = Body(..., description="用户输入", examples=["恼
                                                             chat_type="llm_chat",
                                                             query=query)
         callbacks.append(conversation_callback)
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
         model = get_ChatOpenAI(
             model_name=model_name,

View File

@@ -28,6 +28,9 @@ async def completion(query: str = Body(..., description="用户输入", examples
             echo: bool = echo,
     ) -> AsyncIterable[str]:
         callback = AsyncIteratorCallbackHandler()
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
+
         model = get_OpenAI(
             model_name=model_name,
             temperature=temperature,

View File

@@ -114,6 +114,9 @@ async def file_chat(query: str = Body(..., description="用户输入", examples=
     async def knowledge_base_chat_iterator() -> AsyncIterable[str]:
         callback = AsyncIteratorCallbackHandler()
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
+
         model = get_ChatOpenAI(
             model_name=model_name,
             temperature=temperature,

View File

@@ -49,6 +49,9 @@ async def knowledge_base_chat(query: str = Body(..., description="用户输入",
             prompt_name: str = prompt_name,
     ) -> AsyncIterable[str]:
         callback = AsyncIteratorCallbackHandler()
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
+
         model = get_ChatOpenAI(
             model_name=model_name,
             temperature=temperature,

View File

@@ -148,6 +148,9 @@ async def search_engine_chat(query: str = Body(..., description="用户输入",
             prompt_name: str = prompt_name,
     ) -> AsyncIterable[str]:
         callback = AsyncIteratorCallbackHandler()
+        if isinstance(max_tokens, int) and max_tokens <= 0:
+            max_tokens = None
+
         model = get_ChatOpenAI(
             model_name=model_name,
             temperature=temperature,