merge from dev

liunux4odoo 2023-12-13 16:52:40 +08:00
parent db008c1af8
commit c8fef3380c
3 changed files with 4 additions and 7 deletions

@@ -12,6 +12,7 @@ torch==2.1.0 ##on Windows system, install the cuda version manually from https://pytorch.org/
torchvision #on Windows system, install the cuda version manually from https://pytorch.org/
torchaudio #on Windows system, install the cuda version manually from https://pytorch.org/
fastapi>=0.104
+sse_starlette
nltk>=3.8.1
uvicorn>=0.24.0.post1
starlette~=0.27.0

@@ -12,6 +12,7 @@ torch==2.1.0 ##on Windows system, install the cuda version manually from https://pytorch.org/
torchvision #on Windows system, install the cuda version manually from https://pytorch.org/
torchaudio #on Windows system, install the cuda version manually from https://pytorch.org/
fastapi>=0.104
+sse_starlette
nltk>=3.8.1
uvicorn>=0.24.0.post1
starlette~=0.27.0
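
Both requirements files pick up the same new dependency, unpinned, exactly as shown in the hunks above. As a quick sanity check after installing (with the usual `pip install -r requirements.txt` step; the file names are not shown in this diff, so that command is assumed), the import the API code switches to below should resolve:

```python
# Sanity check that the new dependency is installed and importable.
# EventSourceResponse is the class the API change below starts using.
from sse_starlette.sse import EventSourceResponse

print(EventSourceResponse)  # <class 'sse_starlette.sse.EventSourceResponse'>
```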

@@ -1,5 +1,5 @@
from fastapi import Body, Request
-from fastapi.responses import StreamingResponse
+from sse_starlette.sse import EventSourceResponse
from fastapi.concurrency import run_in_threadpool
from configs import (LLM_MODELS, VECTOR_SEARCH_TOP_K, SCORE_THRESHOLD, TEMPERATURE)
from server.utils import wrap_done, get_ChatOpenAI
@@ -119,9 +119,4 @@ async def knowledge_base_chat(query: str = Body(..., description="User input",
ensure_ascii=False)
await task
-return StreamingResponse(knowledge_base_chat_iterator(query=query,
-                                                      top_k=top_k,
-                                                      history=history,
-                                                      model_name=model_name,
-                                                      prompt_name=prompt_name),
-                         media_type="text/event-stream")
+return EventSourceResponse(knowledge_base_chat_iterator(query, kb, top_k, history))
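
The substance of this file's change is swapping FastAPI's raw StreamingResponse (with an explicit `media_type="text/event-stream"`) for sse_starlette's EventSourceResponse, which frames each item yielded by the async generator as a Server-Sent Event and sets the media type itself. A minimal sketch of an endpoint built the same way follows; the app, route, and generator names are illustrative and not taken from this repository:

```python
# Minimal sketch (illustrative names, not from this repo): an SSE endpoint that
# streams JSON chunks via sse_starlette's EventSourceResponse instead of
# fastapi.responses.StreamingResponse.
import asyncio
import json

from fastapi import Body, FastAPI
from sse_starlette.sse import EventSourceResponse

app = FastAPI()


@app.post("/demo/sse_chat")
async def demo_sse_chat(query: str = Body(..., description="User input", embed=True)):
    async def chat_iterator(q: str):
        # Stand-in for the real LLM / knowledge-base pipeline: yield a few chunks.
        for token in ["Hello", ", ", q, "!"]:
            # Each yielded string becomes the `data:` field of one SSE event;
            # EventSourceResponse adds the event framing and the
            # text/event-stream media type automatically.
            yield json.dumps({"answer": token}, ensure_ascii=False)
            await asyncio.sleep(0.1)

    return EventSourceResponse(chat_iterator(query))
```

On the client side such a response can be consumed with any SSE client (for example `EventSource` in a browser), which is the main practical difference from the previous plain streaming response.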