Use StreamingResponse to return the response

wvivi2023 2024-01-11 09:06:27 +08:00
parent 565a94c1bb
commit 0c0c87a3fb
2 changed files with 13 additions and 2 deletions

@@ -1,5 +1,6 @@
 from fastapi import Body
 from sse_starlette.sse import EventSourceResponse
+from fastapi.responses import StreamingResponse
 from configs import LLM_MODELS, TEMPERATURE
 from server.utils import wrap_done, get_ChatOpenAI
 from langchain.chains import LLMChain
@@ -100,4 +101,6 @@ async def chat(query: str = Body(..., description="用户输入", examples=["恼
         await task
-    return EventSourceResponse(chat_iterator())
+    #return EventSourceResponse(chat_iterator())
+    return StreamingResponse(chat_iterator(),
+                             media_type="text/event-stream")
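
In sse_starlette, EventSourceResponse wraps each item the generator yields in SSE framing ("data: ...\n\n") before sending it, while FastAPI's StreamingResponse writes yielded chunks to the wire as-is; with media_type="text/event-stream" the iterator itself becomes responsible for any event framing the client expects. A minimal self-contained sketch of the pattern this hunk switches to (the route and token list are placeholders, not from this repo):

    from fastapi import FastAPI
    from fastapi.responses import StreamingResponse

    app = FastAPI()

    async def chat_iterator():
        # Placeholder token stream standing in for the LLM callback output.
        for token in ["Hello", ", ", "world"]:
            yield token

    @app.get("/demo_chat")  # hypothetical route, not part of this commit
    async def demo_chat():
        # Chunks are sent verbatim; no "data:" prefix is added automatically.
        return StreamingResponse(chat_iterator(), media_type="text/event-stream")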

@@ -1,4 +1,5 @@
 from fastapi import Body, Request
+from fastapi.responses import StreamingResponse
 from sse_starlette.sse import EventSourceResponse
 from fastapi.concurrency import run_in_threadpool
 from configs import (LLM_MODELS,
@@ -145,5 +146,12 @@ async def knowledge_base_chat(query: str = Body(..., description="用户输入",
                              ensure_ascii=False)
         await task
-    return EventSourceResponse(knowledge_base_chat_iterator(query, top_k, history,model_name,prompt_name))
+    return StreamingResponse(knowledge_base_chat_iterator(query=query,
+                                                          top_k=top_k,
+                                                          history=history,
+                                                          model_name=model_name,
+                                                          prompt_name=prompt_name),
+                             media_type="text/event-stream")
+    #return EventSourceResponse(knowledge_base_chat_iterator(query, top_k, history,model_name,prompt_name))
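
Either response type produces a chunked HTTP body, so a plain streaming HTTP client can read the output incrementally without an SSE parser. A hedged client-side sketch for smoke-testing the changed endpoint (the URL, port, and request payload below are assumptions about the deployment, not taken from this commit):

    import requests

    # Assumed local API address and request body; adjust to the actual deployment.
    url = "http://localhost:7861/chat/knowledge_base_chat"
    payload = {"query": "hello", "knowledge_base_name": "default"}

    with requests.post(url, json=payload, stream=True) as resp:
        resp.raise_for_status()
        # Read decoded chunks as they arrive instead of waiting for the full body.
        for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
            print(chunk, end="", flush=True)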