import os
import urllib.parse
from fastapi import File, Form, Body, Query, UploadFile
from configs.model_config import (DEFAULT_VS_TYPE, EMBEDDING_MODEL,
                                  VECTOR_SEARCH_TOP_K, SCORE_THRESHOLD,
                                  logger)
from server.utils import BaseResponse, ListResponse, run_in_thread_pool
from server.knowledge_base.utils import (validate_kb_name, list_files_from_folder, get_file_path,
                                          files2docs_in_thread, KnowledgeFile)
from fastapi.responses import StreamingResponse, FileResponse
from pydantic import Json
import json
from server.knowledge_base.kb_service.base import KBServiceFactory
from server.db.repository.knowledge_file_repository import get_file_detail
from typing import List, Dict
from langchain.docstore.document import Document


class DocumentWithScore(Document):
    score: float = None


def search_docs(query: str = Body(..., description="User query", examples=["你好"]),
                knowledge_base_name: str = Body(..., description="Knowledge base name", examples=["samples"]),
                top_k: int = Body(VECTOR_SEARCH_TOP_K, description="Number of matched vectors"),
                score_threshold: float = Body(SCORE_THRESHOLD, description="Relevance score threshold for knowledge base matches, in the range 0-1. A smaller SCORE means higher relevance; 1 is equivalent to no filtering. A value around 0.5 is recommended.", ge=0, le=1),
                ) -> List[DocumentWithScore]:
    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return []
    docs = kb.search_docs(query, top_k, score_threshold)
    data = [DocumentWithScore(**x[0].dict(), score=x[1]) for x in docs]
    return data
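
# A minimal usage sketch (not part of this module): assuming the handlers in this
# file are registered on the FastAPI app elsewhere in the project, search_docs could
# be called over HTTP roughly as below. The route path and port are assumptions.
#
#   import requests
#   resp = requests.post("http://127.0.0.1:7861/knowledge_base/search_docs",
#                        json={"query": "你好", "knowledge_base_name": "samples", "top_k": 3})
#   for d in resp.json():
#       print(d["score"], d["page_content"])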


async def list_files(
        knowledge_base_name: str
) -> ListResponse:
    if not validate_kb_name(knowledge_base_name):
        return ListResponse(code=403, msg="Don't attack me", data=[])

    knowledge_base_name = urllib.parse.unquote(knowledge_base_name)
    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return ListResponse(code=404, msg=f"Knowledge base {knowledge_base_name} not found", data=[])
    else:
        all_doc_names = kb.list_files()
        return ListResponse(data=all_doc_names)


def _save_files_in_thread(files: List[UploadFile],
                          knowledge_base_name: str,
                          override: bool):
    '''
    Save the uploaded files into the target knowledge base directory using a thread pool.
    Yields the save result for each file: {"code":200, "msg": "xxx", "data": {"knowledge_base_name":"xxx", "file_name": "xxx"}}
    '''

    def save_file(file: UploadFile, knowledge_base_name: str, override: bool) -> dict:
        '''
        Save a single file.
        '''
        filename = file.filename
        data = {"knowledge_base_name": knowledge_base_name, "file_name": filename}
        try:
            file_path = get_file_path(knowledge_base_name=knowledge_base_name, doc_name=filename)

            file_content = file.file.read()  # read the content of the uploaded file
            if (os.path.isfile(file_path)
                    and not override
                    and os.path.getsize(file_path) == len(file_content)
            ):
                # TODO: handle the case where the file exists but its size differs
                file_status = f"File {filename} already exists."
                logger.warning(file_status)
                return dict(code=404, msg=file_status, data=data)

            with open(file_path, "wb") as f:
                f.write(file_content)
            return dict(code=200, msg=f"Successfully uploaded file {filename}", data=data)
        except Exception as e:
            msg = f"Failed to upload file {filename}: {e}"
            logger.error(msg)
            return dict(code=500, msg=msg, data=data)

    params = [{"file": file, "knowledge_base_name": knowledge_base_name, "override": override} for file in files]
    for result in run_in_thread_pool(save_file, params=params):
        yield result
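
# A minimal sketch of driving _save_files_in_thread directly, e.g. from a test.
# Constructing UploadFile by hand like this is an assumption based on starlette's
# API; in production the files come from a multipart/form-data request.
#
#   from io import BytesIO
#   test_files = [UploadFile(filename="test.txt", file=BytesIO(b"hello"))]
#   for r in _save_files_in_thread(test_files, knowledge_base_name="samples", override=True):
#       print(r["code"], r["msg"])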


# There seems to be no need for a separate file-upload API endpoint
# def upload_files(files: List[UploadFile] = File(..., description="Uploaded files, multiple files supported"),
#                  knowledge_base_name: str = Form(..., description="Knowledge base name", examples=["samples"]),
#                  override: bool = Form(False, description="Overwrite existing files")):
#     '''
#     API endpoint: upload files. Streams the save results: {"code":200, "msg": "xxx", "data": {"knowledge_base_name":"xxx", "file_name": "xxx"}}
#     '''
#     def generate(files, knowledge_base_name, override):
#         for result in _save_files_in_thread(files, knowledge_base_name=knowledge_base_name, override=override):
#             yield json.dumps(result, ensure_ascii=False)
#
#     return StreamingResponse(generate(files, knowledge_base_name=knowledge_base_name, override=override), media_type="text/event-stream")


# TODO: enable this once langchain.document_loaders supports in-memory files
# def files2docs(files: List[UploadFile] = File(..., description="Uploaded files, multiple files supported"),
#                knowledge_base_name: str = Form(..., description="Knowledge base name", examples=["samples"]),
#                override: bool = Form(False, description="Overwrite existing files"),
#                save: bool = Form(True, description="Whether to save the files into the knowledge base directory")):
#     def save_files(files, knowledge_base_name, override):
#         for result in _save_files_in_thread(files, knowledge_base_name=knowledge_base_name, override=override):
#             yield json.dumps(result, ensure_ascii=False)
#
#     def files_to_docs(files):
#         for result in files2docs_in_thread(files):
#             yield json.dumps(result, ensure_ascii=False)


async def upload_docs(files: List[UploadFile] = File(..., description="Uploaded files, multiple files supported"),
                      knowledge_base_name: str = Form(..., description="Knowledge base name", examples=["samples"]),
                      override: bool = Form(False, description="Overwrite existing files"),
                      to_vector_store: bool = Form(True, description="Whether to vectorize the files after uploading"),
                      docs: Json = Form({}, description="Custom docs", examples=[{"test.txt": [Document(page_content="custom doc")]}]),
                      not_refresh_vs_cache: bool = Form(False, description="Do not save the vector store yet (used with FAISS)"),
                      ) -> BaseResponse:
    '''
    API endpoint: upload files and/or vectorize them.
    '''
    if not validate_kb_name(knowledge_base_name):
        return BaseResponse(code=403, msg="Don't attack me")

    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return BaseResponse(code=404, msg=f"Knowledge base {knowledge_base_name} not found")

    failed_files = {}
    file_names = list(docs.keys())

    # First save the uploaded files to disk
    for result in _save_files_in_thread(files, knowledge_base_name=knowledge_base_name, override=override):
        filename = result["data"]["file_name"]
        if result["code"] != 200:
            failed_files[filename] = result["msg"]

        if filename not in file_names:
            file_names.append(filename)

    # Then vectorize the saved files
    if to_vector_store:
        result = await update_docs(
            knowledge_base_name=knowledge_base_name,
            file_names=file_names,
            override_custom_docs=True,
            docs=docs,
            not_refresh_vs_cache=True,
        )
        failed_files.update(result.data["failed_files"])
        if not not_refresh_vs_cache:
            kb.save_vector_store()

    return BaseResponse(code=200, msg="File upload and vectorization finished", data={"failed_files": failed_files})
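
# Sketch of an HTTP call against upload_docs, assuming the handler is mounted at
# /knowledge_base/upload_docs by the FastAPI app (route path and port are assumptions):
#
#   import requests
#   files = [("files", ("test.txt", open("test.txt", "rb")))]
#   data = {"knowledge_base_name": "samples", "override": True, "to_vector_store": True}
#   resp = requests.post("http://127.0.0.1:7861/knowledge_base/upload_docs",
#                        files=files, data=data)
#   print(resp.json()["data"]["failed_files"])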


async def delete_docs(knowledge_base_name: str = Body(..., examples=["samples"]),
                      file_names: List[str] = Body(..., examples=[["file_name.md", "test.txt"]]),
                      delete_content: bool = Body(False),
                      not_refresh_vs_cache: bool = Body(False, description="Do not save the vector store yet (used with FAISS)"),
                      ) -> BaseResponse:
    if not validate_kb_name(knowledge_base_name):
        return BaseResponse(code=403, msg="Don't attack me")

    knowledge_base_name = urllib.parse.unquote(knowledge_base_name)
    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return BaseResponse(code=404, msg=f"Knowledge base {knowledge_base_name} not found")

    failed_files = {}
    for file_name in file_names:
        if not kb.exist_doc(file_name):
            failed_files[file_name] = f"File {file_name} not found"

        try:
            kb_file = KnowledgeFile(filename=file_name,
                                    knowledge_base_name=knowledge_base_name)
            kb.delete_doc(kb_file, delete_content, not_refresh_vs_cache=True)
        except Exception as e:
            msg = f"Failed to delete file {file_name}: {e}"
            logger.error(msg)
            failed_files[file_name] = msg

    if not not_refresh_vs_cache:
        kb.save_vector_store()

    return BaseResponse(code=200, msg="File deletion finished", data={"failed_files": failed_files})
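
# Sketch of a delete_docs request body, assuming the handler is mounted at
# /knowledge_base/delete_docs (the route path is an assumption):
#
#   {"knowledge_base_name": "samples", "file_names": ["test.txt"], "delete_content": true}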


async def update_docs(
        knowledge_base_name: str = Body(..., description="Knowledge base name", examples=["samples"]),
        file_names: List[str] = Body(..., description="File names, multiple files supported", examples=["file_name"]),
        override_custom_docs: bool = Body(False, description="Whether to overwrite previously customized docs"),
        docs: Json = Body({}, description="Custom docs", examples=[{"test.txt": [Document(page_content="custom doc")]}]),
        not_refresh_vs_cache: bool = Body(False, description="Do not save the vector store yet (used with FAISS)"),
) -> BaseResponse:
    '''
    Update documents in the knowledge base.
    '''
    if not validate_kb_name(knowledge_base_name):
        return BaseResponse(code=403, msg="Don't attack me")

    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return BaseResponse(code=404, msg=f"Knowledge base {knowledge_base_name} not found")

    failed_files = {}
    kb_files = []

    # Build the list of files whose docs need to be loaded from disk
    for file_name in file_names:
        file_detail = get_file_detail(kb_name=knowledge_base_name, filename=file_name)
        # If the file previously used custom docs, skip it or overwrite it depending on the parameter
        if file_detail.get("custom_docs") and not override_custom_docs:
            continue
        if file_name not in docs:
            try:
                kb_files.append(KnowledgeFile(filename=file_name, knowledge_base_name=knowledge_base_name))
            except Exception as e:
                msg = f"Error while loading document {file_name}: {e}"
                logger.error(msg)
                failed_files[file_name] = msg

    # Generate docs from the files and vectorize them.
    # This relies on KnowledgeFile's caching: Documents are loaded in worker threads and then attached to a KnowledgeFile.
    for status, result in files2docs_in_thread(kb_files):
        if status:
            kb_name, file_name, new_docs = result
            kb_file = KnowledgeFile(filename=file_name,
                                    knowledge_base_name=knowledge_base_name)
            kb_file.splited_docs = new_docs
            kb.update_doc(kb_file, not_refresh_vs_cache=True)
        else:
            kb_name, file_name, error = result
            failed_files[file_name] = error

    # Vectorize the custom docs
    for file_name, v in docs.items():
        try:
            v = [x if isinstance(x, Document) else Document(**x) for x in v]
            kb_file = KnowledgeFile(filename=file_name, knowledge_base_name=knowledge_base_name)
            kb.update_doc(kb_file, docs=v, not_refresh_vs_cache=True)
        except Exception as e:
            msg = f"Error while adding custom docs for {file_name}: {e}"
            logger.error(msg)
            failed_files[file_name] = msg

    if not not_refresh_vs_cache:
        kb.save_vector_store()

    return BaseResponse(code=200, msg="Document update finished", data={"failed_files": failed_files})
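
# Sketch of the custom `docs` payload accepted by upload_docs/update_docs: a JSON
# mapping from file name to a list of Document-like dicts. The field names follow
# langchain's Document (page_content / metadata); the example values are made up.
#
#   docs = {
#       "test.txt": [
#           {"page_content": "custom doc", "metadata": {"source": "test.txt"}},
#       ],
#   }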


async def download_doc(
        knowledge_base_name: str = Query(..., description="Knowledge base name", examples=["samples"]),
        file_name: str = Query(..., description="File name", examples=["test.txt"]),
        preview: bool = Query(False, description="True: preview in the browser; False: download"),
):
    '''
    Download a document from the knowledge base.
    '''
    if not validate_kb_name(knowledge_base_name):
        return BaseResponse(code=403, msg="Don't attack me")

    kb = KBServiceFactory.get_service_by_name(knowledge_base_name)
    if kb is None:
        return BaseResponse(code=404, msg=f"Knowledge base {knowledge_base_name} not found")

    if preview:
        content_disposition_type = "inline"
    else:
        content_disposition_type = None

    try:
        kb_file = KnowledgeFile(filename=file_name,
                                knowledge_base_name=knowledge_base_name)

        if os.path.exists(kb_file.filepath):
            return FileResponse(
                path=kb_file.filepath,
                filename=kb_file.filename,
                media_type="multipart/form-data",
                content_disposition_type=content_disposition_type,
            )
    except Exception as e:
        msg = f"Failed to read file {file_name}: {e}"
        logger.error(msg)
        return BaseResponse(code=500, msg=msg)

    return BaseResponse(code=500, msg=f"Failed to read file {file_name}")


async def recreate_vector_store(
        knowledge_base_name: str = Body(..., examples=["samples"]),
        allow_empty_kb: bool = Body(True),
        vs_type: str = Body(DEFAULT_VS_TYPE),
        embed_model: str = Body(EMBEDDING_MODEL),
):
    '''
    Recreate the vector store from the content folder.
    This is useful when users copy files into the content folder directly instead of uploading them through the network.
    By default, get_service_by_name only returns knowledge bases that are recorded in info.db and contain document files.
    Set allow_empty_kb to True to also apply this to empty knowledge bases that are not in info.db or contain no documents.
    '''

    def output():
        kb = KBServiceFactory.get_service(knowledge_base_name, vs_type, embed_model)
        if not kb.exists() and not allow_empty_kb:
            yield json.dumps({"code": 404, "msg": f"Knowledge base '{knowledge_base_name}' not found"}, ensure_ascii=False)
        else:
            kb.create_kb()
            kb.clear_vs()
            files = list_files_from_folder(knowledge_base_name)
            kb_files = [(file, knowledge_base_name) for file in files]
            i = 0
            for status, result in files2docs_in_thread(kb_files):
                if status:
                    kb_name, file_name, docs = result
                    kb_file = KnowledgeFile(filename=file_name, knowledge_base_name=kb_name)
                    kb_file.splited_docs = docs
                    yield json.dumps({
                        "code": 200,
                        "msg": f"({i + 1} / {len(files)}): {file_name}",
                        "total": len(files),
                        "finished": i,
                        "doc": file_name,
                    }, ensure_ascii=False)
                    kb.add_doc(kb_file, not_refresh_vs_cache=True)
                else:
                    kb_name, file_name, error = result
                    msg = f"Error while adding file '{file_name}' to knowledge base '{knowledge_base_name}': {error}. Skipped."
                    logger.error(msg)
                    yield json.dumps({
                        "code": 500,
                        "msg": msg,
                    }, ensure_ascii=False)
                i += 1

    return StreamingResponse(output(), media_type="text/event-stream")
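
# Sketch of consuming the streamed progress messages, assuming the handler is
# mounted at /knowledge_base/recreate_vector_store (route path and port are
# assumptions). Chunk boundaries are not guaranteed to align with individual
# JSON messages.
#
#   import requests
#   with requests.post("http://127.0.0.1:7861/knowledge_base/recreate_vector_store",
#                      json={"knowledge_base_name": "samples"}, stream=True) as resp:
#       for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
#           print(chunk)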