Add a switch to display verbose logs (增加显示详细日志开关)

parent 228fc5bd93
commit 5550e3c1a9
@@ -5,6 +5,8 @@ LOG_FORMAT = "%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(mes
 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
 logging.basicConfig(format=LOG_FORMAT)
+# 是否显示详细日志
+log_verbose = False


 # 在以下字典中修改属性值,以指定本地embedding模型存储位置
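Everything in this commit follows the pattern this flag introduces: each handler logs a one-line `ClassName: message` summary, and attaches the full traceback only when `log_verbose` is turned on. A self-contained sketch of that behaviour (`do_work` is a made-up example, not project code):

```python
import logging

LOG_FORMAT = "%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s"
logging.basicConfig(format=LOG_FORMAT)
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# 是否显示详细日志 / whether to show verbose logs
log_verbose = False

def do_work():
    raise ValueError("something went wrong")

try:
    do_work()
except Exception as e:
    # log_verbose=False -> one summary line; log_verbose=True -> summary plus traceback
    logger.error(f"{e.__class__.__name__}: {e}",
                 exc_info=e if log_verbose else None)
```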
@@ -6,7 +6,7 @@ sys.path.append(os.path.dirname(os.path.dirname(__file__)))

 from configs.model_config import LLM_MODEL, NLTK_DATA_PATH
 from configs.server_config import OPEN_CROSS_DOMAIN, HTTPX_DEFAULT_TIMEOUT
-from configs import VERSION
+from configs import VERSION, logger, log_verbose
 import argparse
 import uvicorn
 from fastapi import Body
@@ -140,6 +140,8 @@ def create_app():
             r = httpx.post(controller_address + "/list_models")
             return BaseResponse(data=r.json()["models"])
         except Exception as e:
+            logger.error(f'{e.__class__.__name__}: {e}',
+                         exc_info=e if log_verbose else None)
             return BaseResponse(
                 code=500,
                 data=[],
@@ -165,6 +167,8 @@ def create_app():
             )
             return r.json()
         except Exception as e:
+            logger.error(f'{e.__class__.__name__}: {e}',
+                         exc_info=e if log_verbose else None)
             return BaseResponse(
                 code=500,
                 msg=f"failed to stop LLM model {model_name} from controller: {controller_address}。错误信息是: {e}")
@@ -190,6 +194,8 @@ def create_app():
             )
             return r.json()
         except Exception as e:
+            logger.error(f'{e.__class__.__name__}: {e}',
+                         exc_info=e if log_verbose else None)
             return BaseResponse(
                 code=500,
                 msg=f"failed to switch LLM model from controller: {controller_address}。错误信息是: {e}")
@@ -1,7 +1,7 @@
 from fastapi.responses import StreamingResponse
 from typing import List
 import openai
-from configs.model_config import llm_model_dict, LLM_MODEL, logger
+from configs.model_config import llm_model_dict, LLM_MODEL, logger, log_verbose
 from pydantic import BaseModel


@@ -46,7 +46,9 @@ async def openai_chat(msg: OpenAiChatMsgIn):
             print(answer)
             yield(answer)
         except Exception as e:
-            logger.error(f"获取ChatCompletion时出错:{e}")
+            msg = f"获取ChatCompletion时出错:{e}"
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)

     return StreamingResponse(
         get_response(msg),
@@ -2,6 +2,7 @@ import asyncio
 from typing import Awaitable, List, Tuple, Dict, Union
 from pydantic import BaseModel, Field
 from langchain.prompts.chat import ChatMessagePromptTemplate
+from configs import logger, log_verbose


 async def wrap_done(fn: Awaitable, event: asyncio.Event):
@@ -10,7 +11,9 @@ async def wrap_done(fn: Awaitable, event: asyncio.Event):
         await fn
     except Exception as e:
         # TODO: handle exception
-        print(f"Caught exception: {e}")
+        msg = f"Caught exception: {e}"
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
     finally:
         # Signal the aiter to stop.
         event.set()
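The `wrap_done` hunk keeps its contract of never letting a background task's exception escape; it now logs instead of printing. A sketch of how such a wrapper is typically driven (`failing_task` and the inline `log_verbose` are illustrative stand-ins, not the project's wiring):

```python
import asyncio
import logging

logging.basicConfig(format="%(levelname)s: %(message)s")
logger = logging.getLogger()
log_verbose = False  # stands in for the flag imported from configs

async def wrap_done(fn, event: asyncio.Event):
    """Await fn, log any exception, and always signal completion via event."""
    try:
        await fn
    except Exception as e:
        msg = f"Caught exception: {e}"
        logger.error(f"{e.__class__.__name__}: {msg}",
                     exc_info=e if log_verbose else None)
    finally:
        # Signal the waiter (e.g. an async iterator) to stop.
        event.set()

async def failing_task():
    raise RuntimeError("boom")

async def main():
    done = asyncio.Event()
    asyncio.create_task(wrap_done(failing_task(), done))
    await done.wait()  # returns even though the task raised
    print("task finished (error was logged, not re-raised)")

asyncio.run(main())
```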
@@ -3,7 +3,7 @@ from server.utils import BaseResponse, ListResponse
 from server.knowledge_base.utils import validate_kb_name
 from server.knowledge_base.kb_service.base import KBServiceFactory
 from server.db.repository.knowledge_base_repository import list_kbs_from_db
-from configs.model_config import EMBEDDING_MODEL, logger
+from configs.model_config import EMBEDDING_MODEL, logger, log_verbose
 from fastapi import Body


@@ -31,7 +31,8 @@ async def create_kb(knowledge_base_name: str = Body(..., examples=["samples"]),
         kb.create_kb()
     except Exception as e:
         msg = f"创建知识库出错: {e}"
-        logger.error(msg)
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
         return BaseResponse(code=500, msg=msg)

     return BaseResponse(code=200, msg=f"已新增知识库 {knowledge_base_name}")
@@ -57,7 +58,8 @@ async def delete_kb(
             return BaseResponse(code=200, msg=f"成功删除知识库 {knowledge_base_name}")
     except Exception as e:
         msg = f"删除知识库时出现意外: {e}"
-        logger.error(msg)
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
         return BaseResponse(code=500, msg=msg)

     return BaseResponse(code=500, msg=f"删除知识库失败 {knowledge_base_name}")
@@ -3,7 +3,7 @@ import urllib
 from fastapi import File, Form, Body, Query, UploadFile
 from configs.model_config import (DEFAULT_VS_TYPE, EMBEDDING_MODEL,
                                   VECTOR_SEARCH_TOP_K, SCORE_THRESHOLD,
-                                  logger,)
+                                  logger, log_verbose,)
 from server.utils import BaseResponse, ListResponse, run_in_thread_pool
 from server.knowledge_base.utils import (validate_kb_name, list_files_from_folder,get_file_path,
                                          files2docs_in_thread, KnowledgeFile)
@@ -80,7 +80,8 @@ def _save_files_in_thread(files: List[UploadFile],
             return dict(code=200, msg=f"成功上传文件 {filename}", data=data)
         except Exception as e:
             msg = f"{filename} 文件上传失败,报错信息为: {e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             return dict(code=500, msg=msg, data=data)

     params = [{"file": file, "knowledge_base_name": knowledge_base_name, "override": override} for file in files]
@@ -185,7 +186,8 @@ async def delete_docs(knowledge_base_name: str = Body(..., examples=["samples"])
             kb.delete_doc(kb_file, delete_content, not_refresh_vs_cache=True)
         except Exception as e:
             msg = f"{file_name} 文件删除失败,错误信息:{e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             failed_files[file_name] = msg

     if not not_refresh_vs_cache:
@@ -225,7 +227,8 @@ async def update_docs(
             kb_files.append(KnowledgeFile(filename=file_name, knowledge_base_name=knowledge_base_name))
         except Exception as e:
             msg = f"加载文档 {file_name} 时出错:{e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             failed_files[file_name] = msg

     # 从文件生成docs,并进行向量化。
@@ -249,7 +252,8 @@ async def update_docs(
             kb.update_doc(kb_file, docs=v, not_refresh_vs_cache=True)
         except Exception as e:
             msg = f"为 {file_name} 添加自定义docs时出错:{e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             failed_files[file_name] = msg

     if not not_refresh_vs_cache:
@@ -291,7 +295,8 @@ def download_doc(
         )
     except Exception as e:
         msg = f"{kb_file.filename} 读取文件失败,错误信息是:{e}"
-        logger.error(msg)
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
         return BaseResponse(code=500, msg=msg)

     return BaseResponse(code=500, msg=f"{kb_file.filename} 读取文件失败")
@@ -6,7 +6,7 @@ from configs.model_config import (
     CACHED_VS_NUM,
     EMBEDDING_MODEL,
     SCORE_THRESHOLD,
-    logger,
+    logger, log_verbose,
 )
 from server.knowledge_base.kb_service.base import KBService, SupportedVSType
 from functools import lru_cache
@@ -1,4 +1,4 @@
-from configs.model_config import EMBEDDING_MODEL, DEFAULT_VS_TYPE
+from configs.model_config import EMBEDDING_MODEL, DEFAULT_VS_TYPE, logger, log_verbose
 from server.knowledge_base.utils import (get_file_path, list_kbs_from_folder,
                                          list_files_from_folder, run_in_thread_pool,
                                          files2docs_in_thread,
@@ -30,7 +30,9 @@ def file_to_kbfile(kb_name: str, files: List[str]) -> List[KnowledgeFile]:
             kb_file = KnowledgeFile(filename=file, knowledge_base_name=kb_name)
             kb_files.append(kb_file)
         except Exception as e:
-            print(f"{e},已跳过")
+            msg = f"{e},已跳过"
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
     return kb_files


@@ -8,7 +8,7 @@ from configs.model_config import (
     CHUNK_SIZE,
     OVERLAP_SIZE,
     ZH_TITLE_ENHANCE,
-    logger,
+    logger, log_verbose,
 )
 from functools import lru_cache
 import importlib
@@ -189,7 +189,9 @@ def get_loader(loader_name: str, file_path_or_content: Union[str, bytes, io.Stri
         document_loaders_module = importlib.import_module('langchain.document_loaders')
         DocumentLoader = getattr(document_loaders_module, loader_name)
     except Exception as e:
-        logger.error(f"为文件{file_path_or_content}查找加载器{loader_name}时出错:{e}")
+        msg = f"为文件{file_path_or_content}查找加载器{loader_name}时出错:{e}"
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
         document_loaders_module = importlib.import_module('langchain.document_loaders')
         DocumentLoader = getattr(document_loaders_module, "UnstructuredFileLoader")

@@ -228,7 +230,9 @@ def make_text_splitter(
             chunk_overlap=chunk_overlap,
         )
     except Exception as e:
-        logger.error(f"查找分词器 {splitter_name} 时出错:{e}")
+        msg = f"查找分词器 {splitter_name} 时出错:{e}"
+        logger.error(f'{e.__class__.__name__}: {msg}',
+                     exc_info=e if log_verbose else None)
         TextSplitter = getattr(text_splitter_module, "RecursiveCharacterTextSplitter")
         text_splitter = TextSplitter(
             chunk_size=chunk_size,
@@ -330,7 +334,8 @@ def files2docs_in_thread(
             return True, (file.kb_name, file.filename, file.file2text(**kwargs))
         except Exception as e:
             msg = f"从文件 {file.kb_name}/{file.filename} 加载文档时出错:{e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             return False, (file.kb_name, file.filename, msg)

     kwargs_list = []
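`get_loader` and `make_text_splitter` share the same shape: resolve a class by name, and on failure log the lookup error and fall back to a safe default. A generic, runnable sketch of that shape, with placeholder module/class names rather than the project's:

```python
import importlib
import logging

logging.basicConfig(format="%(levelname)s: %(message)s")
logger = logging.getLogger()
log_verbose = False  # stands in for the flag imported from configs

def resolve_class(module_name: str, class_name: str, fallback_name: str):
    """Resolve class_name from module_name; on any error, log it and return the fallback class."""
    try:
        module = importlib.import_module(module_name)
        return getattr(module, class_name)
    except Exception as e:
        msg = f"error finding {class_name} in {module_name}: {e}"
        logger.error(f"{e.__class__.__name__}: {msg}",
                     exc_info=e if log_verbose else None)
        module = importlib.import_module(module_name)
        return getattr(module, fallback_name)

# Mirrors get_loader(): an unknown name falls back to a known-good class.
print(resolve_class("collections", "NoSuchClass", "OrderedDict"))  # -> <class 'collections.OrderedDict'>
```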
@@ -4,7 +4,7 @@ import sys
 import os

 sys.path.append(os.path.dirname(os.path.dirname(__file__)))
-from configs.model_config import llm_model_dict, LLM_MODEL, LOG_PATH, logger
+from configs.model_config import llm_model_dict, LLM_MODEL, LOG_PATH, logger, log_verbose
 from server.utils import MakeFastAPIOffline, set_httpx_timeout, llm_device


@@ -4,7 +4,7 @@ from typing import List
 from fastapi import FastAPI
 from pathlib import Path
 import asyncio
-from configs.model_config import LLM_MODEL, llm_model_dict, LLM_DEVICE, EMBEDDING_DEVICE
+from configs.model_config import LLM_MODEL, llm_model_dict, LLM_DEVICE, EMBEDDING_DEVICE, logger, log_verbose
 from configs.server_config import FSCHAT_MODEL_WORKERS
 import os
 from server import model_workers
@@ -86,9 +86,10 @@ def torch_gc():
             from torch.mps import empty_cache
             empty_cache()
         except Exception as e:
-            print(e)
-            print("如果您使用的是 macOS 建议将 pytorch 版本升级至 2.0.0 或更高版本,以支持及时清理 torch 产生的内存占用。")
+            msg=("如果您使用的是 macOS 建议将 pytorch 版本升级至 2.0.0 或更高版本,"
+                 "以支持及时清理 torch 产生的内存占用。")
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)

 def run_async(cor):
     '''
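`torch_gc` now routes the macOS/MPS hint through the logger as well. A sketch of a guarded cache-release helper in that spirit; note that only the MPS branch appears in the hunk, so the CUDA branch and the English wording here are assumptions:

```python
import logging

import torch

logging.basicConfig(format="%(levelname)s: %(message)s")
logger = logging.getLogger()
log_verbose = False  # stands in for the project-wide flag

def torch_gc():
    """Release cached accelerator memory, logging (not crashing) when the backend can't."""
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
        torch.cuda.ipc_collect()
    elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
        try:
            # torch.mps.empty_cache() only exists from pytorch 2.0.0 onwards
            from torch.mps import empty_cache
            empty_cache()
        except Exception as e:
            msg = ("On macOS, upgrading pytorch to 2.0.0 or newer is recommended "
                   "so memory held by torch can be released promptly.")
            logger.error(f"{e.__class__.__name__}: {msg}",
                         exc_info=e if log_verbose else None)

torch_gc()
```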
@@ -217,7 +218,9 @@ def get_model_worker_config(model_name: str = LLM_MODEL) -> dict:
         try:
             config["worker_class"] = getattr(model_workers, provider)
         except Exception as e:
-            print(f"在线模型 ‘{model_name}’ 的provider没有正确配置")
+            msg = f"在线模型 ‘{model_name}’ 的provider没有正确配置"
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)

     config["device"] = llm_device(config.get("device") or LLM_DEVICE)
     return config
@@ -11,7 +11,7 @@ from configs.model_config import (
     SCORE_THRESHOLD,
     VECTOR_SEARCH_TOP_K,
     SEARCH_ENGINE_TOP_K,
-    logger,
+    logger, log_verbose,
 )
 from configs.server_config import HTTPX_DEFAULT_TIMEOUT
 import httpx
@@ -78,7 +78,9 @@ class ApiRequest:
                 else:
                     return httpx.get(url, params=params, **kwargs)
             except Exception as e:
-                logger.error(f"error when get {url}: {e}")
+                msg = f"error when get {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     async def aget(
@@ -99,7 +101,9 @@ class ApiRequest:
                 else:
                     return await client.get(url, params=params, **kwargs)
             except Exception as e:
-                logger.error(f"error when aget {url}: {e}")
+                msg = f"error when aget {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     def post(
@@ -121,7 +125,9 @@ class ApiRequest:
                 else:
                     return httpx.post(url, data=data, json=json, **kwargs)
             except Exception as e:
-                logger.error(f"error when post {url}: {e}")
+                msg = f"error when post {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     async def apost(
@@ -143,7 +149,9 @@ class ApiRequest:
                 else:
                     return await client.post(url, data=data, json=json, **kwargs)
             except Exception as e:
-                logger.error(f"error when apost {url}: {e}")
+                msg = f"error when apost {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     def delete(
@@ -164,7 +172,9 @@ class ApiRequest:
                 else:
                     return httpx.delete(url, data=data, json=json, **kwargs)
             except Exception as e:
-                logger.error(f"error when delete {url}: {e}")
+                msg = f"error when delete {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     async def adelete(
@@ -186,7 +196,9 @@ class ApiRequest:
                 else:
                     return await client.delete(url, data=data, json=json, **kwargs)
             except Exception as e:
-                logger.error(f"error when adelete {url}: {e}")
+                msg = f"error when adelete {url}: {e}"
+                logger.error(f'{e.__class__.__name__}: {msg}',
+                             exc_info=e if log_verbose else None)
                 retry -= 1

     def _fastapi_stream2generator(self, response: StreamingResponse, as_json: bool =False):
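Every `ApiRequest` verb (get/aget/post/apost/delete/adelete) wraps the same retry loop; the commit only swaps the plain f-string passed to `logger.error` for the summary-plus-optional-traceback form. A standalone sketch of the synchronous variant (the retry count and the fall-through to `None` are assumptions about the surrounding method):

```python
import logging

import httpx

logging.basicConfig(format="%(levelname)s: %(message)s")
logger = logging.getLogger()
log_verbose = False  # stands in for the project-wide flag

def get_with_retry(url: str, retry: int = 3, **kwargs):
    """Try an HTTP GET up to `retry` times; log each failure and fall through to None."""
    while retry > 0:
        try:
            return httpx.get(url, **kwargs)
        except Exception as e:
            msg = f"error when get {url}: {e}"
            logger.error(f"{e.__class__.__name__}: {msg}",
                         exc_info=e if log_verbose else None)
            retry -= 1
    return None  # every attempt failed

# With log_verbose=False this prints three one-line errors; with True, three tracebacks.
get_with_retry("http://127.0.0.1:9", timeout=1)
```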
@@ -205,7 +217,9 @@
                 elif chunk.strip():
                     yield chunk
         except Exception as e:
-            logger.error(f"error when run fastapi router: {e}")
+            msg = f"error when run fastapi router: {e}"
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)

     def _httpx_stream2generator(
         self,
@@ -226,7 +240,9 @@
                             pprint(data, depth=1)
                             yield data
                         except Exception as e:
-                            logger.error(f"接口返回json错误: ‘{chunk}’。错误信息是:{e}。")
+                            msg = f"接口返回json错误: ‘{chunk}’。错误信息是:{e}。"
+                            logger.error(f'{e.__class__.__name__}: {msg}',
+                                         exc_info=e if log_verbose else None)
                     else:
                         print(chunk, end="", flush=True)
                         yield chunk
@@ -241,7 +257,8 @@
             yield {"code": 500, "msg": msg}
         except Exception as e:
             msg = f"API通信遇到错误:{e}"
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             yield {"code": 500, "msg": msg}

     # 对话相关操作
@@ -414,7 +431,8 @@
             return response.json()
         except Exception as e:
             msg = "API未能返回正确的JSON。" + (errorMsg or str(e))
-            logger.error(msg)
+            logger.error(f'{e.__class__.__name__}: {msg}',
+                         exc_info=e if log_verbose else None)
             return {"code": 500, "msg": msg}

     def list_knowledge_bases(
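The stream-to-generator helpers and the JSON accessor at the end apply one more variant of the pattern: the exception is logged, then surfaced to the caller as a `{"code": 500, "msg": ...}` payload so the UI keeps rendering instead of crashing. A minimal sketch of that contract (`flaky_source` is a made-up stand-in for the httpx/FastAPI stream):

```python
import logging
from typing import Iterable, Iterator

logging.basicConfig(format="%(levelname)s: %(message)s")
logger = logging.getLogger()
log_verbose = False  # stands in for the project-wide flag

def stream2generator(chunks: Iterable[str]) -> Iterator[dict]:
    """Yield chunks to the caller; on failure, log and yield an error payload instead of raising."""
    try:
        for chunk in chunks:
            yield {"code": 200, "data": chunk}
    except Exception as e:
        msg = f"error while reading the API stream: {e}"
        logger.error(f"{e.__class__.__name__}: {msg}",
                     exc_info=e if log_verbose else None)
        yield {"code": 500, "msg": msg}

def flaky_source():
    yield "hello"
    raise ConnectionError("stream dropped")

for item in stream2generator(flaky_source()):
    print(item)  # the second item is the 500 payload, not a traceback
```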