diff --git a/server/chat/agent_chat.py b/server/chat/agent_chat.py
index 10ac1eb..80313d3 100644
--- a/server/chat/agent_chat.py
+++ b/server/chat/agent_chat.py
@@ -23,7 +23,7 @@ async def agent_chat(query: str = Body(..., description="用户输入", examples
                      ),
                      stream: bool = Body(False, description="流式输出"),
                      model_name: str = Body(LLM_MODEL, description="LLM 模型名称。"),
-                     temperature: float = Body(TEMPERATURE, description="LLM 采样温度", gt=0.0, le=1.0),
+                     temperature: float = Body(TEMPERATURE, description="LLM 采样温度", ge=0.0, le=1.0),
                      # top_p: float = Body(TOP_P, description="LLM 核采样。勿与temperature同时设置", gt=0.0, lt=1.0),
                      ):
     history = [History.from_data(h) for h in history]
diff --git a/server/chat/chat.py b/server/chat/chat.py
index 02856ea..15a0250 100644
--- a/server/chat/chat.py
+++ b/server/chat/chat.py
@@ -21,7 +21,7 @@ async def chat(query: str = Body(..., description="用户输入", examples=["恼
                ),
                stream: bool = Body(False, description="流式输出"),
                model_name: str = Body(LLM_MODEL, description="LLM 模型名称。"),
-               temperature: float = Body(TEMPERATURE, description="LLM 采样温度", gt=0.0, le=1.0),
+               temperature: float = Body(TEMPERATURE, description="LLM 采样温度", ge=0.0, le=1.0),
                # top_p: float = Body(TOP_P, description="LLM 核采样。勿与temperature同时设置", gt=0.0, lt=1.0),
                prompt_name: str = Body("llm_chat", description="使用的prompt模板名称(在configs/prompt_config.py中配置)"),
                ):
diff --git a/server/chat/knowledge_base_chat.py b/server/chat/knowledge_base_chat.py
index 0fb7ab4..f5cee99 100644
--- a/server/chat/knowledge_base_chat.py
+++ b/server/chat/knowledge_base_chat.py
@@ -30,7 +30,7 @@ async def knowledge_base_chat(query: str = Body(..., description="用户输入",
                               ),
                               stream: bool = Body(False, description="流式输出"),
                               model_name: str = Body(LLM_MODEL, description="LLM 模型名称。"),
-                              temperature: float = Body(TEMPERATURE, description="LLM 采样温度", gt=0.0, le=1.0),
+                              temperature: float = Body(TEMPERATURE, description="LLM 采样温度", ge=0.0, le=1.0),
                               prompt_name: str = Body("knowledge_base_chat", description="使用的prompt模板名称(在configs/prompt_config.py中配置)"),
                               local_doc_url: bool = Body(False, description="知识文件返回本地路径(true)或URL(false)"),
                               request: Request = None,
diff --git a/server/chat/search_engine_chat.py b/server/chat/search_engine_chat.py
index 78246ae..24845aa 100644
--- a/server/chat/search_engine_chat.py
+++ b/server/chat/search_engine_chat.py
@@ -71,7 +71,7 @@ async def search_engine_chat(query: str = Body(..., description="用户输入",
                              ),
                              stream: bool = Body(False, description="流式输出"),
                              model_name: str = Body(LLM_MODEL, description="LLM 模型名称。"),
-                             temperature: float = Body(TEMPERATURE, description="LLM 采样温度", gt=0.0, le=1.0),
+                             temperature: float = Body(TEMPERATURE, description="LLM 采样温度", ge=0.0, le=1.0),
                              prompt_name: str = Body("knowledge_base_chat", description="使用的prompt模板名称(在configs/prompt_config.py中配置)"),
                              ):
     if search_engine_name not in SEARCH_ENGINES.keys():
diff --git a/server/knowledge_base/migrate.py b/server/knowledge_base/migrate.py
index b8a95d9..53abecf 100644
--- a/server/knowledge_base/migrate.py
+++ b/server/knowledge_base/migrate.py
@@ -37,8 +37,8 @@ def folder2db(
         mode: Literal["recreate_vs", "update_in_db", "increament"],
         vs_type: Literal["faiss", "milvus", "pg", "chromadb"] = DEFAULT_VS_TYPE,
         embed_model: str = EMBEDDING_MODEL,
-        chunk_size: int = -1,
-        chunk_overlap: int = -1,
+        chunk_size: int = CHUNK_SIZE,
+        chunk_overlap: int = OVERLAP_SIZE,
         zh_title_enhance: bool = ZH_TITLE_ENHANCE,
 ):
     '''