fix: langchain warnings for import from root

This commit is contained in:
liunux4odoo 2023-09-27 21:53:47 +08:00
parent 523764e284
commit 8d0f8a5d67
10 changed files with 13 additions and 10 deletions

View File

@@ -1,6 +1,6 @@
 from server.utils import get_ChatOpenAI
 from configs.model_config import LLM_MODEL, TEMPERATURE
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.prompts.chat import (
     ChatPromptTemplate,
     HumanMessagePromptTemplate,

View File

@@ -45,7 +45,9 @@ Thought:
 from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser
 from langchain.prompts import StringPromptTemplate
-from langchain import OpenAI, SerpAPIWrapper, LLMChain
+from langchain.llms import OpenAI
+from langchain.utilities import SerpAPIWrapper
+from langchain.chains import LLMChain
 from typing import List, Union
 from langchain.schema import AgentAction, AgentFinish, OutputParserException
 from server.agent.tools import tools

View File

@@ -1,4 +1,4 @@
-from langchain import PromptTemplate
+from langchain.prompts import PromptTemplate
 from langchain.chains import LLMMathChain
 from server.utils import wrap_done, get_ChatOpenAI
 from configs.model_config import LLM_MODEL, TEMPERATURE

View File

@@ -1,4 +1,5 @@
-from langchain import PromptTemplate, LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.chains import LLMChain
 import sys
 import os

View File

@@ -262,7 +262,7 @@ class LLMWeatherChain(Chain):
         return cls(llm_chain=llm_chain, **kwargs)
-from langchain import PromptTemplate
+from langchain.prompts import PromptTemplate
 _PROMPT_TEMPLATE = """用户将会向您咨询天气问题,您不需要自己回答天气问题,而是将用户提问的信息提取出来区,市和时间三个元素后使用我为你编写好的工具进行查询并返回结果,格式为 区+市+时间 每个元素用空格隔开。如果缺少信息,则用 None 代替。
 问题: ${{用户的问题}}

View File

@@ -7,7 +7,7 @@ from fastapi import Body
 from fastapi.responses import StreamingResponse
 from configs.model_config import LLM_MODEL, TEMPERATURE, HISTORY_LEN
 from server.utils import wrap_done, get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from typing import AsyncIterable
 import asyncio
 from langchain.prompts.chat import ChatPromptTemplate

View File

@@ -2,7 +2,7 @@ from fastapi import Body
 from fastapi.responses import StreamingResponse
 from configs import LLM_MODEL, TEMPERATURE
 from server.utils import wrap_done, get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable
 import asyncio

View File

@@ -3,7 +3,7 @@ from fastapi.responses import StreamingResponse
 from configs import (LLM_MODEL, VECTOR_SEARCH_TOP_K, SCORE_THRESHOLD, TEMPERATURE)
 from server.utils import wrap_done, get_ChatOpenAI
 from server.utils import BaseResponse, get_prompt_template
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable, List, Optional
 import asyncio

View File

@@ -6,7 +6,7 @@ from fastapi.responses import StreamingResponse
 from fastapi.concurrency import run_in_threadpool
 from server.utils import wrap_done, get_ChatOpenAI
 from server.utils import BaseResponse, get_prompt_template
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable
 import asyncio

View File

@@ -3,7 +3,7 @@ import os
 sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
 from configs import LLM_MODEL, TEMPERATURE
 from server.utils import get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.agents import LLMSingleActionAgent, AgentExecutor
 from server.agent.tools import tools, tool_names
 from langchain.memory import ConversationBufferWindowMemory