import os

from dotenv import load_dotenv
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain.chat_models import init_chat_model
from langchain_community.tools import TavilySearchResults
from langchain_core.prompts import ChatPromptTemplate

from src.Basic.tools.file import write_file

# Load variables from .env, letting file values override already-set env vars.
load_dotenv(override=True)

# Chat model: DeepSeek, resolved through LangChain's provider registry.
model = init_chat_model(model="deepseek-chat", model_provider="deepseek")

# Web-search tool backed by Tavily; capped at 2 results per query.
searchResults = TavilySearchResults(max_results=2, tavily_api_key=os.getenv("TAVILY_SEARCH_API_KEY"))

# Tools exposed to the agent: web search and local file writing.
tools = [searchResults, write_file]
# NOTE: the original `llm_with_tools = model.bind_tools(tools)` was removed —
# it was never used; create_tool_calling_agent() binds the tools itself.

# Prompt layout: a fixed system persona, the user's question, and a
# placeholder where the agent's tool-call scratchpad is injected.
messages = [
    ("system", "你是一个善于帮助用户通过搜索工具获取信息的助手"),
    ("human", "{input}"),
    ("placeholder", "{agent_scratchpad}"),
]
prompt = ChatPromptTemplate.from_messages(messages)

# Assemble the tool-calling agent, then wrap it in an executor that
# drives the model/tool loop until a final answer is produced.
agent = create_tool_calling_agent(llm=model, tools=tools, prompt=prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# Phrases (case-insensitive) that end the interactive session.
EXIT_COMMANDS = {"exit", "quit", "bye", "goodbye", "stop"}

# Simple REPL: read a question, run the agent synchronously, print its answer.
while True:
    query = input("请问我可以帮您做什么吗？")

    # Normalize once so "  Exit " and "QUIT" also terminate the loop.
    normalized = query.strip().lower()
    if normalized in EXIT_COMMANDS:
        break
    if not normalized:
        # Don't burn an LLM call on blank input.
        continue

    # Single blocking call; the executor handles any tool invocations internally.
    response = agent_executor.invoke({"input": query})
    print(response["output"])
