import os

from dotenv import load_dotenv
from langchain.chat_models import init_chat_model
from langchain.prompts import ChatPromptTemplate
from langchain_community.tools import TavilySearchResults
from langchain_core.tracers import LangChainTracer
from langgraph.prebuilt import create_react_agent

from langchain_weather import get_weather
from tool_call import write_file

if __name__ == '__main__':
    # Load API keys and other settings from a local .env file.
    load_dotenv(override=True)

    # LangSmith tracing configuration. Secrets MUST come from the environment
    # (.env) -- never hard-code API keys in source control.
    os.environ["LANGCHAIN_PROJECT"] = "hepei-langgraph-Demo"
    os.environ["LANGCHAIN_TRACING_V2"] = "true"
    langchain_api_key = os.getenv("LANGCHAIN_API_KEY")
    if not langchain_api_key:
        raise RuntimeError("LANGCHAIN_API_KEY is not set; add it to your .env file.")
    os.environ["LANGCHAIN_API_KEY"] = langchain_api_key

    # init_chat_model resolves the DeepSeek key from the environment itself;
    # fail fast with a clear message if it is missing.
    if not os.getenv("DEEPSEEK_API_KEY"):
        raise RuntimeError("DEEPSEEK_API_KEY is not set; add it to your .env file.")
    model = init_chat_model(model="deepseek-chat", model_provider="deepseek")

    # Show the custom weather tool's metadata (name / description / arg schema).
    print(f'name:{get_weather.name},description:{get_weather.description},args:{get_weather.args}')

    # Assemble the ReAct agent: custom weather lookup, local file writer,
    # and Tavily web search (capped at 2 results per query).
    search = TavilySearchResults(max_results=2)
    tools = [get_weather, write_file, search]
    agent = create_react_agent(model=model, tools=tools)

    # recursion_limit caps the agent's reason/act loop so a confused model
    # cannot spin on tool calls forever.
    response = agent.invoke(
        {"messages": [
            {"role": "user",
             "content": "瑞芯微第九届开发者大会在哪里举行？哪里的天气怎么样，把信息并保存到本地"}]},
        config={"recursion_limit": 100},
    )

    print("==== All messages ====")
    print(response)

    print("==== Final AI Message Content ====")
    print(response["messages"][-1].content)