# -*- coding: utf-8 -*-
# time: 2025/5/12 13:44
# file: TavilySeach_agent.py
# author: hanson
"""
Demo: a tool-calling agent that answers questions using Tavily web search.

Tavily dashboard (API keys): https://app.tavily.com/home
"""
import os

from langchain.agents import create_tool_calling_agent, AgentExecutor
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.tools import Tool

# SECURITY NOTE(review): a live API key is hard-coded and committed to source.
# This key should be rotated and loaded from the environment or a secrets
# manager (e.g. os.environ.get / python-dotenv) instead of being set here.
os.environ["TAVILY_API_KEY"] = "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"
# NOTE(review): these imports are placed after the env-var assignment,
# presumably so TAVILY_API_KEY is set before the Tavily tool reads it — confirm.
from langchain_ollama import ChatOllama
from langchain_community.tools.tavily_search import TavilySearchResults

# Local Ollama chat model; temperature=0 for deterministic answers.
model = ChatOllama(model="qwen2.5:1.5b", temperature=0)


# LangChain's built-in Tavily search tool, capped at 2 results per query.
search = TavilySearchResults(max_results=2)
# print(search.invoke("中山的天气怎么样?"))  # example: invoking the tool directly

def search_weather(query: str) -> str:
    """Look up weather information via the Tavily search tool.

    Args:
        query: Free-form weather question to search for.

    Returns:
        The raw result produced by the Tavily search tool for ``query``.
    """
    results = search.invoke(query)
    return results

# Wrap the plain function as a LangChain Tool so an agent can call it.
weather_tool = Tool.from_function(
    name="search_weather",
    description="天气信息",
    func=search_weather,
)
tools = [weather_tool]

# Bind the tool schemas to the model so it can emit tool calls.
# FIX: the original executed this exact assignment twice (once here and once
# again after the commented-out experiments) — binding once is sufficient.
# NOTE(review): `create_tool_calling_agent` below is given the raw `model`
# and binds the tools itself, so `model_with_tools` is kept only for ad-hoc
# experimentation, e.g.:
#   model_with_tools.invoke([HumanMessage(content="中山的天气怎么样?")])
# (a bound model alone only *requests* a tool call; an AgentExecutor is
# needed to actually run the tool and feed the result back to the model).
model_with_tools = model.bind_tools(tools)

# Build the prompt.  The placeholder names are load-bearing: "messages" must
# match the key passed to agent_executor.invoke() below, and
# "agent_scratchpad" is where the agent framework inserts intermediate
# tool-call steps (required by create_tool_calling_agent).
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个科学助手."),
        MessagesPlaceholder(variable_name="messages"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ]
)

# Create the tool-calling agent from the raw model — the agent binds the
# tools itself, so no pre-bound model is needed here.
agent = create_tool_calling_agent(model, tools, prompt)

# Executor that drives the agent loop (model -> tool -> model), with verbose
# step-by-step logging enabled.
agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# Example 1: a general-knowledge question the model should answer without
# calling any tool.
response = agent_executor.invoke({"messages": [HumanMessage(content="中国的首都是哪个城市??")]})
print(response)
# Example 2: a weather question expected to trigger the search_weather tool.
response = agent_executor.invoke({"messages": [HumanMessage(content="搜索天气信息:中山的天气怎么样?")]})
print(response)


