# -*- coding: utf-8 -*-
# time: 2025/5/14 16:54
# file: ch01.py
# author: hanson
"""
https://www.langchain.com.cn/docs/how_to/tool_results_pass_to_model/

"""
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.tools import tool
from langchain_ollama import ChatOllama


@tool(description="2个数相加")
def add(a: int, b: int) -> int:
    """Return the sum of two integers (exposed as a langchain tool)."""
    print(f"日志==》正在计算：{a} + {b}")
    result = a + b
    return result


@tool(description="幂次方")
def power(a: int, b: int) -> int:
    """Return a raised to the power b (exposed as a langchain tool).

    Args:
        a: Base value.
        b: Exponent.

    Returns:
        a ** b as an int.
    """
    # Bug fix: the log previously printed "{a} + {b}", misreporting the
    # actual operation (exponentiation) performed below.
    print(f"日志==》正在计算数幂次方：{a} ** {b}")
    return a ** b


# Local Ollama chat model; low temperature for near-deterministic tool selection.
llm = ChatOllama(model="llama3.2:3b", temperature=0.1)

# Tools the model may request calls for.
tools = [add, power]
# No prompt template is used here; the raw HumanMessage is sent directly.
# NOTE(review): ChatPromptTemplate is imported at the top of the file but
# never used — presumably left over for a later step; verify before removing.


# Bind the tool schemas to the model so its responses can include tool calls.
llm_with_tools = llm.bind_tools(tools)
messages = [HumanMessage("2个数相加:3,4")]
# Invoke the model once; the response may contain tool_calls rather than text,
# in which case response.content can be empty (tool results are not executed here).
response = llm_with_tools.invoke(messages)
print(response)
print(response.content)
