# Schema for structured output
from pydantic import BaseModel, Field

from langchain_ollama import ChatOllama

# Local chat model served via Ollama. temperature=0.5 keeps outputs mildly
# varied; reasoning=False disables qwen3's thinking mode for direct answers.
# (An earlier revision used "qwen2.5:7b-instruct-q5_K_S".)
llm = ChatOllama(model="qwen3:8b", reasoning=False, temperature=0.5)

class SearchQuery(BaseModel):
    """Schema the LLM must fill when asked to produce a web-search query.

    Both fields are required: with the original ``Field(None, ...)`` default
    the fields were marked non-required in the generated JSON schema (and the
    ``None`` default contradicted the plain ``str`` annotation under
    pydantic v2), so the model could legally omit them.
    """

    # The actual query string, phrased for a search engine.
    search_query: str = Field(..., description="Query that is optimized for web search.")
    # Short rationale tying the query back to the user's request.
    justification: str = Field(
        ..., description="Why this query is relevant to the user's request."
    )

# Wrap the LLM so its reply is parsed into the SearchQuery schema
structured_llm = llm.with_structured_output(SearchQuery)

# Ask a question; the model must answer as a SearchQuery instance
question = "How does Calcium CT score relate to high cholesterol?"
output = structured_llm.invoke(question)

print("======================================")
print(output)

# Define a tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the product.

    NOTE: the docstring matters functionally here — LangChain's ``bind_tools``
    surfaces it as the tool description the model reads when deciding
    whether (and how) to call this tool.
    """
    return a * b

# Give the LLM access to the multiply tool
llm_with_tools = llm.bind_tools([multiply])

# Ask a question that should make the model request a tool call
tool_prompt = "What is 2 times 3?"
msg = llm_with_tools.invoke(tool_prompt)

# Inspect which tool calls the model decided to make
tool_calls = msg.tool_calls
print("++++++++++++++++++++++++++++++++++++++")
print(tool_calls)
