from datetime import datetime

from langchain_ollama import OllamaLLM
from langchain_core.prompts import PromptTemplate

# Step 1: Define a function to get the current time
def get_current_time(*args, **kwargs):
    """Return a human-readable message containing the current local time.

    Extra positional/keyword arguments are accepted and ignored so the
    function can be wired up as a generic tool callback.
    """
    # Embed the timestamp directly via an f-string format spec.
    return f"The current time is: {datetime.now():%Y-%m-%d %H:%M:%S}"

# Step 2: Initialize the local Ollama model
# Step 2: Initialize the local Ollama model
llm = OllamaLLM(model="qwen3:8b")  # Make sure you've pulled this model locally

# Step 3: Define a simple prompt that asks for a direct answer
prompt_template = PromptTemplate.from_template(
    """Question: {input}
Answer:"""
)

# Step 4: Route the question BEFORE calling the model.
# The original version always invoked the LLM and then unconditionally
# threw the answer away whenever the question mentioned "time" — paying
# for an inference whose result was never used. Checking the keyword
# first skips the model call entirely when the local tool can answer.
# NOTE(review): this keyword match is a toy router, not real LLM tool
# calling; any question containing "time" is handled locally.
question = "What time is it?"

if "time" in question.lower():
    # Step 5a: Local tool can answer directly — no LLM round-trip needed.
    response = get_current_time()
else:
    # Step 5b: Otherwise format the prompt and ask the model.
    prompt = prompt_template.format(input=question)
    response = llm.invoke(prompt)

# Step 6: Print final output
print(response)