import os
from datetime import datetime

import chainlit as cl
from duckduckgo_search import DDGS
from langchain.chat_models import init_chat_model
from langchain_core.messages import HumanMessage
from langchain_core.messages import ToolMessage
from langchain_core.tools import tool


# @cl.step(type="tool")  # type: ignore
@tool
def get_time() -> str:
    """获取当前时间"""
    # Format the current wall-clock time with the original Chinese template.
    # NOTE: the docstring above doubles as the tool description shown to the
    # LLM, so it is kept verbatim.
    return datetime.now().strftime("当前时间是：%Y年%m月%d日 %H:%M。")


# @cl.step(type="tool")  # type: ignore
@tool
def get_weather(city: str) -> str:
    """获取城市天气"""
    # Stub implementation: always reports sunny / 25 degrees for any city.
    report = f" {city} 天气晴，25度。"
    return report


# @cl.step(type="tool")  # type: ignore
@tool
def get_web_data(query: str) -> str:
    """使用DuckDuckGo搜索引擎进行搜索"""
    # BUG FIX: DDGS().text() returns a list of result dicts, but this tool is
    # declared (and consumed) as str — join the hits into readable text
    # instead of handing the raw list structure to the LLM.
    results = DDGS().text(query, max_results=10)
    if not results:
        # Explicit message for an empty result set rather than "[]".
        return "未找到相关搜索结果。"
    # Each result dict carries "title" / "href" / "body" keys per the
    # duckduckgo_search API; use .get() defensively in case a key is missing.
    lines = [
        f"{item.get('title', '')}\n{item.get('href', '')}\n{item.get('body', '')}"
        for item in results
    ]
    return "\n\n".join(lines)


# Tools exposed to the model; the names must match the dispatch table used in
# the message handler below.
tools = [get_time, get_weather, get_web_data]

# SECURITY: the API key was previously hard-coded here. Read it from the
# environment when available; the embedded value remains only as a fallback
# for backward compatibility and should be rotated and removed.
api_key = os.environ.get("SILICONFLOW_API_KEY", "sk-pdfifkpjdlxvyvgkerbluaotktpznsmpbcvskjauotenxgvz")
base_url = "https://api.siliconflow.cn/v1"

# SiliconFlow serves an OpenAI-compatible endpoint, hence model_provider="openai".
model = init_chat_model(
    "Qwen/Qwen2.5-72B-Instruct",
    model_provider="openai",
    api_key=api_key,
    base_url=base_url,
)

# Model handle with the tool schemas attached so it can emit tool calls.
llm_with_tools = model.bind_tools(tools)


@cl.on_chat_start
def on_chat_start():
    """Log the start of a new chat session."""
    # Per-session state (e.g. cl.user_session) could be initialized here.
    print("A new chat session has started!")


@cl.on_message
async def on_message(message: cl.Message):
    """Handle an incoming user message.

    Sends the message to the tool-bound model; if the model requests tool
    calls, executes them, appends their results, and streams the model's
    final answer back to the UI. Otherwise streams the direct reply.
    """
    response = cl.Message(content="")

    messages = [HumanMessage(message.content)]
    print(messages)

    # FIX: use the async API instead of the blocking .invoke(), so the
    # Chainlit event loop stays responsive while the model is thinking.
    ai_msg = await llm_with_tools.ainvoke(messages)

    print(f"tool_call{ai_msg.tool_calls}")

    if ai_msg.tool_calls:
        # The AI message carrying the tool calls must precede the tool results.
        messages.append(ai_msg)
        print(messages)

        # Dispatch table hoisted out of the loop (it is loop-invariant).
        tool_registry = {"get_time": get_time, "get_weather": get_weather, "get_web_data": get_web_data}

        for tool_call in ai_msg.tool_calls:
            selected_tool = tool_registry.get(tool_call["name"].lower())
            if selected_tool is None:
                # FIX: an unrecognized tool name used to raise a bare KeyError.
                # Answer the call with an error ToolMessage instead — every
                # tool_call must receive a response or the follow-up model
                # request is rejected by the API.
                messages.append(
                    ToolMessage(
                        content=f"Unknown tool: {tool_call['name']}",
                        tool_call_id=tool_call["id"],
                    )
                )
                continue
            # Invoking a tool with the tool_call dict yields a ToolMessage.
            tool_msg = selected_tool.invoke(tool_call)
            print(tool_msg)
            messages.append(tool_msg)

        # Stream the final answer asynchronously. NOTE(review): tools remain
        # bound here (as in the original), so the model could in principle
        # request further tool calls that this handler does not execute.
        async for token in llm_with_tools.astream(messages):
            print(f"返回{token}")
            await response.stream_token(token.content)

    else:
        # No tool calls: send the model's direct reply.
        await response.stream_token(ai_msg.content)
    await response.send()


@cl.on_stop
def on_stop():
    """Log that the user interrupted the running task."""
    print("The user wants to stop the task!")


@cl.on_chat_end
def on_chat_end():
    """Log that the chat session has ended."""
    print("The user disconnected!")
