import asyncio
import os
import sys
import uuid
from datetime import datetime
from typing import Dict
import logging

import pytz
from llama_index.core.agent import ReActAgent
# Logging configuration
# Set the log level to DEBUG and emit log records to standard output
# logging.basicConfig(stream=sys.stdout,level=logging.DEBUG, format='%(asctime)s %(name)s [%(pathname)s line:%(lineno)d] %(levelname)s %(message)s')
# logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))




from llama_index.core.agent.workflow import AgentWorkflow, FunctionAgent
from llama_index.core.workflow import Workflow, Context

from llama_index.llms.ollama import Ollama
from llama_index.llms.openai import OpenAI
from pydantic import Field


from llama_index.core import Settings
from llama_index.core.callbacks import CallbackManager, LlamaDebugHandler
# Local debug handler: prints a call trace to stdout when a trace ends.
llama_debug = LlamaDebugHandler(print_trace_on_end=True)
from langfuse.llama_index import LlamaIndexCallbackHandler
# Langfuse tracing handler; credentials come from the environment.
# NOTE(review): uuid1 embeds the host MAC address and timestamp — fine for a
# demo session id, but uuid4 would avoid leaking host info.
langfuse_callback_handler = LlamaIndexCallbackHandler(
    trace_name='demo',
    user_id='sks',
    session_id=str(uuid.uuid1()),
    public_key=os.getenv('LANGFUSE_PUBLIC_KEY'),
    secret_key=os.getenv('LANGFUSE_SECRET_KEY'),
    host="https://us.cloud.langfuse.com"
)

# Install the handler(s) globally for all LlamaIndex components.
# Settings.callback_manager = CallbackManager([langfuse_callback_handler,llama_debug])
Settings.callback_manager = CallbackManager([langfuse_callback_handler])

from llama_index.llms.openai.utils import ALL_AVAILABLE_MODELS, CHAT_MODELS
# Map of model name -> context window size (in tokens).
LLM_MODELS_CONF: Dict[str, int] = {
    "ishumilin/deepseek-r1-coder-tools:14b": 64000,  # context size 64000
    "qwen2.5-coder:7b": 131072,
    "qwen-max-latest": 64000,
}
# Register these models in the OpenAI client's "available models" table so the
# llama-index OpenAI wrapper accepts them (they are served via an
# OpenAI-compatible endpoint, not by OpenAI itself).
ALL_AVAILABLE_MODELS.update(LLM_MODELS_CONF)
# Also register them as chat-capable models.
CHAT_MODELS.update(LLM_MODELS_CONF)


# https://docs.llamaindex.ai/en/latest/getting_started/starter_example/
# Define a simple calculator tool


def now_time():
    """Return the current time, honouring the ``TZ`` environment variable.

    In some container setups ``datetime.now()`` reports UTC wall-clock time
    when ``TZ`` is set; resolving the named zone via pytz yields the correct
    local time.

    Returns:
        datetime: timezone-aware in the ``TZ`` zone when that variable is
        set, otherwise a naive local-time datetime.
    """
    # Single env lookup instead of the membership-test-then-index pattern.
    tz_name = os.environ.get('TZ')
    if tz_name:
        # pytz raises UnknownTimeZoneError for an invalid zone name.
        return datetime.now(pytz.timezone(tz_name))
    return datetime.now()

def tool_now_time():
    """获取当前时间"""
    # Format as a Chinese date-time string, e.g. "2024年01月02日03时04分05秒".
    formatted = now_time().strftime("%Y年%m月%d日%H时%M分%S秒")
    print(f'调用函数 tool_now_time {formatted}')
    return formatted

def multiply(a: float, b: float) -> float:
    """Useful for multiplying two numbers."""
    # Trace the tool invocation to stdout.
    print('调用函数 multiply')
    return b * a

#使用该方法提取的 注解等信息 FunctionTool.from_defaults(tool)
#参考 https://download.csdn.net/blog/column/12606825/142743932
def get_weather(
    location: str = Field(
        description="城市名称,格式类似于<城市名称>"
    ),
) -> str:
    """用于获取特定地点的天气。"""
    # Trace the tool invocation, then return a canned forecast for the demo.
    print('调用函数 get_weather')
    return location + ' 今天多云'


# --- LLM backend selection --------------------------------------------------
# llm=Ollama(model="qwen2.5-coder:7b", request_timeout=360.0)
# Local Ollama OpenAI-compatible endpoint, kept commented for quick switching
# (previously these were live assignments that were immediately overwritten):
# llm_url = "http://192.168.56.1:11434/v1"
# llm_api_key = "ollama"
# llm_model = "qwen2.5-coder:7b"

# Active backend: Aliyun DashScope OpenAI-compatible endpoint.
llm_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
llm_model = "qwen-max-latest"
# Indexing (not .get) fails fast with KeyError if the key is missing.
llm_api_key = os.environ['ALIYUN-API-KEY']

llm = OpenAI(model=llm_model,
             api_base=llm_url,
             api_key=llm_api_key, temperature=0.7)
# llm = OpenAI(model="qwen2.5-coder:7b", temperature=0.7)
# print(f'llm init OK,是否支持funcall:{llm.metadata.is_function_calling_model}')
# Create an agent workflow with our calculator tool
# ReActAgent.from_tools()
# 0.12.23: use this approach (start)

# Module-level singletons populated by one of the init_agent*() functions
# before main()/amain() run.
agent: "Workflow | None" = None
ctx: "Context | None" = None

def init_agent1():
    """Build the module-level agent/ctx with a bare FunctionAgent.

    Works on llama-index 0.12.23, though this is probably not the intended
    usage pattern.
    https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_multi/
    """
    global agent, ctx
    agent = FunctionAgent(
        name="Agent",
        description="Useful for multiplying two numbers",
        llm=llm,
        tools=[multiply, get_weather],
        # system_prompt="You are a helpful assistant that can multiply two numbers.",
        system_prompt="你是一个能将两个数字相乘的得力助手。",
    )
    ctx = Context(agent)




def init_agent2():
    """Build the module-level agent/ctx via the 0.12.25-style API.

    Uses AgentWorkflow.from_tools_or_functions with plain functions.
    """
    global agent
    global ctx
    agent = AgentWorkflow.from_tools_or_functions(
        [multiply, get_weather],
        llm=llm,
        # FIX: the prompt previously claimed web-search ability (copied from a
        # docs example) while the registered tools are multiply and
        # get_weather — describe what the agent can actually do.
        system_prompt=(
            "You are a helpful assistant that can multiply two numbers "
            "and look up the weather for a location."
        ),
    )
    ctx = Context(agent)

def init_agent3():
    """Build the module-level agent/ctx as a single-agent AgentWorkflow.

    Works on llama-index 0.12.23, though this is probably not the intended
    usage pattern.
    https://docs.llamaindex.ai/en/stable/examples/agent/agent_workflow_multi/
    """
    global agent
    global ctx
    tool_agent = FunctionAgent(
        name="Agent",
        # FIX: description/prompt previously mentioned only multiplication,
        # but this agent also has weather and current-time tools — an
        # understated prompt can make the LLM refuse those requests.
        description="Useful for multiplying two numbers, checking the weather and telling the current time",
        tools=[multiply, get_weather, tool_now_time],
        llm=llm,
        # system_prompt="You are a helpful assistant that can multiply two numbers.",
        system_prompt="你是一个得力助手，可以将两个数字相乘、查询天气，以及报告当前时间。",
    )

    agent = AgentWorkflow(
        agents=[tool_agent],
        root_agent=tool_agent.name,
        initial_state={
            "research_notes": {},
            "report_content": "Not written yet.",
            "review": "Review required.",
        },
    )
    ctx = Context(agent)

async def atiwen(msg: str):
    """Send one question to the agent and print the question and answer.

    Older twin of tiwen(); kept for the 0.12.23-era experiments.
    """
    # New Langfuse session per question so each Q/A is traced separately.
    langfuse_callback_handler.session_id = str(uuid.uuid1())
    # FIX: pass the message as the user_msg keyword, consistent with tiwen().
    response = await agent.run(user_msg=msg, ctx=ctx)

    print(f'{"-"*10}你提的问题{"-"*10}')
    print(msg)
    print(f'{"-"*10}答案{"-"*10}')
    print(str(response))


async def amain():
    """Run the demo questions through atiwen()."""
    # FIX: atiwen() prints its result and returns None, so the previous
    # `response = await ...` assignments captured nothing useful.
    await atiwen("What is 1234 * 4567?")
    await atiwen("今天杭州的天气如何?")



async def tiwen(msg: str):
    """Ask the agent one question and print the Q/A pair."""
    # Start a fresh Langfuse session for this interaction.
    langfuse_callback_handler.session_id = str(uuid.uuid1())
    answer = await agent.run(user_msg=msg, ctx=ctx)

    separator = "-" * 10
    print(f'{separator}你提的问题{separator}')
    print(msg)
    print(f'{separator}答案{separator}')
    print(str(answer))


async def main():
    """Drive the agent through the three demo questions, one at a time."""
    questions = (
        "What is 1234 * 4567?",
        "今天杭州的天气如何?",
        "现在几点了?",
    )
    for question in questions:
        await tiwen(question)

# Script entry point: build the agent, then run the demo questions.
if __name__ == "__main__":
    # Alternative initialisers kept for experimentation:
    # init_agent1()
    # init_agent2()
    init_agent3()
    # asyncio.run(amain())
    asyncio.run(main())
