# Create a simple agent with LlamaIndex
import asyncio
import os

from langchain_community.chat_models import ChatTongyi
from llama_index.core.agent.workflow import ReActAgent
from llama_index.core.memory.chat_memory_buffer import ChatMemoryBuffer
from llama_index.core.workflow import Context
from llama_index.llms.langchain import LangChainLLM
# Initialize the LLM (Tongyi qwen-plus, wrapped for LlamaIndex via LangChain).
# SECURITY: the DashScope API key was previously hard-coded here and committed
# to source control — that key must be considered leaked and rotated.
# Read it from the environment instead; os.environ raises a clear KeyError
# at startup if the variable is missing.
llm = LangChainLLM(
    ChatTongyi(model="qwen-plus", api_key=os.environ["DASHSCOPE_API_KEY"])
)

# Create the first agent: a ReAct agent with no tools attached — it can
# only answer from the LLM itself.
agent = ReActAgent(llm=llm)

# Async entry point — every ``await`` must live inside a coroutine.
async def main1():
    """Run the tool-less agent once and print its reply."""
    context = Context(agent)
    answer = await agent.run("今天天气怎么样", ctx=context)
    print(answer)

# A tool the LlamaIndex agent can call.
def get_weather(city: str) -> str:
    """Return a (mock) weather report for *city*."""
    print("正在获取{}的天气".format(city))
    return "{},天气一直都是晴天！".format(city)

# Second agent: same LLM, but equipped with the get_weather tool.
agent2 = ReActAgent(llm=llm,tools=[get_weather])
async def main2():
    """Ask the tool-equipped agent about the weather in Chengdu and print the reply."""
    context = Context(agent2)
    # A fresh conversation buffer capped at 4000 tokens for this run.
    buffer = ChatMemoryBuffer.from_defaults(token_limit=4000)
    answer = await agent2.run("成都的天气怎么样", ctx=context, memory=buffer)
    print(answer)

# Script entry point: start the event loop and run the async demo.
if __name__ == "__main__":
    # Swap in main1() here to try the tool-less agent instead.
    # asyncio.run(main1())
    asyncio.run(main2())