import asyncio
from llama_index.core.agent.workflow import ReActAgent
from llama_index.core.workflow import Context
from llama_index.llms.langchain import LangChainLLM
from langchain_community.chat_models import ChatTongyi
import os

async def main():
    """Ask a ReAct agent (Tongyi ``qwen-plus`` via LangChain) a refund
    question and print the reply.

    Returns the raw agent result so callers can inspect more than the
    printed text. Requires the ``DASHSCOPE_API_KEY`` environment variable.
    """
    # Wrap the LangChain chat model so llama_index can drive it.
    chat_model = ChatTongyi(
        model="qwen-plus",
        api_key=os.getenv("DASHSCOPE_API_KEY"),
    )
    llm = LangChainLLM(chat_model)

    # NOTE(review): no tools are supplied, so the agent can only answer
    # straight from the LLM — confirm this is intended.
    agent = ReActAgent(llm=llm)
    context = Context(agent)

    result = await agent.run("怎么退款？", ctx=context)
    # NOTE(review): `result.response` is presumably a message object, not a
    # plain string — printing it relies on its __str__; verify the output shape.
    print(result.response)
    return result

# Run the async entry point when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())