import asyncio
# Load the e-commerce quarterly-report PDFs for both companies.
from llama_index.core import SimpleDirectoryReader

# NOTE(review): the original code loaded company B's PDF into BOTH variables —
# almost certainly a copy-paste bug, since A_docs/B_docs feed two distinct
# per-company indexes below. Point A_docs at company A's report; confirm the
# actual filename on disk matches.
A_docs = SimpleDirectoryReader(
    input_files=["data/电商A-Third Quarter 2023 Results.pdf"]
).load_data()
B_docs = SimpleDirectoryReader(
    input_files=["data/电商B-Third Quarter 2023 Results.pdf"]
).load_data()

# 配置本地嵌入模型和LLM
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.openai import OpenAI

class PatchedOpenAI(OpenAI):
    """OpenAI-compatible LLM whose metadata always reports a chat model.

    DashScope's qwen-turbo is served through an OpenAI-compatible endpoint;
    overriding ``metadata`` pins ``is_chat_model=True`` and the model name so
    downstream llama_index components treat it correctly.
    """

    @property
    def metadata(self):
        # Imported lazily, mirroring the original's function-scope import.
        from llama_index.core.llms import LLMMetadata

        meta = LLMMetadata(
            is_chat_model=True,
            model_name="qwen-turbo",
        )
        return meta

import os

# Configure the local HuggingFace embedding model (BGE small, Chinese).
Settings.embed_model = HuggingFaceEmbedding(
    model_name="D:/ideaSpace/MyPython/models/bge-small-zh-v1.5"
)

# Build the patched client for DashScope's OpenAI-compatible qwen-turbo endpoint.
# SECURITY(review): an API key was hard-coded here. It is now read from the
# DASHSCOPE_API_KEY environment variable; the old literal is kept only as a
# backward-compatible fallback. Rotate/revoke that key and delete the default.
llm = PatchedOpenAI(
    model="qwen-turbo",
    api_key=os.getenv("DASHSCOPE_API_KEY", "sk-4e88cf4db3e14894bafaff606d296610"),
    api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
    is_chat_model=True,
    max_tokens=None,
)

# Register the LLM globally so every llama_index component picks it up.
Settings.llm = llm

# Build one in-memory vector index per company from the loaded documents.
from llama_index.core import VectorStoreIndex
A_index = VectorStoreIndex.from_documents(A_docs)
B_index = VectorStoreIndex.from_documents(B_docs)

# Persist both indexes to local disk so later runs can reload them
# instead of re-embedding the PDFs.
from llama_index.core import StorageContext
A_index.storage_context.persist(persist_dir="./storage/A")
B_index.storage_context.persist(persist_dir="./storage/B")

# Reload both indexes from local storage.
# NOTE(review): the indexes were just built and persisted above, so this
# mainly validates the persist/load round-trip; consider attempting the load
# FIRST and only rebuilding on failure to skip redundant embedding work.
from llama_index.core import load_index_from_storage
try:
    storage_context = StorageContext.from_defaults(
        persist_dir="./storage/A"
    )
    A_index = load_index_from_storage(storage_context)

    storage_context = StorageContext.from_defaults(
        persist_dir="./storage/B"
    )
    B_index = load_index_from_storage(storage_context)

    index_loaded = True
except Exception:
    # Was a bare `except:`, which also swallows KeyboardInterrupt/SystemExit.
    # Narrow to Exception; on failure we fall back to the freshly built indexes.
    index_loaded = False

# Create one query engine per company index (top-3 similar chunks each).
A_engine = A_index.as_query_engine(similarity_top_k=3)
B_engine = B_index.as_query_engine(similarity_top_k=3)

# Wrap each engine as an agent tool: (engine, tool name, tool description).
from llama_index.core.tools import QueryEngineTool, ToolMetadata

_tool_specs = [
    (A_engine, "A_Finance", "用于提供A公司的财务信息"),
    (B_engine, "B_Finance", "用于提供B公司的财务信息"),
]
query_engine_tools = [
    QueryEngineTool(
        query_engine=engine,
        metadata=ToolMetadata(name=tool_name, description=tool_desc),
    )
    for engine, tool_name, tool_desc in _tool_specs
]

# Drive the agent asynchronously.
async def main():
    """Build a ReAct agent over the two finance tools and run one query."""
    # This ReActAgent implementation must be driven asynchronously,
    # hence the coroutine wrapper.
    from llama_index.core.agent.workflow.react_agent import ReActAgent

    finance_agent = ReActAgent(
        tools=query_engine_tools,
        llm=llm,
        verbose=True,
    )

    answer = await finance_agent.run("比较一下两个公司的销售额")
    print(answer)

# Entry point: ReActAgent.run is a coroutine, so drive it with asyncio.run.
# Guarded so importing this module doesn't trigger the agent run.
if __name__ == "__main__":
    asyncio.run(main())