from llama_index.embeddings.openai import OpenAIEmbedding
from config import config
import os
from llama_index.core import (
    VectorStoreIndex,
    StorageContext,
    load_index_from_storage,
    SimpleDirectoryReader,
)
from utils import save_document_info, get_document_list
from llama_index.llms.openai import OpenAI


class IndexManager:
    """Manage a persistent vector index: load/create, insert documents, query."""

    def __init__(self):
        # Index handle; populated by initialize_index().
        self.index = None
        # Embedding model shared by indexing and querying.
        self.embed_model = OpenAIEmbedding(model_name=config.EMBEDDING_MODEL)
        # Load an existing index from disk, or create and persist a fresh one.
        self.initialize_index()

    def initialize_index(self):
        """Load the index from config.INDEX_DIR, or build and persist an empty one.

        Falls back to building a fresh index when the persist directory exists
        but does not contain a loadable index (e.g. an empty or partially
        written directory), instead of crashing at startup.
        """
        if os.path.exists(config.INDEX_DIR):
            try:
                self.index = load_index_from_storage(
                    StorageContext.from_defaults(persist_dir=config.INDEX_DIR),
                    embed_model=self.embed_model,
                )
                return
            except Exception:
                # A bare/partial persist dir makes load_index_from_storage
                # raise; rebuild from scratch rather than failing to start.
                pass
        self.index = VectorStoreIndex(nodes=[], embed_model=self.embed_model)
        self.index.storage_context.persist(persist_dir=config.INDEX_DIR)

    def insert_document(self, doc_file_path: str, filename):
        """Ingest a file into the index and persist the updated index.

        Args:
            doc_file_path: Path of the file to load and index.
            filename: Name recorded alongside a 200-character text preview
                via save_document_info for each loaded document.
        """
        documents = SimpleDirectoryReader(input_files=[doc_file_path]).load_data()
        for doc in documents:
            self.index.insert(doc)
            # Record the filename with a short preview of the document text.
            save_document_info(filename, doc.text[:200])
        # Persist once after all inserts so the on-disk copy stays current.
        self.index.storage_context.persist(persist_dir=config.INDEX_DIR)

    def get_document_list(self):
        """Return the list of ingested documents (delegates to utils)."""
        return get_document_list()

    def query_index(self, query_text):
        """Run a retrieval-augmented query against the index.

        Args:
            query_text: Natural-language query string.

        Returns:
            The query engine's response object.
        """
        llm = OpenAI(model=config.LLM_MODEL)
        query_engine = self.index.as_query_engine(
            similarity_top_k=config.SIMILARITY_TOP_K, llm=llm
        )
        return query_engine.query(query_text)


# Module-level singleton used by importers of this module.
# NOTE(review): instantiating here runs index loading/creation (and embedding
# model setup) at import time — confirm this side effect is intended.
indexManager = IndexManager()
