from llama_index.core import (
    VectorStoreIndex, 
    SimpleDirectoryReader,
    StorageContext,
    load_index_from_storage
)
from llama_index.core.node_parser import SimpleNodeParser
from llama_index.core import settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.vector_stores.faiss import FaissVectorStore
import os
import logging
from pathlib import Path
import faiss
from typing import List, Dict, Any, Optional

# Configure module-level logger; the application is expected to attach handlers.
logger = logging.getLogger(__name__)

# BUGFIX: the previous code iterated logger.handlers and called
# handler.setEncoding('utf-8') on FileHandlers, but logging.FileHandler has
# no setEncoding() method -- it raised AttributeError as soon as a file
# handler was attached. Encoding must be supplied at construction time
# instead, e.g. logging.FileHandler(path, encoding="utf-8"), so the broken
# loop was removed.

class RAGManager:
    """FAISS-backed retrieval-augmented-generation (RAG) index manager.

    Wraps llama_index's ``VectorStoreIndex`` around a configurable FAISS
    index (flat / HNSW / IVF), handling index creation, persistence to disk,
    incremental document insertion, retrieval, and simple size-based
    auto-tuning of the index type.
    """

    def __init__(
        self,
        knowledge_base_dir: str = "knowledge_base",
        index_storage_dir: str = "index_storage",
        embedding_model_name: str = "BAAI/bge-small-en-v1.5",
        faiss_index_type: str = "flat",  # Options: flat, hnsw, ivf
        faiss_metrics: str = "l2",       # Options: l2, inner_product, cosine
        nlist: int = 100,                # For IVF indexes
        nprobe: int = 10,                # For IVF search
        M: int = 16,                     # For HNSW
        ef_construction: int = 200,      # For HNSW
        ef_search: int = 128,            # For HNSW
    ):
        """Configure the manager and build (or load) the vector index.

        Args:
            knowledge_base_dir: Directory containing source documents.
            index_storage_dir: Directory where the index is persisted.
            embedding_model_name: HuggingFace embedding model name.
            faiss_index_type: One of ``"flat"``, ``"hnsw"``, ``"ivf"``.
            faiss_metrics: One of ``"l2"``, ``"inner_product"``, ``"cosine"``.
            nlist: Number of IVF clusters (IVF only).
            nprobe: Number of clusters probed at search time (IVF only).
            M: Number of HNSW graph neighbors (HNSW only).
            ef_construction: HNSW build-time beam width (HNSW only).
            ef_search: HNSW query-time beam width (HNSW only).
        """
        self.knowledge_base_dir = knowledge_base_dir
        self.index_storage_dir = index_storage_dir
        self.embedding_model_name = embedding_model_name
        self.faiss_index_type = faiss_index_type
        self.faiss_metrics = faiss_metrics
        self.nlist = nlist
        self.nprobe = nprobe
        self.M = M
        self.ef_construction = ef_construction
        self.ef_search = ef_search

        # Create directories if they don't exist. BUGFIX: parents=True so a
        # nested path like "data/kb" does not raise FileNotFoundError.
        Path(knowledge_base_dir).mkdir(parents=True, exist_ok=True)
        Path(index_storage_dir).mkdir(parents=True, exist_ok=True)

        # Configure global settings with the local embedding model.
        embed_model = HuggingFaceEmbedding(model_name=embedding_model_name)
        settings.Settings.embed_model = embed_model
        settings.Settings.node_parser = SimpleNodeParser()

        # Derive the embedding dimension from the model itself so that
        # non-default models produce a correctly-sized FAISS index; fall back
        # to 384 (the bge-small-en-v1.5 dimension) if probing fails.
        try:
            self.embedding_dimension = len(
                embed_model.get_text_embedding("dimension probe")
            )
        except Exception:
            self.embedding_dimension = 384

        # Initialize index
        self._initialize_index()

    def _initialize_index(self):
        """Initialize or load the vector index.

        Order of attempts:
          1. Load a previously persisted index (FAISS file + docstore).
          2. If the knowledge base is empty, create an empty index.
          3. Otherwise build a fresh index from the knowledge-base documents.
        Any failure falls back to an in-memory empty index so the manager
        stays usable.
        """
        try:
            # Check if index storage exists and attempt to load it
            index_path = Path(self.index_storage_dir)
            if index_path.exists() and any(index_path.iterdir()):
                logger.info("Loading existing index from storage...")
                try:
                    # The raw FAISS index is persisted in a separate file
                    # (see _persist_index); reload it and re-apply runtime
                    # search parameters, which are not stored in the file.
                    faiss_index_file = os.path.join(self.index_storage_dir, "faiss.index")
                    if os.path.exists(faiss_index_file):
                        faiss_index = faiss.read_index(faiss_index_file)
                        if isinstance(faiss_index, faiss.IndexIVFFlat):
                            faiss_index.nprobe = self.nprobe
                        elif hasattr(faiss_index, 'hnsw'):
                            faiss_index.hnsw.efSearch = self.ef_search

                        vector_store = FaissVectorStore(faiss_index=faiss_index)
                        storage_context = StorageContext.from_defaults(
                            vector_store=vector_store,
                            persist_dir=self.index_storage_dir
                        )
                        self.index = load_index_from_storage(storage_context)
                        logger.info("Successfully loaded index from storage")
                        return
                    else:
                        logger.info("FAISS index file not found, creating new index")
                except Exception as load_error:
                    # Fall through to rebuilding from documents below.
                    logger.error(f"Error loading index from storage: {load_error}")

            # Check if there are documents in the knowledge base
            docs_path = Path(self.knowledge_base_dir)
            if not any(docs_path.iterdir()):
                logger.info("No documents found in knowledge base. Creating empty index.")
                faiss_index = self._create_faiss_index()
                vector_store = FaissVectorStore(faiss_index=faiss_index)
                storage_context = StorageContext.from_defaults(vector_store=vector_store)
                self.index = VectorStoreIndex.from_documents([], storage_context=storage_context)
                self._persist_index()
                return

            # Build a fresh index from the knowledge-base documents.
            self._create_new_index()

        except Exception as e:
            logger.error(f"Error initializing index: {e}")
            # Last-resort fallback: an empty in-memory index.
            faiss_index = self._create_faiss_index()
            vector_store = FaissVectorStore(faiss_index=faiss_index)
            storage_context = StorageContext.from_defaults(vector_store=vector_store)
            self.index = VectorStoreIndex.from_documents([], storage_context=storage_context)

    def _create_faiss_index(self):
        """Create and return a FAISS index matching the configured type/metric.

        Returns an untrained index; IVF indexes must still be trained before
        vectors are added (handled in ``_create_new_index``).
        """
        d = self.embedding_dimension

        # Resolve the distance metric.
        if self.faiss_metrics == "l2":
            metric = faiss.METRIC_L2
        elif self.faiss_metrics == "inner_product":
            metric = faiss.METRIC_INNER_PRODUCT
        elif self.faiss_metrics == "cosine":
            # Cosine similarity == inner product on unit-length vectors.
            # NOTE(review): this class does NOT normalize vectors itself, so
            # "cosine" is only correct if the embedding model outputs
            # normalized embeddings -- verify for the model in use.
            metric = faiss.METRIC_INNER_PRODUCT
        else:
            metric = faiss.METRIC_L2  # Default

        # Create index based on type.
        if self.faiss_index_type == "flat":
            index = faiss.IndexFlatL2(d) if metric == faiss.METRIC_L2 else faiss.IndexFlat(d, metric)
        elif self.faiss_index_type == "hnsw":
            index = faiss.IndexHNSWFlat(d, self.M, metric)
            index.hnsw.efConstruction = self.ef_construction
            index.hnsw.efSearch = self.ef_search
        elif self.faiss_index_type == "ivf":
            # IVF needs a coarse quantizer (typically a flat index).
            quantizer = faiss.IndexFlatL2(d) if metric == faiss.METRIC_L2 else faiss.IndexFlat(d, metric)
            index = faiss.IndexIVFFlat(quantizer, d, self.nlist, metric)
            # nprobe is a search-time parameter; set it up front.
            if self.nprobe > 0:
                index.nprobe = self.nprobe
        else:
            # Unknown type: default to a flat L2 index.
            index = faiss.IndexFlatL2(d)

        logger.info(f"Created FAISS index of type {self.faiss_index_type} with metric {self.faiss_metrics}")
        return index

    def _create_new_index(self):
        """Create a new index from documents in the knowledge base.

        BUGFIX: FAISS IVF indexes must be trained BEFORE any vectors are
        added, but ``VectorStoreIndex.from_documents`` adds vectors as it
        builds. The previous implementation trained after building, when the
        untrained index had already been asked to store vectors. Training now
        happens first, using embeddings of the loaded documents.
        """
        documents = SimpleDirectoryReader(self.knowledge_base_dir).load_data()

        # Initialize FAISS index
        faiss_index = self._create_faiss_index()

        # Train IVF indexes before any vectors are inserted.
        if (
            self.faiss_index_type == "ivf"
            and len(documents) > 0
            and not faiss_index.is_trained
        ):
            train_vectors = []
            for doc in documents:
                # Embed document text to provide training samples.
                embed = settings.Settings.embed_model.get_text_embedding(doc.text)
                if embed:
                    train_vectors.append(embed)

            if train_vectors:
                import numpy as np
                train_array = np.array(train_vectors).astype('float32')
                logger.info(f"Training IVF index with {len(train_array)} vectors")
                faiss_index.train(train_array)

        # Create vector store and storage context backed by the FAISS index.
        vector_store = FaissVectorStore(faiss_index=faiss_index)
        storage_context = StorageContext.from_defaults(vector_store=vector_store)

        # Build the index (this embeds and inserts all documents).
        self.index = VectorStoreIndex.from_documents(
            documents,
            storage_context=storage_context,
        )

        logger.info(f"Created new FAISS index with {len(documents)} documents")

        # Persist the index
        self._persist_index()

    def _persist_index(self):
        """Save the index (FAISS file + llama_index storage) to disk.

        Errors are logged, not raised, so persistence failures do not take
        down the in-memory index.
        """
        try:
            # Save the raw FAISS index separately; _initialize_index reloads
            # it from this file.
            if hasattr(self.index, "_vector_store") and isinstance(self.index._vector_store, FaissVectorStore):
                faiss_index = self.index._vector_store._faiss_index
                faiss_index_file = os.path.join(self.index_storage_dir, "faiss.index")
                faiss.write_index(faiss_index, faiss_index_file)

            # Save the llama_index storage (docstore, index metadata, ...).
            self.index.storage_context.persist(persist_dir=self.index_storage_dir)
            logger.info(f"Index persisted to {self.index_storage_dir}")
        except Exception as e:
            logger.error(f"Error persisting index: {e}")

    def add_documents(self, file_paths: List[str]) -> bool:
        """Add documents to the index and persist the result.

        Args:
            file_paths: Paths of files to load and insert.

        Returns:
            True on success, False if any load/insert step failed.

        NOTE(review): for an untrained IVF index, inserting here may fail --
        FAISS requires training before add; rebuild via _create_new_index in
        that case.
        """
        try:
            # Load and insert each file's documents directly into the index.
            for file_path in file_paths:
                documents = SimpleDirectoryReader(input_files=[file_path]).load_data()
                logger.info(f"Adding document: {file_path}")

                for doc in documents:
                    self.index.insert(doc)

            # Persist the updated index
            self._persist_index()

            logger.info("Documents added and index updated successfully")
            return True
        except Exception as e:
            logger.error(f"Error adding documents: {e}")
            return False

    def query(self, query_text: str, similarity_top_k: int = 3) -> Dict[str, Any]:
        """Retrieve context for a question.

        Args:
            query_text: Natural-language query.
            similarity_top_k: Number of nodes to retrieve.

        Returns:
            Dict with ``context`` (concatenated node text), ``sources``
            (de-duplicated file names), ``nodes`` (retrieved count), and an
            ``error`` key on failure.
        """
        try:
            # Re-apply runtime search parameters (they may have been changed
            # since the index was built/loaded).
            if self.faiss_index_type == "ivf" and hasattr(self.index, "_vector_store"):
                faiss_index = self.index._vector_store._faiss_index
                faiss_index.nprobe = self.nprobe
            elif self.faiss_index_type == "hnsw" and hasattr(self.index, "_vector_store"):
                faiss_index = self.index._vector_store._faiss_index
                faiss_index.hnsw.efSearch = self.ef_search

            # Create retriever
            retriever = self.index.as_retriever(similarity_top_k=similarity_top_k)

            # Retrieve relevant nodes
            nodes = retriever.retrieve(query_text)

            # Format retrieved context
            context = "\n\n".join([node.text for node in nodes])

            # Collect source document names where available.
            sources = []
            for node in nodes:
                if hasattr(node, 'metadata') and 'file_name' in node.metadata:
                    sources.append(node.metadata['file_name'])
                elif hasattr(node, 'source') and node.source:
                    sources.append(node.source)

            return {
                "context": context,
                "sources": list(set(sources)),  # Remove duplicates
                "nodes": len(nodes)
            }
        except Exception as e:
            logger.error(f"Error during query: {e}")
            return {"context": "", "sources": [], "nodes": 0, "error": str(e)}

    def optimize_index_type(self):
        """Pick a FAISS index type based on document count and rebuild.

        Heuristics: flat (<1k docs) for exact search, HNSW (<10k) for a
        speed/recall balance, IVF (>=10k) for memory efficiency.

        Returns:
            The (possibly updated) index type string.
        """
        try:
            # Count documents currently held in the docstore.
            doc_count = 0
            if hasattr(self.index, "_docstore"):
                doc_count = len(self.index._docstore.docs)

            if doc_count < 1000:
                # For small collections, flat index is best
                self.faiss_index_type = "flat"
                logger.info(f"Auto-optimized to flat index for {doc_count} documents")
            elif doc_count < 10000:
                # For medium collections, HNSW is a good balance
                self.faiss_index_type = "hnsw"
                self.M = 16
                self.ef_construction = 200
                self.ef_search = 128
                logger.info(f"Auto-optimized to HNSW index for {doc_count} documents")
            else:
                # For large collections, IVF is more memory efficient
                self.faiss_index_type = "ivf"
                # Scale nlist/nprobe with data size, clamped to sane ranges.
                self.nlist = min(max(int(doc_count / 100), 100), 1000)
                self.nprobe = min(max(int(self.nlist / 10), 10), 100)
                logger.info(f"Auto-optimized to IVF index for {doc_count} documents with nlist={self.nlist}")

            # Rebuild index with optimized settings
            self._create_new_index()
            return self.faiss_index_type
        except Exception as e:
            logger.error(f"Error optimizing index: {e}")
            return self.faiss_index_type

    def get_index_stats(self) -> Dict[str, Any]:
        """Return statistics about the current index.

        Returns:
            Dict with ``document_count``, ``index_type``, ``metrics_type``,
            ``faiss_details`` (type-specific parameters), and
            ``index_size_mb`` (on-disk FAISS file size).
        """
        try:
            doc_count = 0
            if hasattr(self.index, "_docstore"):
                doc_count = len(self.index._docstore.docs)

            index_type = self.faiss_index_type

            # Gather FAISS-specific information when a FAISS store is in use.
            faiss_stats = {}
            if hasattr(self.index, "_vector_store") and isinstance(self.index._vector_store, FaissVectorStore):
                faiss_index = self.index._vector_store._faiss_index

                # Common stats
                faiss_stats["dimension"] = faiss_index.d

                # Type-specific stats
                if index_type == "ivf" and isinstance(faiss_index, faiss.IndexIVFFlat):
                    faiss_stats["nlist"] = faiss_index.nlist
                    faiss_stats["nprobe"] = faiss_index.nprobe
                    faiss_stats["is_trained"] = faiss_index.is_trained
                elif index_type == "hnsw" and hasattr(faiss_index, 'hnsw'):
                    faiss_stats["M"] = self.M
                    faiss_stats["ef_construction"] = self.ef_construction
                    faiss_stats["ef_search"] = self.ef_search

            # On-disk size of the persisted FAISS file, in MB (best effort).
            index_size = 0
            try:
                faiss_index_file = os.path.join(self.index_storage_dir, "faiss.index")
                if os.path.exists(faiss_index_file):
                    index_size = os.path.getsize(faiss_index_file) / (1024 * 1024)  # Size in MB
            except Exception:
                pass

            return {
                "document_count": doc_count,
                "index_type": index_type,
                "metrics_type": self.faiss_metrics,
                "faiss_details": faiss_stats,
                "index_size_mb": round(index_size, 2) if index_size else 0
            }
        except Exception as e:
            logger.error(f"Error getting index stats: {e}")
            return {
                "error": str(e),
                "document_count": 0,
                "index_type": self.faiss_index_type
            }