"""
向量存储
"""
import os
import shutil
from typing import List, Dict, Any, Optional, Tuple
from abc import ABC, abstractmethod
from langchain.docstore.document import Document
from langchain.vectorstores import FAISS, Chroma
from langchain.vectorstores.pgvector import PGVector
from langchain.vectorstores.milvus import Milvus

from ..config.settings import settings
from .embeddings import EmbeddingAdapter


class VectorStore(ABC):
    """Abstract base class for vector stores.

    Subclasses wrap a concrete backend (FAISS, ChromaDB, ...) and must
    implement the abstract initialization / CRUD / search methods below.
    """
    
    def __init__(self, embedding_model: Optional[str] = None, **kwargs):
        # Fall back to the globally configured embedding model when none is given.
        self.embedding_model = embedding_model or settings.embedding.model_name
        self.embedding_service = EmbeddingAdapter(self.embedding_model)
        # Backend-specific store handle; populated by init_vector_store().
        self.vector_store = None
    
    @abstractmethod
    def init_vector_store(self, **kwargs):
        """Initialize (load or create) the underlying vector store."""
        pass
    
    @abstractmethod
    def add_documents(self, documents: List[Document]) -> List[str]:
        """Add documents to the store and return the ids assigned to them."""
        pass
    
    @abstractmethod
    def similarity_search(
        self, 
        query: str, 
        k: int = 4, 
        score_threshold: Optional[float] = None
    ) -> List[Tuple[Document, float]]:
        """Return up to *k* (document, score) pairs most similar to *query*.

        When *score_threshold* is given, pairs whose score is below it are
        filtered out.
        """
        pass
    
    @abstractmethod
    def delete_documents(self, ids: List[str]) -> bool:
        """Delete the documents with the given ids; return True on success."""
        pass
    
    @abstractmethod
    def clear(self) -> bool:
        """Remove all stored vectors; return True on success."""
        pass


class FAISSVectorStore(VectorStore):
    """FAISS-backed vector store persisted under ``self.index_path``.

    The index is loaded from disk when present and otherwise created
    lazily from the first batch of documents, because FAISS cannot be
    instantiated from an empty document list (the original
    ``FAISS.from_documents([], ...)`` call raised on a fresh store).
    """
    
    def __init__(self, index_path: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.index_path = index_path or os.path.join(settings.vector_store.storage_path, "faiss_index")
        self.vector_store = None
    
    def init_vector_store(self, **kwargs):
        """Load the FAISS index from disk if one has been saved.

        Leaves ``self.vector_store`` as None when no index exists yet; the
        index is then built on the first ``add_documents`` call.
        """
        if os.path.exists(self.index_path):
            self.vector_store = FAISS.load_local(
                self.index_path, 
                self.embedding_service
            )
    
    def add_documents(self, documents: List[Document]) -> List[str]:
        """Embed *documents*, add them to the index, persist, return ids."""
        if not documents:
            return []
        
        if not self.vector_store:
            self.init_vector_store()
        
        import uuid
        
        texts = [doc.page_content for doc in documents]
        metadatas = [doc.metadata for doc in documents]
        ids = [str(uuid.uuid4()) for _ in texts]
        
        # Embed once here so the same vectors serve both index creation
        # and incremental addition.
        embeddings = self.embedding_service.embed_documents(texts)
        
        if self.vector_store is None:
            # First batch ever: build the index directly from the embeddings.
            self.vector_store = FAISS.from_embeddings(
                text_embeddings=list(zip(texts, embeddings)),
                embedding=self.embedding_service,
                metadatas=metadatas,
                ids=ids,
            )
        else:
            self.vector_store.add_embeddings(
                text_embeddings=zip(texts, embeddings),
                metadatas=metadatas,
                ids=ids,
            )
        
        # Persist after every batch so a crash does not lose the additions.
        self.save()
        
        return ids
    
    def similarity_search(
        self, 
        query: str, 
        k: int = 4, 
        score_threshold: Optional[float] = None
    ) -> List[Tuple[Document, float]]:
        """Return up to *k* (document, score) pairs most similar to *query*."""
        if not self.vector_store:
            self.init_vector_store()
        
        # No index on disk and nothing added yet: nothing to search.
        if self.vector_store is None:
            return []
        
        query_embedding = self.embedding_service.embed_query(query)
        
        docs_and_scores = self.vector_store.similarity_search_with_score_by_vector(
            query_embedding, 
            k=k
        )
        
        # NOTE(review): FAISS scores are distances (lower is more similar),
        # so keeping score >= threshold may be inverted — confirm the
        # intended threshold semantics with callers.
        if score_threshold is not None:
            docs_and_scores = [
                (doc, score) for doc, score in docs_and_scores 
                if score >= score_threshold
            ]
        
        return docs_and_scores
    
    def delete_documents(self, ids: List[str]) -> bool:
        """Delete the given ids from the index; best-effort, returns success."""
        if not self.vector_store:
            return False
        
        try:
            self.vector_store.delete(ids)
            self.save()
            return True
        except Exception as e:
            print(f"删除文档失败: {e}")
            return False
    
    def clear(self) -> bool:
        """Drop the on-disk index and reset the in-memory handle."""
        try:
            if os.path.exists(self.index_path):
                shutil.rmtree(self.index_path)
            self.vector_store = None
            return True
        except Exception as e:
            print(f"清空向量存储失败: {e}")
            return False
    
    def save(self):
        """Persist the current index to ``self.index_path``."""
        if self.vector_store:
            os.makedirs(os.path.dirname(self.index_path), exist_ok=True)
            self.vector_store.save_local(self.index_path)

class ChromaVectorStore(VectorStore):
    """ChromaDB-backed vector store.

    Uses only Chroma's public API (``add_texts`` / ``delete``) instead of
    the private ``_collection`` handle the original code relied on, which
    is not a stable interface across langchain/chromadb versions.
    """
    
    def __init__(self, persist_directory: Optional[str] = None, collection_name: str = "default", **kwargs):
        super().__init__(**kwargs)
        self.persist_directory = persist_directory or settings.vector_store.chroma_persist_directory
        self.collection_name = collection_name
        self.vector_store = None
    
    def init_vector_store(self, **kwargs):
        """Open (or create) the Chroma collection."""
        self.vector_store = Chroma(
            collection_name=self.collection_name,
            embedding_function=self.embedding_service,
            persist_directory=self.persist_directory,
        )
    
    def add_documents(self, documents: List[Document]) -> List[str]:
        """Add *documents* to the collection and return their generated ids."""
        if not documents:
            return []
        
        if not self.vector_store:
            self.init_vector_store()
        
        import uuid
        
        texts = [doc.page_content for doc in documents]
        metadatas = [doc.metadata for doc in documents]
        ids = [str(uuid.uuid4()) for _ in texts]
        
        # Public API; Chroma embeds the texts through the configured
        # embedding_function (the same EmbeddingAdapter the original code
        # invoked manually before inserting via _collection.add).
        self.vector_store.add_texts(
            texts=texts,
            metadatas=metadatas,
            ids=ids,
        )
        
        return ids
    
    def similarity_search(
        self, 
        query: str, 
        k: int = 4, 
        score_threshold: Optional[float] = None
    ) -> List[Tuple[Document, float]]:
        """Return up to *k* (document, score) pairs most similar to *query*."""
        if not self.vector_store:
            self.init_vector_store()
        
        results = self.vector_store.similarity_search_with_score(
            query, 
            k=k
        )
        
        # NOTE(review): Chroma's default score is a distance (lower is more
        # similar); score >= threshold may be inverted — confirm intent.
        if score_threshold is not None:
            results = [
                (doc, score) for doc, score in results 
                if score >= score_threshold
            ]
        
        return results
    
    def delete_documents(self, ids: List[str]) -> bool:
        """Delete the given ids from the collection; returns success flag."""
        if not self.vector_store:
            return False
        
        try:
            # Public delete API rather than _collection.delete.
            self.vector_store.delete(ids=ids)
            return True
        except Exception as e:
            print(f"删除文档失败: {e}")
            return False
    
    def clear(self) -> bool:
        """Drop the persisted collection directory and reset the handle."""
        try:
            if os.path.exists(self.persist_directory):
                shutil.rmtree(self.persist_directory)
            self.vector_store = None
            return True
        except Exception as e:
            print(f"清空向量存储失败: {e}")
            return False


class VectorStoreFactory:
    """Factory that builds a concrete :class:`VectorStore` by type name."""
    
    @staticmethod
    def create_vector_store(
        store_type: str = None,
        **kwargs
    ) -> VectorStore:
        """Create a vector store instance.

        The backend is chosen by *store_type* (falling back to the
        configured ``settings.vector_store.type``); any unrecognized type
        silently falls back to FAISS, the default backend.
        """
        chosen = store_type or settings.vector_store.type
        
        registry = {
            "faiss": FAISSVectorStore,
            "chromadb": ChromaVectorStore,
        }
        
        store_cls = registry.get(chosen, FAISSVectorStore)
        return store_cls(**kwargs)