"""
向量存储 (Vector Store)
负责临时存储和管理向量化数据
"""

import json
import os
import pickle
import numpy as np
from typing import List, Dict, Any, Optional, Tuple
import logging
from datetime import datetime
import shutil

class VectorStore:
    """
    Vector store: temporarily holds vectorized document chunks in memory
    and can persist them to / restore them from a directory on disk.

    ``chunks[i]`` is always the chunk record whose embedding is
    ``vectors[i]``; the two lists are appended to in lockstep.
    """

    def __init__(self, storage_path: str, auto_cleanup: bool = True):
        """
        Initialize the vector store.

        Args:
            storage_path: Directory used for the persisted data files
                (created if it does not exist).
            auto_cleanup: Whether to remove the storage directory
                automatically when the instance is garbage-collected.
        """
        self.storage_path = storage_path
        self.auto_cleanup = auto_cleanup
        self.logger = logging.getLogger(__name__)

        # Make sure the storage directory exists.
        os.makedirs(storage_path, exist_ok=True)

        # In-memory state.
        self.chunks: List[Dict[str, Any]] = []   # document chunk records
        self.vectors: List[List[float]] = []     # matching embedding vectors
        self.metadata: Dict[str, Any] = {}       # filled by save_to_disk()/load_from_disk()

        # On-disk file locations.
        self.chunks_file = os.path.join(storage_path, "chunks.json")
        self.vectors_file = os.path.join(storage_path, "vectors.pkl")
        self.metadata_file = os.path.join(storage_path, "metadata.json")

        self.logger.info("向量存储初始化完成，存储路径: %s", storage_path)

    def add_chunk(self, chunk_id: str, content: str, vector: List[float],
                  elements: List[Dict[str, Any]], level3_number: str) -> bool:
        """
        Add a document chunk together with its embedding vector.

        Args:
            chunk_id: Unique identifier of the chunk.
            content: Text content of the chunk.
            vector: Embedding vector of the chunk.
            elements: Elements contained in the chunk.
            level3_number: Level-3 heading number the chunk belongs to.

        Returns:
            True on success, False if anything went wrong.
        """
        try:
            chunk_data = {
                "chunk_id": chunk_id,
                "content": content,
                "elements": elements,
                "level3_number": level3_number,
                "element_count": len(elements),
                "created_at": datetime.now().isoformat(),
            }

            # Keep chunks and vectors aligned index-for-index.
            self.chunks.append(chunk_data)
            self.vectors.append(vector)

            self.logger.debug("添加文档块: %s, 元素数量: %s", chunk_id, len(elements))
            return True

        except Exception as e:
            self.logger.error("添加文档块失败: %s", e)
            return False

    def save_to_disk(self) -> bool:
        """
        Persist chunks, vectors and metadata to the storage directory.

        Returns:
            True on success, False if anything went wrong.
        """
        try:
            # Chunk records as human-readable JSON.
            with open(self.chunks_file, 'w', encoding='utf-8') as f:
                json.dump(self.chunks, f, ensure_ascii=False, indent=2)

            # Vectors as a pickle (keeps arbitrary float sequences intact).
            with open(self.vectors_file, 'wb') as f:
                pickle.dump(self.vectors, f)

            # Summary metadata describing this snapshot.
            metadata = {
                "total_chunks": len(self.chunks),
                "vector_dimension": len(self.vectors[0]) if self.vectors else 0,
                "created_at": datetime.now().isoformat(),
                "storage_path": self.storage_path,
            }

            with open(self.metadata_file, 'w', encoding='utf-8') as f:
                json.dump(metadata, f, ensure_ascii=False, indent=2)

            self.metadata = metadata
            self.logger.info("向量数据保存成功，共 %s 个块", len(self.chunks))
            return True

        except Exception as e:
            self.logger.error("保存向量数据失败: %s", e)
            return False

    def load_from_disk(self) -> bool:
        """
        Restore chunks, vectors and metadata from the storage directory.

        Returns:
            True on success, False if the files are missing or unreadable.
        """
        try:
            # All three files must be present for a consistent snapshot.
            if not all(os.path.exists(f) for f in [self.chunks_file, self.vectors_file, self.metadata_file]):
                self.logger.warning("向量存储文件不完整，无法加载")
                return False

            with open(self.chunks_file, 'r', encoding='utf-8') as f:
                self.chunks = json.load(f)

            # SECURITY NOTE(review): pickle.load executes arbitrary code from
            # the file; only load vector files this process itself produced.
            with open(self.vectors_file, 'rb') as f:
                self.vectors = pickle.load(f)

            with open(self.metadata_file, 'r', encoding='utf-8') as f:
                self.metadata = json.load(f)

            self.logger.info("向量数据加载成功，共 %s 个块", len(self.chunks))
            return True

        except Exception as e:
            self.logger.error("加载向量数据失败: %s", e)
            return False

    def search_similar(self, query_vector: List[float], top_k: int = 5) -> List[Tuple[Dict[str, Any], float]]:
        """
        Find the stored chunks most similar to a query vector.

        Cosine similarity is computed in one vectorized NumPy pass instead
        of a per-vector Python loop (the query norm is also computed once,
        not per vector).

        Args:
            query_vector: Embedding vector of the query.
            top_k: Maximum number of results to return.

        Returns:
            List of ``(chunk data, similarity score)`` pairs, best first;
            empty list when the store is empty or on error.
        """
        if not self.vectors:
            self.logger.warning("没有可搜索的向量数据")
            return []

        try:
            matrix = np.asarray(self.vectors, dtype=float)   # shape (n, dim)
            query = np.asarray(query_vector, dtype=float)    # shape (dim,)

            # Cosine similarity; vectors with zero norm get similarity 0.0.
            denom = np.linalg.norm(matrix, axis=1) * np.linalg.norm(query)
            dots = matrix @ query
            sims = np.divide(dots, denom,
                             out=np.zeros_like(dots),
                             where=denom > 0)

            # Stable sort on the negated scores keeps insertion order for
            # ties, matching the stable list.sort() this replaces.
            order = np.argsort(-sims, kind="stable")[:top_k]
            results = [(self.chunks[i], float(sims[i])) for i in order]

            self.logger.debug("搜索完成，返回 %s 个结果", len(results))
            return results

        except Exception as e:
            self.logger.error("向量搜索失败: %s", e)
            return []

    def get_chunk_by_id(self, chunk_id: str) -> Optional[Dict[str, Any]]:
        """
        Look up a document chunk by its ID.

        Args:
            chunk_id: Identifier to search for.

        Returns:
            The chunk record, or None when not found.
        """
        return next((chunk for chunk in self.chunks if chunk["chunk_id"] == chunk_id), None)

    def get_chunks_by_level3(self, level3_number: str) -> List[Dict[str, Any]]:
        """
        Return all chunks belonging to a given level-3 heading number.

        Args:
            level3_number: Level-3 heading number to filter on.

        Returns:
            List of matching chunk records (possibly empty).
        """
        return [chunk for chunk in self.chunks if chunk["level3_number"] == level3_number]

    def get_chunk_count(self) -> int:
        """
        Return the number of stored document chunks.

        Returns:
            Chunk count (0 for an empty store).
        """
        # len() already yields 0 for an empty list; no conditional needed.
        return len(self.chunks)

    def get_statistics(self) -> Dict[str, Any]:
        """
        Return summary statistics about the store.

        Returns:
            Dict with chunk/element counts, group count, vector dimension,
            storage path and the last saved/loaded metadata.
        """
        if not self.chunks:
            return {"total_chunks": 0, "total_elements": 0}

        total_elements = sum(chunk["element_count"] for chunk in self.chunks)
        level3_groups = set(chunk["level3_number"] for chunk in self.chunks)

        return {
            "total_chunks": len(self.chunks),
            "total_elements": total_elements,
            "level3_groups": len(level3_groups),
            "vector_dimension": len(self.vectors[0]) if self.vectors else 0,
            "storage_path": self.storage_path,
            "metadata": self.metadata,
        }

    def cleanup(self) -> bool:
        """
        Delete the temporary storage directory (if it exists).

        Returns:
            True on success or when nothing needed deleting, False on error.
        """
        try:
            if os.path.exists(self.storage_path):
                shutil.rmtree(self.storage_path)
                self.logger.info("临时存储文件清理完成: %s", self.storage_path)
            return True

        except Exception as e:
            self.logger.error("清理临时存储文件失败: %s", e)
            return False

    def __del__(self):
        """Destructor: best-effort cleanup of temporary files when enabled."""
        # getattr guards against __init__ having failed before auto_cleanup
        # was assigned; the except guards against modules (os/shutil/logging)
        # already being torn down at interpreter shutdown.
        if getattr(self, "auto_cleanup", False):
            try:
                self.cleanup()
            except Exception:
                pass


if __name__ == "__main__":
    # Manual smoke test for VectorStore.
    import tempfile

    logging.basicConfig(level=logging.INFO)

    # Run against a throwaway directory that is removed on exit.
    with tempfile.TemporaryDirectory() as temp_dir:
        store = VectorStore(temp_dir, auto_cleanup=False)

        # One chunk with a simulated 768-dimensional embedding.
        added = store.add_chunk(
            chunk_id="test_chunk_001",
            content="这是一个测试文档块",
            vector=[0.1] * 768,
            elements=[{"element_id": "test_001", "content": "测试内容"}],
            level3_number="1.1.1",
        )

        if not added:
            print("❌ 添加文档块失败")
        else:
            print("✅ 添加文档块成功")

            # Persist and report the resulting statistics.
            if store.save_to_disk():
                print("✅ 保存到磁盘成功")
                print(f"统计信息: {store.get_statistics()}")
            else:
                print("❌ 保存到磁盘失败")