from typing import List, Union
import torch
from sentence_transformers import SentenceTransformer
from .base import BaseEmbedding

class TransformerEmbedding(BaseEmbedding):
    """Singleton text-embedding encoder backed by sentence-transformers.

    Weights are downloaded once through ModelScope (reachable without a
    proxy from mainland China) and cached on disk; every subsequent
    instantiation returns the same already-loaded instance.
    """

    # Cached singleton instance; None until the first construction.
    _instance = None

    def __new__(cls, *args, **kwargs):
        # Bug fix: the original returned the cached instance from
        # __init__, which raises TypeError on the second instantiation
        # (__init__ must return None). Singleton interception belongs here.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self,
                 model_name: str = "BAAI/bge-m3",
                 device: str = None,
                 cache_dir: str = "models/embeddings"):
        """Initialize the transformer encoder.

        Args:
            model_name: model name or local path.
            device: 'cuda' or 'cpu'; auto-detected when None.
            cache_dir: directory where downloaded weights are cached.
        """
        # The shared instance may already be initialized — __init__ runs
        # on every TransformerEmbedding(...) call even for the singleton.
        if getattr(self, "_initialized", False):
            return

        import os
        # Route Hugging Face downloads through a mirror, but respect an
        # endpoint the user may already have configured in the environment.
        os.environ.setdefault('HF_ENDPOINT', "https://hf-mirror.com")

        os.makedirs(cache_dir, exist_ok=True)
        self.model_name = model_name
        self.device = device or ('cuda' if torch.cuda.is_available() else 'cpu')

        # _model stays None until _load_model succeeds.
        self._model = None
        self._load_model(cache_dir)

        # One throwaway encode so the first real request is not slow.
        self._warmup()

        self._initialized = True

    def _load_model(self, cache_dir: str) -> None:
        """Download (if needed) and load the model; re-raises on failure.

        Args:
            cache_dir: directory passed to the ModelScope snapshot cache.
        """
        try:
            print(f"正在加载模型 {self.model_name}...")
            # Download via ModelScope first, then load the local snapshot
            # with sentence-transformers.
            from modelscope import snapshot_download
            model_dir = snapshot_download(self.model_name, cache_dir=cache_dir)
            # Bug fix: honor the detected device — the original dropped
            # self.device on this code path.
            self._model = SentenceTransformer(
                model_dir,
                device=self.device,
                trust_remote_code=True,
            )
        except Exception as e:
            # Log and propagate: a missing model is unrecoverable here.
            print(f"模型加载失败: {str(e)}")
            raise

    def _warmup(self, sample_text: str = "模型预热测试文本") -> None:
        """Run one dummy encode to pay JIT/allocation costs up front."""
        print("正在进行模型预热...")
        _ = self._model.encode(sample_text)

    def encode(self, texts: Union[str, List[str]], **kwargs) -> Union[List[float], List[List[float]]]:
        """Encode text into embedding vectors.

        Args:
            texts: a single string or a list of strings.
            **kwargs: extra keyword arguments forwarded to the model's
                ``encode`` (e.g. ``batch_size``, ``normalize_embeddings``).

        Returns:
            A list of embedding vectors, one per input text (a single
            string is treated as a one-element batch).

        Raises:
            Exception: whatever the underlying model raises; logged first.
        """
        # Normalize to batch form so the model always sees a list.
        if isinstance(texts, str):
            texts = [texts]

        try:
            embeddings = self._model.encode(
                texts,
                convert_to_numpy=True,
                show_progress_bar=False,
                **kwargs
            )
            # Plain Python lists keep callers free of the numpy dependency.
            return embeddings.tolist()
        except Exception as e:
            print(f"生成向量嵌入时出错: {str(e)}")
            raise

    @property
    def dimension(self) -> int:
        """Dimensionality of the vectors produced by :meth:`encode`."""
        return self._model.get_sentence_embedding_dimension()