"""
BERT模型定义模块
包含意图分类的BERT模型和相关配置
"""

import torch
import torch.nn as nn
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    AutoConfig
)
from typing import Dict, Optional
import logging

logger = logging.getLogger(__name__)


class BertIntentClassifier(nn.Module):
    """BERT-based intent classifier.

    Wraps a Hugging Face ``AutoModelForSequenceClassification`` checkpoint,
    trying a list of fallback checkpoints when the primary one cannot be
    loaded (e.g. no network and not cached), and optionally replaces the
    stock linear head with a small MLP classifier.
    """

    def __init__(self,
                 model_name: str = "bert-base-chinese",
                 num_labels: int = 8,
                 dropout_rate: float = 0.1,
                 hidden_size: Optional[int] = None):
        """Initialize the BERT intent classifier.

        Args:
            model_name: Hugging Face model name or local checkpoint path.
            num_labels: Number of intent classes.
            dropout_rate: Dropout probability applied to hidden states and
                attention weights.
            hidden_size: Width of an optional extra MLP classification head;
                ``None`` keeps the model's default linear classifier.

        Raises:
            RuntimeError: If neither ``model_name`` nor any fallback
                checkpoint can be loaded.
        """
        super().__init__()

        self.model_name = model_name
        self.num_labels = num_labels
        self.dropout_rate = dropout_rate

        # Load config and pretrained weights; on failure (typically a network
        # error) fall back to alternative checkpoints that may be cached.
        try:
            logger.info(f"尝试加载 BERT 配置: {model_name}")
            self.config = AutoConfig.from_pretrained(
                model_name,
                num_labels=num_labels,
                hidden_dropout_prob=dropout_rate,
                attention_probs_dropout_prob=dropout_rate
            )

            logger.info(f"尝试加载 BERT 模型: {model_name}")
            self.bert = AutoModelForSequenceClassification.from_pretrained(
                model_name,
                config=self.config
            )
            logger.info(f"BERT模型加载成功: {model_name}")

        except Exception as e:
            logger.warning(f"加载 {model_name} 失败: {e}")
            logger.info("尝试使用备用方案...")

            # Fallback checkpoints, tried in order; they may already be in
            # the local HF cache even when the network is unreachable.
            fallback_models = [
                "hfl/chinese-bert-wwm-ext",
                "bert-base-multilingual-cased",
                "distilbert-base-multilingual-cased",
            ]

            loaded = False
            for fallback_model in fallback_models:
                try:
                    logger.info(f"尝试备用模型: {fallback_model}")
                    self.config = AutoConfig.from_pretrained(
                        fallback_model,
                        num_labels=num_labels,
                        hidden_dropout_prob=dropout_rate,
                        attention_probs_dropout_prob=dropout_rate
                    )

                    self.bert = AutoModelForSequenceClassification.from_pretrained(
                        fallback_model,
                        config=self.config
                    )

                    # Record which checkpoint actually backs this instance.
                    self.model_name = fallback_model
                    logger.info(f"成功加载备用模型: {fallback_model}")
                    loaded = True
                    break
                except Exception as fallback_e:
                    logger.warning(f"备用模型 {fallback_model} 也失败: {fallback_e}")
                    continue

            if not loaded:
                logger.error("所有模型都无法加载")
                # Chain the original failure so the root cause is preserved.
                raise RuntimeError(f"无法加载任何BERT模型，请检查网络连接或下载模型到本地。原错误: {e}") from e

        # Optionally swap in a deeper classification head.
        if hidden_size is not None:
            self.classifier = nn.Sequential(
                nn.Linear(self.config.hidden_size, hidden_size),
                nn.ReLU(),
                nn.Dropout(dropout_rate),
                nn.Linear(hidden_size, num_labels)
            )
            # Replace the model's built-in classifier.
            self.bert.classifier = self.classifier

        # Log self.model_name (not the parameter) so a loaded fallback
        # checkpoint is reported correctly.
        logger.info(f"BERT模型初始化完成: {self.model_name}")
        logger.info(f"标签数量: {num_labels}")
        logger.info(f"隐层大小: {self.config.hidden_size}")

    def forward(self, input_ids, attention_mask=None, token_type_ids=None, labels=None, **kwargs):
        """Run a forward pass.

        Args:
            input_ids: Input token IDs.
            attention_mask: Attention mask.
            token_type_ids: Token type IDs (distinguish sentence A / B).
            labels: Labels (used during training to compute the loss).
            **kwargs: Extra arguments, accepted but ignored.

        Returns:
            The underlying model's sequence-classification output.
        """
        outputs = self.bert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            labels=labels
        )
        return outputs

    def get_embeddings(self, input_ids, attention_mask=None, token_type_ids=None):
        """Return sentence embeddings (no gradients).

        Args:
            input_ids: Input token IDs.
            attention_mask: Attention mask.
            token_type_ids: Token type IDs.

        Returns:
            The [CLS] token hidden state for each input sequence.
        """
        with torch.no_grad():
            # ``base_model`` resolves to the backbone for any architecture
            # (bert/distilbert/...); the previous hard-coded ``self.bert.bert``
            # broke when a fallback (e.g. DistilBERT) checkpoint was loaded.
            outputs = self.bert.base_model(
                input_ids=input_ids,
                attention_mask=attention_mask,
                token_type_ids=token_type_ids
            )
            # Use the [CLS] token embedding as the sentence embedding.
            sentence_embedding = outputs.last_hidden_state[:, 0, :]
            return sentence_embedding

    def save_model(self, save_path: str):
        """Save the underlying model (weights + config) to ``save_path``.

        Args:
            save_path: Target directory.
        """
        self.bert.save_pretrained(save_path)
        logger.info(f"模型已保存到: {save_path}")

    @classmethod
    def load_model(cls, model_path: str, num_labels: Optional[int] = None):
        """Load a previously saved model.

        Args:
            model_path: Directory containing the saved checkpoint.
            num_labels: Number of labels; ``None`` keeps the saved value.

        Returns:
            A ``BertIntentClassifier`` instance backed by the checkpoint.

        Raises:
            Exception: Re-raised from the underlying load failure.
        """
        try:
            # Load the saved config.
            config = AutoConfig.from_pretrained(model_path)
            if num_labels is not None:
                config.num_labels = num_labels

            # Not every config has ``hidden_dropout_prob`` (DistilBERT uses
            # ``dropout``); fall back gracefully instead of raising.
            dropout_rate = getattr(config, 'hidden_dropout_prob',
                                   getattr(config, 'dropout', 0.1))

            # Build the model instance from the checkpoint.
            model = cls(
                model_name=model_path,
                num_labels=config.num_labels,
                dropout_rate=dropout_rate
            )

            logger.info(f"模型从 {model_path} 加载成功")
            return model

        except Exception as e:
            logger.error(f"加载模型失败: {e}")
            raise


class ModelConfig:
    """Configuration container for model, training and evaluation settings."""

    def __init__(self, **kwargs):
        """Build a configuration, using defaults for any key not supplied.

        Supported keys include:
        - model_name: BERT model name
        - num_labels: number of classification labels
        - max_length: maximum sequence length
        - dropout_rate: dropout probability
        - learning_rate: learning rate
        - batch_size: batch size
        - num_epochs: number of training epochs
        - warmup_steps: warmup steps
        - weight_decay: weight decay
        """
        defaults = {
            # Model settings
            'model_name': 'bert-base-chinese',
            'num_labels': 8,
            'max_length': 128,
            'dropout_rate': 0.1,
            'hidden_size': None,
            # Training settings
            'learning_rate': 2e-5,
            'batch_size': 16,
            'num_epochs': 3,
            'warmup_steps': 100,
            'weight_decay': 0.01,
            # Evaluation settings
            'eval_steps': 100,
            'save_steps': 500,
            'logging_steps': 50,
            # Output settings
            'output_dir': './models/checkpoints',
            'save_total_limit': 2,
            'load_best_model_at_end': True,
            'metric_for_best_model': 'accuracy',
            # Device settings
            'device': 'cuda' if torch.cuda.is_available() else 'cpu',
            'fp16': False,
            # Random seed
            'seed': 42,
        }
        for key, default in defaults.items():
            setattr(self, key, kwargs.get(key, default))

    def to_dict(self) -> Dict:
        """Return all public, non-callable attributes as a plain dict."""
        result = {}
        for attr in dir(self):
            if attr.startswith('_'):
                continue
            value = getattr(self, attr)
            if not callable(value):
                result[attr] = value
        return result

    def save_config(self, file_path: str):
        """Persist this configuration as JSON.

        Args:
            file_path: Destination path; parent directories are created.
        """
        import json
        from pathlib import Path

        target = Path(file_path)
        target.parent.mkdir(parents=True, exist_ok=True)

        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(self.to_dict(), f, ensure_ascii=False, indent=2)

        logger.info(f"模型配置已保存到: {file_path}")

    @classmethod
    def load_config(cls, file_path: str):
        """Load a configuration from a JSON file.

        Args:
            file_path: Path to the JSON configuration file.

        Returns:
            A ``ModelConfig`` instance.
        """
        import json

        with open(file_path, 'r', encoding='utf-8') as f:
            return cls(**json.load(f))


def create_model_from_config(config: ModelConfig) -> BertIntentClassifier:
    """Build a ``BertIntentClassifier`` from a configuration object.

    Args:
        config: Model configuration providing name, labels, dropout,
            hidden size and target device.

    Returns:
        A BERT model instance moved to ``config.device``.
    """
    classifier = BertIntentClassifier(
        model_name=config.model_name,
        num_labels=config.num_labels,
        dropout_rate=config.dropout_rate,
        hidden_size=config.hidden_size,
    )
    # Place the model on the configured device before returning.
    return classifier.to(config.device)


def get_model_info(model: "BertIntentClassifier") -> Dict:
    """Collect summary statistics for a model.

    Args:
        model: Model exposing ``model_name``, ``num_labels``, ``config``
            and ``parameters()``.

    Returns:
        Dict with parameter counts, estimated in-memory size (MB) and the
        model config (as a dict when available, otherwise its ``str``).
    """
    total_params = sum(p.numel() for p in model.parameters())
    trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    # Use each parameter's actual element size instead of assuming float32,
    # so the estimate stays correct for fp16/bf16 models. For float32 this
    # is identical to the previous ``total_params * 4``.
    size_bytes = sum(p.numel() * p.element_size() for p in model.parameters())

    info = {
        'model_name': model.model_name,
        'num_labels': model.num_labels,
        'total_parameters': total_params,
        'trainable_parameters': trainable_params,
        'model_size_mb': size_bytes / (1024 * 1024),
        'config': model.config.to_dict() if hasattr(model.config, 'to_dict') else str(model.config)
    }

    return info


if __name__ == "__main__":
    # 测试模型创建
    config = ModelConfig(
        model_name='bert-base-chinese',
        num_labels=8,
        max_length=128
    )
    
    print("创建模型配置:")
    print(config.to_dict())
    
    # 创建模型
    model = create_model_from_config(config)
    
    # 获取模型信息
    model_info = get_model_info(model)
    print(f"\n模型信息:")
    for key, value in model_info.items():
        if key != 'config':
            print(f"  {key}: {value}")
    
    print(f"\n模型创建成功!") 