"""
工具函数
"""

import torch
import random

import torch.nn as nn
import numpy as np
import matplotlib.pyplot as plt

from sklearn.metrics import roc_curve, auc


def set_seed(seed):
    """Seed every relevant RNG (Python, NumPy, PyTorch CPU and CUDA).

    Also switches cuDNN into deterministic mode and disables its autotuner
    so repeated runs with the same seed reproduce the same results.
    """
    for seeder in (
        random.seed,
        np.random.seed,
        torch.manual_seed,
        torch.cuda.manual_seed,
        torch.cuda.manual_seed_all,
    ):
        seeder(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False


def save_checkpoint(model, optimizer, scheduler, epoch, auroc, accuracy, filepath):
    """Serialize the full training state to *filepath* via ``torch.save``.

    The checkpoint records epoch, model/optimizer/scheduler state dicts
    (scheduler entry is ``None`` when no scheduler is used), the AUROC and
    accuracy metrics, and the model's ``num_classes`` attribute if present.
    """
    state = {
        "epoch": epoch,
        "model_state_dict": model.state_dict(),
        "optimizer_state_dict": optimizer.state_dict(),
        "scheduler_state_dict": None if scheduler is None else scheduler.state_dict(),
        "auroc": auroc,
        "accuracy": accuracy,
        # getattr with default mirrors the hasattr guard: None when absent.
        "num_classes": getattr(model, "num_classes", None),
    }
    torch.save(state, filepath)
    print(f"Checkpoint saved to {filepath}")


def load_checkpoint(filepath, model, optimizer=None, scheduler=None):
    """Restore training state from *filepath* into the given objects.

    Loads model weights (falling back to a partial, shape-matched load when
    the state dicts disagree), then optionally restores optimizer and
    scheduler state, warning instead of failing on any mismatch.

    Returns the raw checkpoint dict so callers can read epoch/metrics.
    """
    # NOTE(review): weights_only=False unpickles arbitrary objects — only
    # load checkpoints from trusted sources.
    checkpoint = torch.load(filepath, map_location="cpu", weights_only=False)

    # Warn when the checkpoint was trained with a different class count.
    saved_classes = checkpoint.get("num_classes")
    if saved_classes is not None and hasattr(model, "num_classes"):
        if model.num_classes != saved_classes:
            print(
                f"Warning: Model has {model.num_classes} classes but checkpoint has {checkpoint['num_classes']} classes"
            )

    # Restore model weights; on key/shape mismatch keep only compatible entries.
    try:
        model.load_state_dict(checkpoint["model_state_dict"])
    except RuntimeError as e:
        print(f"Warning: Some keys mismatch when loading model: {e}")
        current = model.state_dict()
        saved = checkpoint["model_state_dict"]
        compatible = {
            name: tensor
            for name, tensor in saved.items()
            if name in current and tensor.shape == current[name].shape
        }
        current.update(compatible)
        model.load_state_dict(current)
        print(
            f"Partially loaded model with {len(compatible)}/{len(current)} matched keys"
        )

    # Optimizer/scheduler restoration is best-effort: warn, never raise.
    if optimizer and "optimizer_state_dict" in checkpoint:
        try:
            optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
        except Exception as e:
            print(f"Warning: Could not load optimizer state, {e}")

    if scheduler and checkpoint.get("scheduler_state_dict") is not None:
        try:
            scheduler.load_state_dict(checkpoint["scheduler_state_dict"])
        except Exception as e:
            print(f"Warning: Could not load scheduler state, {e}")

    return checkpoint


def plot_roc_curve(scores, labels, save_path):
    """Render a ROC curve (with AUC in the legend) and write it to *save_path*."""
    fpr, tpr, _ = roc_curve(labels, scores)
    area = auc(fpr, tpr)

    plt.figure(figsize=(8, 6))
    plt.plot(
        fpr, tpr, color="darkorange", lw=2, label=f"ROC curve (AUC = {area:.2f})"
    )
    # Diagonal chance line for reference.
    plt.plot([0, 1], [0, 1], color="navy", lw=2, linestyle="--")
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel("False Positive Rate")
    plt.ylabel("True Positive Rate")
    plt.title("Receiver Operating Characteristic")
    plt.legend(loc="lower right")
    plt.grid(True, alpha=0.3)
    plt.savefig(save_path, dpi=300, bbox_inches="tight")
    plt.close()


def calculate_forgetting(old_acc_before, old_acc_after):
    """Return the forgetting measure: accuracy drop on old tasks (before - after)."""
    drop = old_acc_before - old_acc_after
    return drop


def print_model_info(model):
    """Print parameter statistics for *model*.

    Reports total and trainable parameter counts (with trainable share),
    the ``num_classes`` attribute when present, and a per-layer breakdown
    for Linear and Conv2d modules.
    """
    total = 0
    trainable = 0
    for p in model.parameters():
        n = p.numel()
        total += n
        if p.requires_grad:
            trainable += n
    print(f"Total parameters: {total:,}")
    print(
        f"Trainable parameters: {trainable:,} ({100 * trainable / total:.1f}%)"
    )

    if hasattr(model, "num_classes"):
        print(f"Number of classes: {model.num_classes}")

    # Per-layer counts, restricted to the layer types that carry most weights.
    print("\nLayer-wise parameters:")
    for name, module in model.named_modules():
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            layer_total = sum(p.numel() for p in module.parameters())
            print(f"  {name}: {layer_total:,}")


def plot_learning_curves(train_losses, train_accs, val_accs, save_path):
    """Plot training loss and train/val accuracy side by side; save to *save_path*."""
    xs = range(1, len(train_losses) + 1)

    fig, (loss_ax, acc_ax) = plt.subplots(1, 2, figsize=(12, 4))

    # Left panel: training loss over epochs.
    loss_ax.plot(xs, train_losses, "b-", label="Training Loss")
    loss_ax.set_xlabel("Epoch")
    loss_ax.set_ylabel("Loss")
    loss_ax.set_title("Training Loss")
    loss_ax.legend()
    loss_ax.grid(True, alpha=0.3)

    # Right panel: training vs. validation accuracy.
    acc_ax.plot(xs, train_accs, "b-", label="Training Accuracy")
    acc_ax.plot(xs, val_accs, "r-", label="Validation Accuracy")
    acc_ax.set_xlabel("Epoch")
    acc_ax.set_ylabel("Accuracy (%)")
    acc_ax.set_title("Accuracy")
    acc_ax.legend()
    acc_ax.grid(True, alpha=0.3)

    plt.tight_layout()
    plt.savefig(save_path, dpi=300, bbox_inches="tight")
    plt.close()
