import torch
import numpy as np
import random
import math
from copy import deepcopy 


def set_seed(seed=2025):
    """Seed every RNG in use (python, numpy, torch CPU/CUDA) for reproducibility.

    Also forces cuDNN into deterministic mode and disables its autotuner,
    trading some speed for repeatable results.
    """
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False


def one_cycle(y1=0.0, y2=1.0, steps=100):
    """
    Return a callable giving a cosine ("one-cycle") ramp from y1 to y2 over 'steps'.

    At x=0 the value is y1; at x=steps it is y2, following half a cosine wave.
    See https://arxiv.org/pdf/1812.01187.pdf for details.
    """
    def ramp(x):
        return ((1 - math.cos(x * math.pi / steps)) / 2) * (y2 - y1) + y1

    return ramp


class ModelEMA:
    """Exponential moving average of a model's weights.

    Keeps a frozen, eval-mode copy of the model whose floating-point state
    tensors track the training model via `ema = d * ema + (1 - d) * model`,
    where the decay `d` warms up from 0 toward `decay` as updates accumulate.
    """

    def __init__(self, model, decay=0.9999, tau=2000, updates=0):
        # Frozen copy of the model, kept in eval mode.
        self.ema = deepcopy(model).eval()
        self.updates = updates
        # Decay schedule: small early in training, approaching `decay` later.
        self.decay = lambda x: decay * (1 - math.exp(-x / tau))
        for p in self.ema.parameters():
            p.requires_grad_(False)

    def update(self, model):
        # Bump the update counter and look up the current decay factor.
        self.updates += 1
        d = self.decay(self.updates)
        msd = model.state_dict()
        # Blend each floating-point EMA tensor toward the live model in place.
        for key, ema_v in self.ema.state_dict().items():
            if not ema_v.dtype.is_floating_point:
                continue
            ema_v *= d
            ema_v += (1. - d) * msd[key].detach()

    def update_attr(self, model, include=(), exclude=("process_group", "reducer")):
        # Mirror plain (non-private) attributes from the model onto the EMA copy.
        for name, value in model.__dict__.items():
            skip = (len(include) and name not in include) or name.startswith("_") or name in exclude
            if not skip:
                setattr(self.ema, name, value)


class EarlyStopping:
    """Signal a training stop when accuracy has not improved for `patience` epochs.

    Call the instance with (acc, epoch) each epoch; it returns True once the
    best accuracy is at least `patience` epochs old. `possible_stop` flags the
    epoch right before the stop would trigger.
    """

    def __init__(self, patience=30):
        self.best_acc = 0
        self.best_epoch = 0
        self.patience = patience
        self.possible_stop = False

    def __call__(self, acc, epoch):
        # Track the best accuracy seen so far and when it happened.
        if acc > self.best_acc:
            self.best_acc, self.best_epoch = acc, epoch

        epochs_since_best = epoch - self.best_epoch
        # One epoch of headroom left before stopping.
        self.possible_stop = epochs_since_best >= self.patience - 1
        return epochs_since_best >= self.patience


if __name__ == "__main__":
    # Quick sanity check of class-weight broadcasting:
    # (2, 3, 80) counts * (1, 80) weights -> (2, 3, 80); summing axis 1 -> (2, 80).
    counts = np.arange(2 * 3 * 80).reshape(2, 3, 80)
    weights = np.arange(80).reshape(1, -1)
    weighted = counts * weights
    print(weighted.sum(axis=1).shape)
            
