import torch
import torch.nn as nn

def not_freeze(name, freeze):
    """Return True if *name* contains none of the substrings in *freeze*.

    Args:
        name: a parameter name (e.g. from ``model.named_parameters()``).
        freeze: iterable of substrings marking parameters to exclude.

    Returns:
        bool: True when the parameter should NOT be frozen/excluded.
    """
    # Idiomatic replacement for the manual early-return loop.
    return not any(f in name for f in freeze)


class SynapticIntelligence:
    """Synaptic Intelligence (SI) style regularizer for continual learning.

    Keeps a per-parameter importance estimate (``omega``) and penalizes
    later deviation from a parameter snapshot, in the spirit of
    Zenke et al., "Continual Learning Through Synaptic Intelligence".
    NOTE(review): this variant maintains a *running average* of
    ``|delta_w * grad|`` per step rather than the exact SI path integral.
    """

    def __init__(self, model, device, lambda_=1.0):
        """
        Args:
            model: nn.Module whose named parameters are tracked.
            device: torch device on which omega and snapshots are stored.
            lambda_: strength of the quadratic penalty term.
        """
        self.device = device
        self.model = model
        self.lambda_ = lambda_
        # Per-parameter importance estimates, initialized to zero.
        # zeros_like(..., device=...) allocates directly on the target
        # device instead of allocating then copying with .to().
        self.omega = {n: torch.zeros_like(p, device=device)
                      for n, p in model.named_parameters()}
        # Snapshot of the parameters; serves both as the reference for
        # delta_w in update_omega and as the anchor of the penalty.
        self.old_params = {n: p.clone().detach().to(device)
                           for n, p in model.named_parameters()}
        self.step = 1  # step counter driving the running average

    def update_omega(self):
        """Update importance estimates after a backward pass.

        Call after gradients exist (post-``backward()``) and before the
        optimizer clears/overwrites them. Also refreshes ``old_params``
        so the penalty is anchored at the current parameter values.
        """
        with torch.no_grad():
            for name, param in self.model.named_parameters():
                if param.grad is not None:
                    delta_w = param - self.old_params[name]
                    # Incremental running average of |delta_w * grad|
                    # over all steps seen so far.
                    self.omega[name] = (
                        self.omega[name] * (self.step - 1)
                        + torch.abs(delta_w * param.grad)
                    ) / self.step
                self.old_params[name] = param.clone().detach()
            self.step += 1

    def penalty_loss(self, lambda_=None, freeze=()):
        """Compute the SI quadratic penalty term.

        Args:
            lambda_: if given, replaces *and persists as* ``self.lambda_``
                (kept for backward compatibility; note the side effect).
            freeze: substrings of parameter names to exclude from the
                penalty. Default is an empty tuple (immutable — avoids
                the mutable-default-argument pitfall of ``freeze=[]``).

        Returns:
            A 1-element tensor: ``lambda_ * sum(omega * (p - p_old)**2)``.
        """
        loss = torch.zeros(1, device=self.device)
        if lambda_ is not None:
            self.lambda_ = lambda_
        for name, param in self.model.named_parameters():
            # HACK: names containing '24' are unconditionally skipped —
            # presumably a model-specific layer index; verify intent.
            if all(f not in name for f in freeze) and '24' not in name:
                term = (self.omega[name]
                        * (param - self.old_params[name]) ** 2).sum()
                # isfinite == (not isinf) and (not isnan): drop degenerate
                # terms so one bad parameter cannot poison the total.
                if torch.isfinite(term):
                    loss += term
        return self.lambda_ * loss






