from math import sqrt
from torch.optim.lr_scheduler import LambdaLR
from torch.optim.adamw import AdamW

class TPLinkerOptimScheduler:
    """AdamW optimizer + per-group LambdaLR scheduler for a TPLinker-style model.

    Parameters are partitioned into three groups by substring match on their
    qualified names from ``model.named_parameters()``:

      1. ``bert.pooler`` / ``tplinker``  -> ``tplinker_lr``
      2. ``bert.encoder``                -> ``bert_lr``
      3. ``bert.embeddings.``            -> 0.0 (embeddings frozen via zero lr)

    Parameters matching none of the keywords are excluded from optimization
    (same behavior as the original implementation).
    """

    def __init__(self, model, dynamic_lr=False, bert_lr=1e-7, tplinker_lr=1e-4) -> None:
        """
        Args:
            model: module whose ``named_parameters()`` are grouped by name.
            dynamic_lr: if True, scale the tplinker/encoder group lrs per
                epoch via ``tplinker_lr_adjust`` / ``encoder_lr_adjust``;
                otherwise keep all group lrs constant.
            bert_lr: base learning rate for the BERT encoder group.
            tplinker_lr: base learning rate for the pooler/TPLinker group.
        """
        # Materialize once instead of re-walking the module tree per group.
        named = list(model.named_parameters())

        def select(keywords):
            # Parameters whose qualified name contains any of the keywords.
            return [p for name, p in named if any(k in name for k in keywords)]

        grouped_parameters = [
            {'params': select(["bert.pooler", "tplinker"]), 'lr': tplinker_lr},
            {'params': select(["bert.encoder"]), 'lr': bert_lr},
            # Embeddings are kept frozen by a zero learning rate.
            {'params': select(["bert.embeddings."]), 'lr': 0.0},
        ]

        # BUGFIX: LambdaLR requires exactly one lambda per param group.
        # The original passed 2 lambdas for 3 groups, which raises
        # ValueError at construction. The frozen embeddings group gets the
        # static (identity) schedule in both modes.
        if dynamic_lr:
            lr_adjust_fn = [self.tplinker_lr_adjust,
                            self.encoder_lr_adjust,
                            self.static_lr_func]
        else:
            lr_adjust_fn = [self.static_lr_func] * len(grouped_parameters)

        # The base lr (1e-6) is a fallback only: every group overrides it.
        self.optimizer = AdamW(grouped_parameters, lr=1e-6, eps=1e-7)
        # Backward-compatible alias for the original (misspelled) attribute.
        self.optomizer = self.optimizer
        self.scheduler = LambdaLR(self.optimizer, lr_adjust_fn)

    def zero_grad(self):
        """Clear accumulated gradients on all optimized parameters."""
        self.optimizer.zero_grad()

    def step(self):
        """Apply one AdamW update step."""
        self.optimizer.step()

    def update_lr(self):
        """Advance the scheduler by one epoch (call once per epoch)."""
        self.scheduler.step()

    def get_lr(self):
        """Return the current per-group learning rates.

        NOTE(review): ``LRScheduler.get_lr()`` is intended for internal use
        and warns when called directly; kept for interface compatibility.
        """
        return self.scheduler.get_lr()

    @staticmethod
    def static_lr_func(epoch_num):
        """Identity schedule: group lr stays at its base value."""
        return 1.0

    @staticmethod
    def tplinker_lr_adjust(epoch_num):
        """Parabolic scale for the TPLinker group: (epoch-2)^2 + 1.

        Starts at 5x, dips to 1x at epoch 2, then grows quadratically.
        """
        return (epoch_num - 2) ** 2 + 1

    @staticmethod
    def encoder_lr_adjust(epoch_num):
        """Encoder warmup gate: frozen (0.0) before epoch 5, then a
        10**sqrt(epoch-4) ramp on top of the base encoder lr."""
        return 0.0 if epoch_num < 5 else 10 ** sqrt(epoch_num - 4)