#########################################################################################
import time
import parameters as p
from torch import nn
from torch import optim
from model import EncoderDecoder
from model import Transformer



# Hyperparameters re-exported from the shared `parameters` module so the
# schedule code below can reference them as plain module-level names.
d_model = p.D_MODEL  # model/embedding dimension fed into the Noam lr formula
warm_up = p.LR_WARM_UP  # number of warm-up steps for the Noam lr schedule





def run_epoch(data_iter, model: nn.Module, loss_compute):
    """Run the model over one epoch of `data_iter` and return the mean per-token loss.

    Args:
        data_iter: iterable of batches; each batch must expose
            `src`, `tgt`, `src_mask`, `tgt_mask`, `tgt_y`, `ntokens`.
        model: the seq2seq model; called as `model(src, tgt, src_mask, tgt_mask)`.
        loss_compute: callable `(out, tgt_y, ntokens) -> loss` that computes
            (and, when training, backpropagates/steps) the loss for one batch.

    Returns:
        total accumulated loss divided by total token count.
        NOTE(review): raises ZeroDivisionError on an empty `data_iter` — confirm
        callers never pass one.
    """
    start = time.time()
    total_tokens = 0
    total_loss = 0
    tokens = 0
    for i, batch in enumerate(data_iter):
        # Call the module itself rather than `.forward(...)` so that
        # nn.Module.__call__ runs (forward/backward hooks are honored).
        out = model(batch.src, batch.tgt, batch.src_mask, batch.tgt_mask)
        loss = loss_compute(out, batch.tgt_y, batch.ntokens)
        total_loss += loss
        total_tokens += batch.ntokens
        tokens += batch.ntokens
        # Periodic progress report; `tokens`/`start` are reset each report so
        # the throughput figure covers only the most recent window.
        if i % 50 == 1:
            elapsed = time.time() - start
            print(f"Epoch Step: {i} Loss: {loss / batch.ntokens} Tokens per Sec: {tokens / elapsed }")

            start = time.time()
            tokens = 0
    return total_loss / total_tokens

# Noam learning-rate schedule wrapper (Vaswani et al. 2017, "Attention Is All You Need").
class NoamOpt:
    def __init__(self,d_model:int,warm_up:int,optimizer,factor=1) -> None:
        self.d_model = d_model
        self.warm_up = warm_up
        self.optimizer = optimizer
        self.factor = factor

        self._step = 0
        self._rate = 0

    def step(self):
        self._step += 1
        rate = self.rate()

        for p in self.optimizer.param_groups:
            p['lr'] = rate
        self._rate = rate
        self.optimizer.step()
   

    def rate(self,step:int|None=None):
        
        if step is None:
            step = self._step

        return self.factor *\
              (self.d_model ** (-0.5) * min(step ** (-0.5) , step*self.warm_up ** (-1.5)))

def get_std_opt(model: EncoderDecoder):
    """Build the standard Noam-wrapped Adam optimizer for `model`.

    Uses the Transformer paper's settings: Adam with lr=0 (the schedule
    drives the actual rate), betas=(0.9, 0.98), eps=1e-9, and factor=2,
    with the module-level `d_model` / `warm_up` hyperparameters.
    """
    # The original `global d_model, warm_up` was removed: `global` is only
    # required for assignment, and this function merely reads the names.
    adam = optim.Adam(model.parameters(), lr=0, betas=(0.9, 0.98), eps=1e-9)
    return NoamOpt(d_model=d_model, warm_up=warm_up, optimizer=adam, factor=2)


class LableSmoothing(nn.Module):
    """Label-smoothing loss: KL divergence against a smoothed target distribution.

    The true class gets probability `1 - smoothing`; the remaining `smoothing`
    mass is spread uniformly over the other non-padding classes. The padding
    index never receives probability mass, and rows whose target IS the
    padding index contribute zero loss.

    NOTE(review): the class name misspells "Label"; kept as-is because it is
    the public name callers import. The original was an empty stub that
    silently discarded its arguments — this implements the intended loss.

    Args:
        size: vocabulary size (number of classes, dim 1 of the input).
        padding_idx: class index reserved for padding.
        smoothing: total probability mass moved off the true class.
    """

    def __init__(self, size, padding_idx, smoothing=0.0, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # 'sum' reduction: callers typically normalize by token count themselves.
        self.criterion = nn.KLDivLoss(reduction='sum')
        self.size = size
        self.padding_idx = padding_idx
        self.smoothing = smoothing
        self.confidence = 1.0 - smoothing  # mass kept on the true class
        self.true_dist = None  # last smoothed target distribution (for inspection)

    def forward(self, x, target):
        """Compute the smoothed KL loss.

        Args:
            x: log-probabilities, shape (batch, size).
            target: true class indices, shape (batch,).

        Returns:
            scalar tensor: summed KL divergence over the batch.
        """
        if x.size(1) != self.size:
            raise ValueError(f"expected {self.size} classes, got {x.size(1)}")
        # Uniform share over the `size - 2` classes that are neither the true
        # class nor the padding class.
        true_dist = x.detach().clone()
        true_dist.fill_(self.smoothing / (self.size - 2))
        true_dist.scatter_(1, target.detach().unsqueeze(1), self.confidence)
        true_dist[:, self.padding_idx] = 0
        # Zero out entire rows whose target is the padding index.
        pad_rows = (target == self.padding_idx).nonzero()
        if pad_rows.numel() > 0:
            true_dist.index_fill_(0, pad_rows.squeeze(1), 0.0)
        self.true_dist = true_dist
        return self.criterion(x, true_dist)



if __name__ == '__main__':
    # Visual sanity check: plot the Noam schedule for three
    # (warm-up, factor) configurations over the first ~20k steps.
    import matplotlib.pyplot as plt
    import numpy as np

    schedules = [
        NoamOpt(512, 4000, None, 1),
        NoamOpt(512, 8000, None, 1),
        NoamOpt(512, 4000, None, 2),
    ]

    steps = np.arange(1, 20000)
    rates = [[sched.rate(step) for sched in schedules] for step in range(1, 20000)]
    plt.plot(steps, rates)
    plt.legend(['4000:1', '8000:1', '4000:2'])
    plt.show()
