import torch.optim

from utils.Save import SavingMethod
from utils.timeloader import getLoader
from params.CNN_ATTENTION_params import *
from models.CNN_ATTENTION import MODEL
import torch
def main():
    """Train MODEL (CNN + attention) on bearings 1 and 2, logging/saving each
    epoch via SavingMethod and rendering figures against test bearing 5.

    Relies on hyperparameters star-imported from params.CNN_ATTENTION_params:
    batch_size, bearing_condition, window, input_dim, num_hiddens, num_head,
    lr, epochs.
    """
    test_bearing = 5
    # Hoist the device so the script also runs on CPU-only machines;
    # behavior on CUDA hosts is unchanged.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    loader1 = getLoader('./learn_files', batch_size=batch_size, bearing_condition=bearing_condition, bearing_label=1, window=window)
    loader2 = getLoader('./learn_files', batch_size=batch_size, bearing_condition=bearing_condition, bearing_label=2, window=window)
    test_loader = getLoader('./test_files', batch_size=batch_size, bearing_condition=bearing_condition, bearing_label=test_bearing, window=window)

    model = MODEL(input_dim, num_hiddens, num_head, window).to(device)

    optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=0.0001)
    loss_fun = torch.nn.L1Loss().to(device)

    Saving = SavingMethod(model_name=['./models/CNN_ATTENTION.py', './params/Attention_CNN_FC_params.py'])

    for epoch in range(epochs):
        total_loss = 0.0
        for bearing in (loader1, loader2):
            # Loader yields (signal, unused target y, label l); only x and l
            # are consumed — presumably y is RUL/aux data. TODO confirm.
            for x, y, l in bearing:
                x, l = x.to(device), l.to(device)
                l_pre = model(x)
                loss = loss_fun(l_pre, l)
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
                # .item() detaches the scalar from the autograd graph.
                # Accumulating the raw tensor (the original code) retains every
                # batch's computation graph for the whole epoch — a memory leak.
                total_loss += loss.item()
        # NOTE(review): this log labels the loss with test_bearing (5) even
        # though training ran on bearings 1 and 2 — message kept as-is.
        print('epoch:{},bearing:{},loss:{}'.format(epoch, str(test_bearing), total_loss))
        Saving.savetrain_loss([str(epoch), '1', '12', '', '', str(total_loss)])
        Saving.savefig(model, test_loader, epoch)
        # Pause for operator confirmation every 10 epochs (fires at epoch 0 too).
        if epoch % 10 == 0:
            input('please input:')
            print('start')


# Run training only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()