import os
import time

import torch
from torch import optim
from torch.utils.data import DataLoader
from tqdm import tqdm

from config import Config
from datasets import WaveDataSet
from models import WakeupModule
from offline import annotation

if __name__ == '__main__':
    # Training entry point: builds the wake-word model, prepares the dataset,
    # then runs SGD for Config["epoch_num"] epochs, saving weights after each.
    if not os.path.exists("logs"):
        os.mkdir("logs")
    # Start from scratch when new_training is set; otherwise resume from weights.
    wakeup = WakeupModule(None if Config["new_training"] else Config["weight_path"]).train()
    # wakeup = RnnModule(None if Config["new_training"] else Config["weight_path"]).train()
    # summary(wakeup, input_size=(1, wakeup.wav_size))

    # Regenerate the annotation file from the raw training audio, then load it.
    annotation('../data/train', "../data/annotation.txt")
    dst = WaveDataSet(ann_path="../data/annotation.txt", robust_flag=Config["robust_flag"])
    data_loader = DataLoader(dst, batch_size=Config["batch_size"], shuffle=True, pin_memory=True)

    epoch_num = Config["epoch_num"]
    lr = Config["lr"]

    # Use the module-level `optim` import consistently (was torch.optim.SGD).
    optimizer = optim.SGD(wakeup.parameters(), lr)
    lr_scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=Config["gamma"])

    for epoch in range(epoch_num):
        total_loss = 0
        epoch_time = time.time()
        with tqdm(total=len(data_loader), desc="Epoch %d/%d" % (epoch + 1, epoch_num), postfix=dict) as bar:
            for iteration, (_wave, _label) in enumerate(data_loader):
                # Skip a trailing size-1 batch (e.g. BatchNorm cannot handle it).
                # Since shuffle drops it at the tail, breaking is equivalent to continuing.
                if len(_wave) == 1:
                    break
                loss = wakeup.train_wave(_wave, _label, optimizer)
                total_loss += loss
                # BUG FIX: previously divided by bar.n, which is 0 on the first
                # iteration (bar.update runs below) -> ZeroDivisionError.
                # Use the count of batches completed so far instead.
                bar.set_postfix({'lr': lr_scheduler.get_last_lr(), 'total_loss': total_loss / (iteration + 1)})
                bar.update(1)

        # Guard against an empty dataloader so the summary line cannot divide by zero.
        avg_loss = total_loss / max(bar.total, 1)
        epoch_time = time.time() - epoch_time
        print("Epoch [%d] Use time [%d] Loss: %.4f" % (epoch + 1, epoch_time, avg_loss))
        print('Saving state, iter:', str(epoch + 1))
        wakeup.save_weight('logs/Epoch%d_Total_Loss%.4f.pth' % ((epoch + 1), avg_loss))
        lr_scheduler.step()

    print('Finished!')
