import torch
from torch.utils.tensorboard import SummaryWriter
from torch import nn
from torch.utils.data import DataLoader
import torchvision
from config.Params import Params
from dataset.Parser import MotoAngleSolverDataset
from model.SimpleDense import AngleSolver
from modules.AverageMeter import AverageMeter
from tqdm import tqdm

class Trainer:
    """Train AngleSolver on the MotoAngleSolver dataset with MSE loss.

    Owns the datasets, dataloaders, model, optimizer, cosine LR schedule,
    TensorBoard writer, and a plain-text log file. Checkpoints the model
    whenever the train or validation loss reaches a new minimum.
    """

    def __init__(self):
        self.params = Params()

        # Prefer GPU when available.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # TensorBoard writer for the loss curves.
        self.writer = SummaryWriter(self.params.log_path)

        # Datasets.
        train_dataset = MotoAngleSolverDataset(path=self.params.train_dataset_path)
        test_dataset = MotoAngleSolverDataset(path=self.params.test_dataset_path)

        # Dataloaders. The validation loader is NOT shuffled: shuffling
        # adds nothing to an averaged metric and makes runs irreproducible.
        self.train_dataloader = DataLoader(dataset=train_dataset,
                                           batch_size=self.params.batch_size,
                                           shuffle=True,
                                           num_workers=self.params.num_workers)
        self.test_dataloader = DataLoader(dataset=test_dataset,
                                          batch_size=self.params.batch_size,
                                          shuffle=False,
                                          num_workers=self.params.num_workers)

        # Model on the selected device.
        self.model = AngleSolver().to(self.device)

        # Loss function. nn.MSELoss has no parameters, so .to() is a no-op;
        # kept for symmetry with the model.
        self.mse_loss = nn.MSELoss().to(self.device)

        # Optimizer and a cosine-annealing LR schedule over the whole run.
        self.optim = torch.optim.Adam(self.model.parameters(),
                                      lr=self.params.lr,
                                      weight_decay=self.params.w_decay)
        self.scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            self.optim,
            T_max=self.params.epoch,
            eta_min=self.params.min_lr)

    def train(self):
        """Run the full training loop, checkpointing the best models.

        Validates every ``params.valid_epoch`` epochs (including epoch 0)
        and logs both curves to TensorBoard.
        """
        # float("inf") instead of the old arbitrary sentinel (1000):
        # guarantees the first epoch always produces a checkpoint, even
        # when its loss exceeds 1000.
        min_train_loss = float("inf")
        min_valid_loss = float("inf")

        for epoch in tqdm(range(self.params.epoch), "Train Process"):
            train_loss = self.train_epoch(epoch=epoch)
            if train_loss < min_train_loss:
                min_train_loss = train_loss
                # NOTE(review): this pickles the entire module object.
                # Saving model.state_dict() is the recommended format, but
                # the current one is kept so existing loaders still work.
                torch.save(self.model, self.params.train_model_save_path)
            self.draw_board("Train Loss Curve", {"train loss": train_loss}, epoch)
            self.scheduler.step()

            if epoch % self.params.valid_epoch == 0:
                valid_loss = self.validate(epoch=epoch)
                if valid_loss < min_valid_loss:
                    min_valid_loss = valid_loss
                    torch.save(self.model, self.params.valid_model_save_path)
                self.draw_board("Valid Loss Curve", {"valid loss": valid_loss}, epoch)

        # Make sure buffered TensorBoard events reach disk.
        self.writer.flush()
        print("Finish training.")

    def train_epoch(self, epoch):
        """Train for one epoch.

        Args:
            epoch: zero-based epoch index, used only for logging.

        Returns:
            The sample-weighted mean batch loss over the epoch.
        """
        self.model.train()
        loss_avg = AverageMeter()

        for i, (x, y) in enumerate(self.train_dataloader):
            x = x.to(self.device)
            y = y.to(self.device)
            out = self.model(x)

            loss = self.mse_loss(out, y)
            self.optim.zero_grad()
            loss.backward()
            self.optim.step()

            # MSELoss already reduces to a scalar; .item() alone suffices.
            loss_avg.update(loss.item(), x.shape[0])

            if i % self.params.report_batch == 0:
                # Current LR of the single param group built in __init__;
                # fetched only when we actually report.
                lr = self.optim.param_groups[-1]["lr"]
                log = "Epoch:[{0}][{1}/{2}] | Loss:{loss.val:.4f}({loss.avg:.4f}) | Lr:{lr}".format(
                    epoch, i, len(self.train_dataloader), loss=loss_avg, lr=lr)
                if self.params.print_log:
                    print(log)
                self.save_to_log(log)

        return loss_avg.avg

    def validate(self, epoch):
        """Evaluate on the test set without gradient tracking.

        Args:
            epoch: zero-based epoch index, used only for logging.

        Returns:
            The sample-weighted mean validation loss.
        """
        self.model.eval()
        loss_avg = AverageMeter()

        with torch.no_grad():
            # (The old per-batch param_groups/lr loop was dead code here
            # — the value was never used — and has been removed.)
            for x, y in self.test_dataloader:
                x = x.to(self.device)
                y = y.to(self.device)
                out = self.model(x)

                loss = self.mse_loss(out, y)
                loss_avg.update(loss.item(), x.shape[0])

        log = "Epoch:[{0}] | Loss:{loss.avg:.4f}".format(epoch, loss=loss_avg)
        if self.params.print_log:
            print(log)
        self.save_to_log(log)

        separator = "*" * 80
        self.save_to_log(separator)
        if self.params.print_log:
            print(separator)

        return loss_avg.avg

    def save_to_log(self, log: str):
        """Append one line to the plain-text training log file."""
        with open("./logs/printinfo.txt", 'a', encoding="utf-8") as f:
            f.write(log + "\n")

    def draw_board(self, title, y, x):
        """Write a named group of scalars (dict name -> value) to TensorBoard at step x."""
        self.writer.add_scalars(title, y, x)
    