import time
import copy
import torch
from options.base_options import BaseOptions
from data import CreateDataLoader
from models import create_model
from util.visualizer import Visualizer
import tensorboardX
import os
if __name__ == '__main__':
    # ---- Options & training data ---------------------------------------
    opt = BaseOptions().parse()
    print('>>>>>lambda_L1:', opt.lambda_L1)
    data_loader = CreateDataLoader(opt)
    dataset = data_loader.load_data()
    dataset_size = len(data_loader)
    opt.dataset_size = dataset_size

    # TensorBoard logs live next to the run's checkpoints.
    writer = tensorboardX.SummaryWriter(logdir=os.path.join(opt.checkpoints_dir, opt.name))

    print('#training images = %d' % dataset_size)

    # ---- Model ----------------------------------------------------------
    model = create_model(opt)
    model.setup(opt)

    # NOTE(review): hard-coded resume checkpoint — consider exposing this as
    # a command-line option (e.g. opt.load_path) instead of editing source.
    load_path = "./checkpoints/RGBTextures/room1-TEX2048--20220908-234749/3_texture.pth"
    model.load_networks(load_path)

    visualizer = Visualizer(opt)
    total_steps = 0  # cumulative number of training samples seen

    for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
        epoch_start_time = time.time()
        visualizer.reset()

        epoch_iter = 0  # sample counter within the current epoch
        epoch_loss = 0.0
        model.reset_gradients()
        for i, data in enumerate(dataset):
            iter_start_time = time.time()
            model.set_input(data)
            model.optimize_parameters(epoch, epoch_iter)
            total_steps += opt.batch_size
            epoch_iter += opt.batch_size

            # Detach each loss once and drive both TensorBoard logging and
            # the console printout from the same dict so they cannot drift.
            losses = {
                "TotalLoss": model.loss_G_total.detach().cpu(),
                "loss_G_L1": model.loss_G_L1.detach().cpu(),
                "loss_G_L1_Diff": model.loss_G_L1_Diff.detach().cpu(),
                "loss_G_VGG": model.loss_G_VGG.detach().cpu(),
                "loss_G_TexReg": model.loss_G_TexReg.detach().cpu(),
            }
            epoch_loss += losses["TotalLoss"]
            for tag, value in losses.items():
                writer.add_scalar(tag, value, total_steps)
            visualizer.print_current_losses(epoch, i, losses, time.time() - iter_start_time)

        epoch_loss /= dataset_size
        model.adjust_learning_rate(epoch, opt.lr)
        model.update_learning_rate()

        # Periodically dump the optimized texture and checkpoint the networks
        # (single condition — the original duplicated the same test twice).
        if epoch % opt.save_epoch_freq == 0:
            model.WriteTextureToFile(opt.results_dir + 'texture_' + str(epoch) + '.png')
            print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps))
            model.save_networks('latest')
            model.save_networks(epoch)

        print('End of epoch %d / %d \t Time Taken: %d sec' %
              (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
