import os
import torch
import time
import warnings
import parameters as p
import torchvision
from utils import *
# from model import Generator, Discriminator
from Csal.discriminator import Discriminator
from Csal.generator import Generator
from dataset import get_dataloader
from tensorboardX import SummaryWriter


##############################################################
# Silence UserWarnings globally for the whole training run
# (presumably to hide noisy torchvision/dataloader warnings — confirm).
warnings.filterwarnings("ignore", category=UserWarning)

def train(generator: Generator, critic: Discriminator,
          opt_critic: torch.optim.Optimizer,
          opt_gen: torch.optim.Optimizer,
          dataloader, device,
          epochs: int = p.EPOCHS,
          save_on_step: int = p.SAVE_ON_STEP,
          sample_on_step: int = p.SAMPLE_ON_STEP,
          start_epoch: int = 113,
          log_every: int = 4):
    """Train a WGAN-GP: alternating critic and generator updates over ``dataloader``.

    Restores model/optimizer checkpoints when present, then for each epoch runs
    ``p.CRITIC_ITERATIONS`` critic steps followed by ``p.GENERATOR_ITERATIONS``
    generator steps per batch, periodically logging averaged losses, showing
    generated samples in an OpenCV window, and checkpointing to disk.

    Parameters
    ----------
    generator, critic : the two networks; both are moved to ``device`` here.
    opt_critic, opt_gen : optimizers for critic/generator (state restored from
        ``p.OPTIMIZER_CHECKPOINT_PATH`` if it exists).
    dataloader : yields ``(images, labels)``; labels are ignored.
    device : torch device to train on.
    epochs : exclusive upper bound of the epoch loop.
    save_on_step : checkpoint models/optimizers every this many batches.
    sample_on_step : display generated samples every this many batches.
    start_epoch : first epoch index. Defaults to 113 to match the original
        hard-coded resume point — pass 0 for a fresh run.
    log_every : print averaged losses every this many batches.
    """
    # Restore model / optimizer state only when the checkpoint files exist.
    if os.path.exists(p.MODEL_CHECKPOINT_PATH) and load_model(critic, generator, strict=False):
        print("################################## models loaded ##################################\n")
    if os.path.exists(p.OPTIMIZER_CHECKPOINT_PATH) and load_optimizer(opt_critic, opt_gen):
        print("################################## optimizers loaded ##################################\n")

    # Reset 'initial_lr' so an LR scheduler attached later starts from the
    # configured learning rate instead of whatever the checkpoint carried.
    opt_critic.param_groups[0]['initial_lr'] = p.LEARNING_RATE
    opt_gen.param_groups[0]['initial_lr'] = p.LEARNING_RATE

    # cv2 is brought in via `from utils import *`; window used by sample_and_exhibit.
    cv2.namedWindow('Sample', cv2.WINDOW_AUTOSIZE)
    generator = generator.to(device)
    critic = critic.to(device)

    print("################################## information ##################################")
    print(f"Batchs : {len(dataloader)}")
    print(f"Batch size : {p.BATch_SIZE if False else p.BATCH_SIZE}")
    print("")
    print("")
    print('################################## ##################################\n')

    # Gradient seed for the generator step: output.backward(mone) is
    # equivalent to (-output).backward(), i.e. maximize the critic's score
    # of the fakes.
    mone = torch.tensor(-1.0, dtype=torch.float, device=device)

    for epoch in range(start_epoch, epochs):
        start_time = time.time()
        print(f"epoch : {epoch} of {epochs}")
        loss_accumulator = {'critic': 0., 'generator': 0., 'gp': 0., 'w_distance': 0.}
        window_batches = 0  # batches accumulated since the last log line

        for batch_idx, (x, _) in enumerate(dataloader):
            real = x.to(device)

            # ---- critic updates -------------------------------------------
            # Re-enable critic gradients (they are frozen below for the
            # generator phase).
            for param in critic.parameters():
                param.requires_grad = True

            for _ in range(p.CRITIC_ITERATIONS):
                critic.zero_grad()

                critic_real = critic(real).mean()

                z = torch.randn([real.size(0), *p.LATENT_SINGLE_SHAPE], device=device)
                fake = generator(z)
                critic_fake = critic(fake).mean()

                gp = gradient_penalty(critic, real, fake, device=device)

                # WGAN-GP critic objective: E[D(fake)] - E[D(real)] + penalty.
                loss_critic = critic_fake - critic_real + gp
                loss_critic.backward()
                opt_critic.step()

            # Freeze the critic while the generator updates.
            for param in critic.parameters():
                param.requires_grad = False

            # ---- generator updates ----------------------------------------
            for _ in range(p.GENERATOR_ITERATIONS):
                generator.zero_grad()

                z = torch.randn([real.size(0), *p.LATENT_SINGLE_SHAPE], device=device)
                fake = generator(z)
                output = critic(fake).mean()
                output.backward(mone)  # ascend the critic score of fakes
                opt_gen.step()
                loss_gen = -output

            loss_accumulator['generator'] += loss_gen.cpu().detach().item()
            loss_accumulator['critic'] += loss_critic.cpu().detach().item()
            loss_accumulator['gp'] += gp.cpu().detach().item()
            loss_accumulator['w_distance'] += (critic_real - critic_fake).cpu().detach().item()
            window_batches += 1

            if batch_idx % log_every == 0:
                # Average over the batches actually accumulated (the original
                # divided by a fixed 4, which overstated the divisor on the
                # very first print of each epoch).
                critic_loss = loss_accumulator['critic'] / window_batches
                generator_loss = loss_accumulator["generator"] / window_batches
                avg_gp = loss_accumulator["gp"] / window_batches
                avg_w_d = loss_accumulator["w_distance"] / window_batches
                end_time = time.time()
                print(
            f"[critic loss : {critic_loss:<12}] [ generator loss : {generator_loss:>12}]\n [EM_distance : {avg_w_d}] [gp : {avg_gp}]({batch_idx+1}/{epoch+1})")
                print(f'time cost : {end_time - start_time }')

                loss_accumulator = {'critic': 0., 'generator': 0., 'gp': 0., 'w_distance': 0.}
                window_batches = 0
                start_time = time.time()

            # Use the parameters (the original ignored them and read the
            # module constants directly).
            if batch_idx % sample_on_step == 0:
                sample_and_exhibit(winname='Sample', winname2='reals', generator=generator, device=device)

            if batch_idx != 0 and batch_idx % save_on_step == 0:
                # exist_ok avoids the check-then-create race of the original
                # os.path.exists + os.mkdir pair.
                os.makedirs(p._SAVE_PATH, exist_ok=True)

                if save_model(D=critic, G=generator):
                    print(
                        "################################## models saved ##################################")
                else:
                    print("Fail to save models")
                if save_optimizer(opt_critic=opt_critic, opt_gen=opt_gen):
                    print(
                        "################################## optimizers saved ##################################")
                else:
                    print("Fail to save optimizers")


if __name__ == '__main__':
    # Prefer the GPU when one is available.
    run_device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    gen_net = Generator().to(run_device)
    critic_net = Discriminator().to(run_device)

    # 'initial_lr' is stored in each param group so an LR scheduler can be
    # attached later without recreating the optimizer.
    critic_optimizer = torch.optim.Adam(
        [{'initial_lr': p.LEARNING_RATE, 'params': critic_net.parameters()}],
        lr=p.LEARNING_RATE,
        betas=p.BETAS,
    )
    gen_optimizer = torch.optim.Adam(
        [{'initial_lr': p.LEARNING_RATE, 'params': gen_net.parameters()}],
        lr=p.LEARNING_RATE,
        betas=p.BETAS,
    )

    # NOTE(review): keyword spelling 'presistent_workers' matches
    # dataset.get_dataloader's signature — do not "fix" it here alone.
    loader = get_dataloader(presistent_workers=True)

    train(generator=gen_net, critic=critic_net,
          opt_critic=critic_optimizer, opt_gen=gen_optimizer,
          dataloader=loader, device=run_device)