import copy
import os
import pickle
import shutil
import random

import click
import torch
from torch import optim
from torch.utils.tensorboard import SummaryWriter
from tqdm import tqdm

from segmentation.dataset import Ds, MR_slice
from segmentation.network_ds import UNet_ds, Unet2D
from segmentation.dataset import apply_trans
from segmentation.utils import *


def worker_init_fn(worker_id):
    """Seed Python's ``random`` module inside each DataLoader worker.

    The previous implementation used ``random.seed(worker_id)``, which makes
    every epoch replay the exact same augmentation stream (the worker id is
    identical across epochs).  ``torch.initial_seed()`` is already unique per
    worker *and* per epoch, so deriving the seed from it keeps workers
    decorrelated while remaining reproducible under ``torch.manual_seed``.

    :param worker_id: index supplied by the DataLoader (unused; kept for the
        ``worker_init_fn`` callback signature).
    """
    random.seed(torch.initial_seed() % 2**32)

def apply_trans_batch(image, mask, chn):
    """Run the shared augmentation transform over every sample of a batch.

    Images are mapped from [-1, 1] into [0, 1], each image/mask pair is
    stacked along the channel axis so both receive the *same* random
    transform, and the images are finally renormalised back to [-1, 1].
    Masks are mutated in place (cast to long); images are returned as a new
    tensor.
    """
    # Undo the [-1, 1] normalisation so the transform operates on [0, 1].
    image = (image + 1) / 2
    for idx in range(image.shape[0]):
        stacked = torch.cat([image[idx], mask[idx].unsqueeze(0)], dim=0)
        img_t, msk_t = apply_trans(stacked).split([chn, 1], dim=0)
        image[idx] = img_t
        mask[idx] = msk_t.squeeze(0).to(torch.long)

    # Restore the [-1, 1] input range the network expects.
    image = (image - 0.5) / 0.5

    return image, mask

def repeat_dataloader(iterable):
    """Yield items from *iterable* forever, restarting it after each pass."""
    while True:
        yield from iterable


@click.command()
# train
@click.option('--n_train',                  help='N train in a row', type=int, default=3, show_default=True)
@click.option('--data_dir',                 help='Training data save', required=True, metavar='DIR')
@click.option('--out_dir',                  help='Training output save', metavar='DIR', default='.')
@click.option('--epochs',                   help='Number of epochs', type=int, default=300, show_default=True)
@click.option('--batch_size',               help='Batch size for training', type=int, default=10, show_default=True)
@click.option('--resolution',               help='Resolution of image', type=int, default=256, show_default=True)
@click.option('--split',                    help='Split of training, validation, test set', nargs=3, default=[7, 1, 2])
@click.option('--n_class',                  help='', type=int, default=16, show_default=True)
@click.option('--multi_class',              help='multi_class?', is_flag=True)
@click.option('--combine',                  help='combine 3,42 2,41?', is_flag=True)
@click.option('--aug',                      help='combine 3,42 2,41?', is_flag=True)
@click.option('--shot',                     help='how many shot training', type=int, default=500, show_default=True)
@click.option('--length',                   help='how many slices each sample', type=int, default=128, show_default=True)
@click.option('--w_steps',                  help='how many w project step', type=int, default=1000, show_default=True)
def train(
    n_train,
    epochs,
    batch_size,
    resolution,
    split,
    shot,
    multi_class,
    n_class,
    combine,
    aug,
    data_dir,
    out_dir,
    length,
    w_steps,
):
    """Train ``UNet_ds`` segmentation networks ``n_train`` times in a row.

    Each run re-seeds the RNGs, builds loaders over the two training subsets
    ('ss' and 'sd'), alternates one Adam step per subset per iteration with
    exponential LR decay, logs to TensorBoard, and pickles the final network
    under ``<out_dir>/exp<id>-.../checkpoint/``.  The 3-D dice evaluation
    and its log files are currently commented out.

    NOTE(review): several options (resolution, split, shot, multi_class,
    combine, length, w_steps) are only used for folder naming or not used at
    all in this function — confirm they are still needed.
    """

    # make dir =====================================================================
    # folder_dir = make_save_dir('./_/supervised', 'U-Net', shot, '')

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
        print(f'make output dir: {out_dir}')
    # Experiment index = number of entries already present in out_dir.
    # NOTE(review): `id` shadows the builtin; the scheme also collides if
    # out_dir contains unrelated files, tripping the assert below.
    id = len(os.listdir(out_dir))
    folder_dir = os.path.join(out_dir, f'exp{id}-{shot}shot-reso{resolution}-aug{aug}-ds')

    assert not os.path.exists(folder_dir)
    os.mkdir(folder_dir)
    os.mkdir(os.path.join(folder_dir, 'visualization_ds'))
    os.mkdir(os.path.join(folder_dir, 'visualization_myds'))
    os.mkdir(os.path.join(folder_dir, 'checkpoint'))
    os.mkdir(os.path.join(folder_dir, 'runs'))
    print('make dir done!')

    chn = 1                  # single-channel (grayscale) input images
    out_ch = n_class + 1     # +1 output channel, presumably background — TODO confirm
    # Accumulators for the (currently disabled) dice evaluation further down.
    sum_dice_ds = 0
    sum_dice_class_ds = [0]*(out_ch-1)
    sum_dice_myds = 0
    sum_dice_class_myds = [0]*(out_ch-1)
    print(f'image channel: {chn}')

    # Cross-dataset setup: train on one corpus, test on the other.
    # NOTE(review): if data_dir names neither 'CANDI' nor 'OASIS', test_dir
    # is never assigned and the lines below raise NameError.
    if 'CANDI' in data_dir:
        test_dir = 'data/OASIS-128-160-norm'
    elif 'OASIS' in data_dir:
        test_dir = 'data/CANDI-128-160-norm'
    condition = 0 if 'OASIS' in test_dir else 1

    # Assumes each test volume contributes exactly 128 slice files — TODO confirm.
    split_length = len(os.listdir(os.path.join(test_dir, 'images')))//128
    samples = list(range(split_length))
    # init parameters ===============================================================
    criterion = nn.CrossEntropyLoss()

    # train for n_train ===============================================================
    for i_train in range(n_train):
        # seed = i_train*28
        # Distinct, deterministic seeds per repetition.
        torch.manual_seed(i_train)
        random.seed(28*i_train)
        writer = SummaryWriter(os.path.join(folder_dir, 'runs', f'{i_train}_train'))

        net = UNet_ds(chn, out_ch).cuda()

        # Two loaders over the same directory, one per training subset.
        ss_loader = DataLoader(
            Ds(
            data_dir=data_dir,
            sub='ss',
            aug=aug
            ), batch_size, shuffle=True, num_workers=8, pin_memory=True, drop_last=True,worker_init_fn=worker_init_fn
        )
        sd_loader = DataLoader(
            Ds(
            data_dir=data_dir,
            sub='sd',
            aug=aug
            ), batch_size, shuffle=True, num_workers=8, pin_memory=True, drop_last=True,worker_init_fn=worker_init_fn
        )

        print('train set len: ', ss_loader.__len__())

        test_loader = DataLoader(
            MR_slice(
                data_dir=test_dir,
                aug=False,
                sample=samples
            ),
            batch_size=batch_size, drop_last=False, shuffle=False, num_workers=8, pin_memory=True
        )
        print(f'test len: {test_loader.__len__()}')
        print(f'train on {data_dir}')
        print(f'test on {test_dir}')
        # NOTE(review): len(DataLoader) is already the number of BATCHES;
        # dividing by batch_size again makes each "epoch" cover only
        # ~1/batch_size of the data — confirm whether this is intentional.
        iters = ss_loader.__len__() // batch_size

        # Infinite iterators so the two subsets can be drawn alternately.
        loaders = [repeat_dataloader(ss_loader), repeat_dataloader(sd_loader)]

        optimizer = optim.Adam(net.parameters(), lr=0.001, betas=(0.9, 0.999))
        # scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.5)
        scheduler = optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)


        losses = [0, 0]
        with tqdm(range(epochs), initial=0, dynamic_ncols=True, smoothing=0.01, ascii=True) as pbar:
            for epoch in range(epochs):
                for i in range(iters):
                    # One optimizer step per subset ('ss' then 'sd') each iteration.
                    for sub in range(2):
                        # prepare data
                        image, mask = next(loaders[sub])
                        image = image.cuda()
                        mask = mask.cuda()
                        image = (image-0.5)/0.5  # normalise [0,1] -> [-1,1]

                        pred = net(image, sub)  # `sub` presumably selects a network branch — TODO confirm
                        loss = criterion(pred, mask)

                        optimizer.zero_grad()
                        loss.backward()
                        optimizer.step()
                        losses[sub] = loss.item()

                    # log ======================================================================
                    pbar.set_description(' '.join([
                        f'[{i_train}/{n_train}]',
                        f'[{epoch}/{epochs}]',
                        f'[{i}/{iters}]',
                        f'loss0: {losses[0]:.3f}',
                        f'loss1: {losses[1]:.3f}',
                        f'lr: {round(scheduler.get_last_lr()[0], 6)}',
                    ]))

                    # NOTE(review): global_step=epoch inside the per-iteration
                    # loop rewrites the same step; only the last iteration of
                    # each epoch survives in TensorBoard.
                    writer.add_scalar('loss0', losses[0], global_step=epoch)
                    writer.add_scalar('loss1', losses[1], global_step=epoch)
                    writer.add_scalar('lr', optimizer.param_groups[0]['lr'], global_step=epoch)
                
                scheduler.step()
                pbar.update(1)

        # Pickle a frozen CPU copy of the trained network for later evaluation.
        with open(os.path.join(folder_dir, 'checkpoint', f'{i_train}_best.pth'), 'wb') as f:
            pickle.dump({'net': copy.deepcopy(net).eval().requires_grad_(False).cpu()}, f)
            
        # dice, dice_std, dice_class = eval_dice_3d_ds(net, test_loader, True, folder_dir, i_train, multi_class, True, condition)

        # for d in dice_class:
        #     print(f'{d:.3f}', end=', ')
        # print(f'\ndice: {dice:.3f} std: {dice_std:.3f}\n')

    #     with open(os.path.join(folder_dir, 'test_INFO_ds.txt'), 'a') as log:
    #         # mdice
    #         for d in dice_class:
    #             log.write(f'{d:.3f}, ')
    #         log.write('\n')
    #         log.write(f'{dice:.3f}\n')

    #         sum_dice_ds += dice
    #         sum_dice_class_ds = [x+y for x,y in zip(sum_dice_class_ds, dice_class)]

    #         log.write(f'==============ds=============\n')

    #     dice, dice_std, dice_class = eval_dice_3d_myds(net, test_loader, True, folder_dir, i_train, multi_class, True, condition)
        
    #     with open(os.path.join(folder_dir, 'test_INFO_ds.txt'), 'a') as log:
    #         # mdice
    #         for d in dice_class:
    #             log.write(f'{d:.3f}, ')
    #         log.write('\n')
    #         log.write(f'{dice:.3f}\n')

    #         sum_dice_myds += dice
    #         sum_dice_class_myds = [x+y for x,y in zip(sum_dice_class_myds, dice_class)]

    #         log.write(f'==============myds=============\n')

        # Visual sanity check built from the *last* training batch of this run
        # (relies on image/mask/pred leaking from the loop above).
        save_image(torch.cat([(image.repeat(1,3,1,1)+1)/2, visualize(mask), visualize(pred.argmax(1))]), f'{folder_dir}/show.png', nrow=batch_size)

    # with open(os.path.join(folder_dir, 'test_INFO_ds.txt'), 'a') as log:
    #     log.write(f'Mean :\n')
    #     sum_dice_class_ds = [x/n_train for x in sum_dice_class_ds]

    #     # mdice
    #     for d in sum_dice_class_ds:
    #         log.write(f'{d:.3f}, ')
    #     log.write('\n')
    #     log.write(f'{sum_dice_ds/n_train:.3f}\n')
    #     log.write(f'==============ds=============\n')

    #     log.write(f'Mean :\n')
    #     sum_dice_class_myds = [x/n_train for x in sum_dice_class_myds]

    #     # mdice
    #     for d in sum_dice_class_myds:
    #         log.write(f'{d:.3f}, ')
    #     log.write('\n')
    #     log.write(f'{sum_dice_ds/n_train:.3f}\n')
    #     log.write(f'==============myds=============\n')



if __name__ == "__main__":
    train()  # click parses CLI options and invokes the command
#----------------------------------------------------------------------------