import os
import pickle

import torch
from torch.utils.data.dataloader import DataLoader

from segmentation.dataset import Handmask, Triplet
from segmentation.utils import *


def get_UNet(
    data_dir,
    folder_dir,
    n_train=3,
    split=(72, 8, 20),
    length=128,
    w_steps=1000,
    combine=True,
    multi_class=True,
):
    """Evaluate trained U-Net checkpoints on the held-out test split.

    For each of ``n_train`` runs this rebuilds the seeded train/val/test
    split, loads the pickled ``{i}_best.pth`` checkpoint from
    ``folder_dir/checkpoint``, computes 3D Dice on the test loader, and
    appends per-run and averaged scores to ``INFO_ds.txt`` and
    ``test_INFO_ds.txt`` inside ``folder_dir``.

    Parameters
    ----------
    data_dir : str
        Dataset root; must contain an ``images`` subfolder. If the path
        contains ``'OASIS'``, ``condition`` is 0, otherwise 1.
    folder_dir : str
        Experiment folder holding ``checkpoint/{i}_best.pth``; log files
        are appended here.
    n_train : int
        Number of independently seeded evaluation runs.
    split : sequence of 3 ints
        Relative train/val/test proportions, normalized to the volume count.
        (A tuple default avoids the shared-mutable-default pitfall; the body
        always rebuilds it as a fresh list.)
    length : int
        Slices per volume; forwarded to ``Triplet`` and used to count
        volumes in the flat ``images`` folder.
    w_steps, combine, multi_class
        Forwarded unchanged to the ``Triplet`` dataset.
    """
    import random
    import warnings
    warnings.filterwarnings('ignore')

    condition = 0 if 'OASIS' in data_dir else 1

    # Each volume is stored as `length` slice images, so the volume count is
    # the flat image count divided by slices-per-volume.
    # (Was a hard-coded 128; now tied to the `length` parameter — identical
    # at the default. TODO confirm for non-128 datasets.)
    split_length = len(os.listdir(os.path.join(data_dir, 'images'))) // length
    split = [int(s) for s in split]
    split = [split_length * s // sum(split) for s in split]
    # Absorb integer-division rounding into the validation share so the
    # three parts always sum exactly to split_length.
    split[1] = split_length - split[0] - split[2]
    print(f'dataset split: {split}')

    out_ch = 17
    sum_dice = 0
    sum_dice_class = [0] * (out_ch - 1)

    for i_train in range(n_train):
        # Re-seed per run so each run reproduces the exact split used when
        # the corresponding checkpoint was trained.
        torch.manual_seed(i_train)
        random.seed(28 * i_train)
        samples = list(range(split_length))
        random.shuffle(samples)

        test_loader = DataLoader(
            Triplet(
                data_dir=data_dir,
                multi_class=multi_class,
                aug=False,
                sample=samples[split[0] + split[1]:split_length],
                combine=combine,
                length=length,
                w_steps=w_steps,
            ),
            batch_size=4, drop_last=False, shuffle=False, num_workers=4, pin_memory=True
        )

        print(f'val: {samples[split[0]:split[0]+split[1]]}\ntest: {samples[split[0]+split[1]:split_length]}')

        chn = 1
        print(f'image channel: {chn}')

        with torch.no_grad():
            # NOTE(review): pickle.load executes arbitrary code from the
            # checkpoint file — only load checkpoints from trusted sources.
            with open(os.path.join(folder_dir, 'checkpoint', f'{i_train}_best.pth'), 'rb') as f:
                net = pickle.load(f)['net'].eval().requires_grad_(False).cuda()

            dice, dice_std, dice_class = eval_dice_3d(net, test_loader, True, folder_dir, i_train, multi_class, True, condition)

            for d in dice_class:
                print(f'{d:.3f}', end=', ')
            print(f'\ndice: {dice:.3f} std: {dice_std:.3f}\n')

        with open(os.path.join(folder_dir, 'INFO_ds.txt'), 'a') as log:
            log.write(f'mdice: [{dice:.5f}]\n')

        with open(os.path.join(folder_dir, 'test_INFO_ds.txt'), 'a') as log:
            # Per-class dice row, then the mean dice for this run.
            for d in dice_class:
                log.write(f'{d:.3f}, ')
            log.write('\n')
            log.write(f'{dice:.3f}\n')

            # Accumulate across runs for the final averages.
            sum_dice += dice
            sum_dice_class = [x + y for x, y in zip(sum_dice_class, dice_class)]

            log.write('=================================\n')

    with open(os.path.join(folder_dir, 'INFO_ds.txt'), 'a') as log:
        log.write(f'Mean test miou: [{sum_dice/n_train}]\n')

    with open(os.path.join(folder_dir, 'test_INFO_ds.txt'), 'a') as log:
        log.write('Mean:\n')
        sum_dice_class = [x / n_train for x in sum_dice_class]

        # Averaged per-class dice row, then the overall mean.
        for d in sum_dice_class:
            log.write(f'{d:.3f}, ')
        log.write('\n')
        log.write(f'{sum_dice/n_train:.3f}\n')

if __name__ == "__main__":
    # Pin evaluation to a single GPU before torch initializes CUDA.
    os.environ["CUDA_VISIBLE_DEVICES"] = '2'

    dataset_root = 'data/CANDI-128-160-norm'
    experiment_dir = 'save_seg/ds/_O-C-ns'
    get_UNet(data_dir=dataset_root, folder_dir=experiment_dir)
    # get_UNet(
    #     data_dir='data/OASIS-128-160-norm',
    #     folder_dir='save_seg/supervised/exp22-74shot-reso160-augTrue'
    # )
#----------------------------------------------------------------------------
