# ==============
# === nnUNet ===
# ==============
from re import M
from batchgenerators.utilities.file_and_folder_operations import load_pickle, save_pickle
import json
import collections
import pathlib
import nibabel as nib
import numpy as np
from thesmuggler import smuggle
from tqdm.auto import tqdm
from rich.pretty import pprint as print
from sklearn.model_selection import KFold
import random
import numpy as np
import pickle
import seg_metrics.seg_metrics as sg


# Project-local helper modules loaded by file path (they are not on sys.path).
_SHIDAOAIDATA = '../unetr/src/data'
pp = smuggle(f'{_SHIDAOAIDATA}/pathparser.py')  # path helpers, e.g. pp.exists()
op = smuggle(f'{_SHIDAOAIDATA}/operator.py')    # array helpers: _dice, _embed_clone, MyCounter

# nnUNet task identifiers used to build the dataset paths below.
_TASKNUM = '602'
_TASKNAME = 'Z2'

# Dataset layout roots (nnUNet v1 directory convention).
_MYROOT = '/home/yusongli/_dataset/shidaoai/img/_out/nn'
_MYDATA = f'{_MYROOT}/DATASET'
_MYMODEL = 'nnUNet'

BASE = f'{_MYDATA}/{_MYMODEL}_raw_data_base'
PREPROCESSING_OUTPUT_DIR = f'{_MYDATA}/{_MYMODEL}_preprocessed'
NETWORK_TRAINING_OUTPUT_DIR_BASE = f'{_MYDATA}/{_MYMODEL}_cropped_data'

# Case ids that can be excluded from evaluation (see the commented-out guard
# in dice()); only the 'metric infinity' case is currently listed active.
_EMPTIES = [
    # # empties
    # 'lt_0827',
    # 'lt_0830',
    # 'lt_0718',
    # 'fxc_0077',
    # 'zc_0195',
    # 'zc_0017',
    # 'zc_0090',
    # 'lt_0747',
    # 'yx_0112',
    # 'fxc_0041',
    # 'lt_0544',
    # 'ly_0123',
    # # dicezeros
    # "lt_0123",
    # "lt_0826",
    # "yx_0045",
    # "lt_0077"
    # metric infinity
    'lt_0784'
]


def nn_splits_final() -> None:
    """
    Overwrite nnUNet's splits_final.pkl with the train/val split from n_to_s.json.

    Writes a single-fold split whose case ids are the n_to_s.json ids prefixed
    with the task name, matching nnUNet's preprocessed case identifiers.

    Raises:
        FileNotFoundError: if n_to_s.json or the existing splits_final.pkl is missing.
    """
    with open(f'{_MYROOT}/n_to_s.json', 'r') as f:
        n_to_s = json.load(f)
    pkl = f'{PREPROCESSING_OUTPUT_DIR}/Task{_TASKNUM}_{_TASKNAME}/splits_final.pkl'
    # The loaded content is intentionally discarded: this only verifies that
    # nnUNet preprocessing already produced a splits file before overwriting it.
    load_pickle(pkl)
    obj = [
        {
            # Prefix with the task name (was hard-coded 'Z2_') so ids match
            # nnUNet's '<TASKNAME>_<case>' naming.
            'train': [f'{_TASKNAME}_{item}' for item in n_to_s['training']],
            'val': [f'{_TASKNAME}_{item}' for item in n_to_s['validation']],
        }
    ]
    save_pickle(obj, pkl)


def dice(
    small_pred_folder,
    big_gt_folder,
    start_positions,
    val_list,
):
    """Compute per-case segmentation metrics between cropped predictions and full-size GT.

    Each cropped prediction is embedded back into a full-size canvas at its
    recorded start position before being compared with the ground truth.

    Args:
        small_pred_folder (str): folder of cropped predictions, e.g.
            '.../Task607_CZ2/new_fine_yunet__nnUNetPlansv2.1/fold_0/validation_raw_postprocessed'
        big_gt_folder (str): folder of full-size labels, e.g.
            '.../nnUNet_raw_data/Task606_C/labelsTr'
        start_positions (str): the path of the 'start_positions.txt'
            (lines formatted '<id>|<x>+<y>+<z>').
        val_list (list): the list of validation set. e.g, ['zc_0017', 'fxc_0077', 'lt_0718', ...]

    Returns:
        dict: metric name -> list of per-case values, in val_list order.
    """
    small_pred_folder = pathlib.Path(pp.exists(small_pred_folder))
    big_gt_folder = pathlib.Path(pp.exists(big_gt_folder))
    start_positions = pathlib.Path(pp.exists(start_positions))

    # Parse 'id|x+y+z' lines into {id: [x, y, z]}.
    with open(start_positions, 'r') as f:
        lines = f.read().split('\n')
    start_positions = {
        line.split('|')[0]: [int(v) for v in line.split('|')[1].split('+')]
        for line in lines
        if line != ''
    }

    results = {}

    for item in tqdm(val_list):
        sp = start_positions[item]
        # if item in _EMPTIES:
        #     continue

        # Cropped prediction for this case.
        small_pred_path = small_pred_folder / f'{item}.nii.gz'
        small_pred_arr = nib.load(small_pred_path).get_fdata()

        # GT filename may carry extra prefix/suffix, so locate it by glob.
        big_gt_path = next(big_gt_folder.glob(f'*{item}*'))
        big_gt_arr = nib.load(big_gt_path).get_fdata()

        # Embed the cropped prediction into a full-size canvas at `sp`.
        big_pred_arr = op._embed_clone(small_pred_arr, big_gt_arr.shape, sp)

        # seg_metrics returns {metric: [value_per_label]}; label 1 only, so
        # take element [0] of each per-metric list.
        result = sg.write_metrics(labels=[1], gdth_img=big_gt_arr, pred_img=big_pred_arr, verbose=False, metrics=None)[0]
        for key in result:
            results.setdefault(key, []).append(result[key][0])

    return results


def unetr_yaml_z2():
    """Stub: collect start-position file paths for Task602_Z2.

    NOTE(review): appears unfinished — `d`, `tr` and `ts` are assigned but
    never used and the function returns nothing. Kept as-is.
    """
    folder = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task602_Z2'
    d = []
    tr = f'{folder}/start_positions_Tr.txt'
    ts = f'{folder}/start_positions_Ts.txt'


def nn_dice_roi():
    """Evaluate cropped (ROI) predictions against full-size GT and print summaries.

    For every prediction folder, computes per-case metrics with dice(), prints
    the per-metric means and the MyCounter extremes, and — when `txt` is
    enabled — dumps the per-case dice values of all folders to a text file.
    """
    folders = [
        '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_cropped_data/nnUNet/3d_fullres/Task607_CZ2/new_fine_nyunet11_epoch500__nnUNetPlansv2.1/fold_0/validation_raw_postprocessed'
    ]
    big_gt_folder = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C/labelsTr'
    start_positions = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task607_CZ2/start_positions.txt'
    val_list = load_pickle('/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_preprocessed/Task607_CZ2/splits_final.pkl')[0]['val']

    txt = False  # set True to also dump per-case dice values to a text file

    # Always bound (was only created under `if txt:`), so the flag can be
    # flipped without risking a NameError below.
    recorder = []

    for folder in tqdm(folders):
        counter = op.MyCounter()
        results = dice(folder, big_gt_folder, start_positions, val_list)
        for i, result in enumerate(results['dice']):
            counter.set(result, val_list[i])
        if txt:
            recorder.append(results)

        # Collapse per-case lists to their mean for reporting.
        results = {k: np.mean(v) for k, v in results.items()}
        print(f"{pathlib.Path(folder).parents[1].name.split('__')[0]} | {results}")
        print(f'{counter.get()}')

    if txt:
        # Mode 'w' already truncates, so no seek/truncate dance is needed.
        with open('/home/yusongli/dices.txt', 'w') as f:
            for i in range(len(recorder[0]['dice'])):
                # One line per case: '<id> | <dice folder0>  <dice folder1> ...'
                # (The former inner temp() existed only for a now-commented
                # early-return filter on zero-dice samples; inlined.)
                string = f'{str(val_list[i])} | '
                string += '  '.join(f"{float(recorder[j]['dice'][i]):.4f}" for j in range(len(recorder)))
                f.write(f'{string}\n')
                f.flush()


def nn_dice_big():
    """Compute full-size dice for every predicted volume and report outliers.

    Prints the mean dice and the MyCounter extremes per folder, and records
    cases with an empty prediction or a zero dice despite a non-empty
    prediction, dumping the summary as JSON.
    """
    pred_folders = [
        pathlib.Path('/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_cropped_data/nnUNet/3d_fullres/Task606_C/new_coarse_foldall__nnUNetPlansv2.1/fold_3/validation_raw_postprocessed')
    ]
    gt_folder = pathlib.Path('/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C/labelsTr')
    with open('/home/yusongli/Documents/nnUNet/fold3.json', 'w') as f:
        for pred_folder in pred_folders:
            counter = op.MyCounter()
            dices, dicezeros, empties = [], [], []
            for pred_path in tqdm(list(pred_folder.glob('*nii*'))):
                idx = pred_path.name.split('.')[0]
                pred_arr = nib.load(pred_path).get_fdata()
                gt_path = next(gt_folder.glob(f'*{idx}*'))
                # BUGFIX: the GT volume must be loaded before comparing —
                # op._dice takes arrays on both sides (see its use in dice()),
                # whereas previously the raw Path object was passed.
                gt_arr = nib.load(gt_path).get_fdata()
                # Renamed from `dice` to avoid shadowing the module-level dice().
                dice_val = op._dice(pred_arr, gt_arr)
                counter.set(dice_val, idx)
                dices.append(dice_val)

                # 1. Empty prediction
                empty = False
                if pred_arr.sum() == 0.0:
                    empties.append(idx)
                    empty = True

                # 2. Not empty but dice is zero (totally wrong predict location)
                if not empty and dice_val == 0.0:
                    dicezeros.append(idx)

            print('=' * 50)
            print(f'Mean: {sum(dices) / len(dices)}')
            print(f'{counter.get()}')
            # NOTE(review): one json.dump per folder — with more than one
            # folder this concatenates JSON documents into a single file.
            json.dump({'mean_dice': (sum(dices) / len(dices)), 'empties' : empties, 'dicezeros' : dicezeros}, f)


def nn_empty_data():
    """Print, per prediction folder, the file names whose segmentation is completely empty."""
    folders = [
        # '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_cropped_data/nnUNet/3d_fullres/Task607_CZ2/new_fine_nnunet_fp32_epoch500_04__nnUNetPlansv2.1/fold_0/validation_raw_postprocessed',
        pathlib.Path('/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_cropped_data/nnUNet/3d_fullres/Task606_C/new_coarse__nnUNetPlansv2.1/fold_0/validation_raw_postprocessed')
    ]
    for folder in tqdm(folders):
        folder = pathlib.Path(folder)
        # A volume is "empty" when its voxel sum is exactly zero.
        empties = [
            pathlib.Path(nii_path).name
            for nii_path in tqdm(list(folder.glob('*nii*')))
            if nib.load(nii_path).get_fdata().sum() == 0.0
        ]
        print(f'{pathlib.Path(folder).parents[1].name}')
        print(empties)


def nn_split(folder: str, splits_final_filepath: str, folds=5) -> None:
    """Build a k-fold split stratified by patient prefix and pickle it.

    Case ids are grouped by the prefix before the first underscore; each group
    is k-folded independently and the per-group folds are merged, so every
    patient prefix appears in every fold. A one-sample swap between the first
    and last fold evens out the fold sizes before the result is written.

    Args:
        folder: dataset root; labels are found via '**/*label*/*nii*'.
        splits_final_filepath: destination pickle path.
        folds: number of folds (default 5).
    """
    random.seed(0)  # deterministic shuffle
    idxs = list(map(lambda x: x.name.split('.')[0], list(pathlib.Path(folder).glob('**/*label*/*nii*'))))
    random.shuffle(idxs)

    # Group case ids by patient prefix (e.g. 'zc' from 'zc_0017').
    groups = collections.defaultdict(list)
    for case_id in idxs:
        groups[case_id.split('_')[0]].append(case_id)

    # K-fold each patient group independently.
    kf = KFold(n_splits=folds)
    for who in groups:
        cases = groups[who]
        groups[who] = [
            ([cases[j] for j in train_idx], [cases[k] for k in valid_idx])
            for train_idx, valid_idx in kf.split(cases)
        ]

    # Merge the per-group folds into global train/val lists.
    splits = [collections.defaultdict(list) for _ in range(folds)]
    for fold_idx, split in enumerate(splits):
        for who in groups:
            split['train'].extend(groups[who][fold_idx][0])
            split['val'].extend(groups[who][fold_idx][1])

    # ! <<< Fix offset
    for f in range(folds):
        print((f, len(splits[f]['train']), len(splits[f]['val'])))
    print('=' * 10)
    # Move one sample val->train in the first fold and train->val in the last.
    splits[0]['train'].append(splits[0]['val'].pop())
    splits[-1]['val'].append(splits[-1]['train'].pop())
    for f in range(folds):
        print((f, len(splits[f]['train']), len(splits[f]['val'])))
    # ! >>>
    with open(splits_final_filepath, 'wb') as out:
        pickle.dump(splits, out)

def temp():
    """One-off cleanup of fold_1's validation_raw folder.

    Deletes every prediction file (and its .pkl/.npz companions) whose case id
    is NOT in fold 1's validation list of the 5-fold split pickle.

    WARNING: destructive — permanently removes files; run only after verifying
    the split pickle matches this validation_raw folder.
    """
    splits_final_filepath = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C/splits_final_5fold.pkl'
    folder = pathlib.Path('/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_cropped_data/nnUNet/3d_fullres/Task606_C/new_coarse_foldall__nnUNetPlansv2.1/fold_1/validation_raw')
    with open(splits_final_filepath, 'rb') as f:
        s = pickle.load(f)
    # Keep-list: validation ids of fold 1.
    s = s[1]['val']
    for path in folder.glob('*nii*'):
        idx = path.name.split('.')[0]
        if idx not in s:
            # Remove the prediction together with its sidecar files.
            pkl = pathlib.Path(f'{path.parent}/{idx}.pkl')
            npz = pathlib.Path(f'{path.parent}/{idx}.npz')
            path.unlink(missing_ok=True)
            pkl.unlink(missing_ok=True)
            npz.unlink(missing_ok=True)


def check_dataset():
    """Scan every label volume and track per-y-slice foreground sums via MyCounter.

    For each label file, collapses axis 0 and the remaining last axis so one
    sum is produced per index along axis 1 (presumably the y axis — TODO
    confirm against the dataset orientation), then feeds each sum to the
    counter tagged with its file and index.
    """
    labels = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C/labelsTr'
    labels = pathlib.Path(labels)
    counter = op.MyCounter()
    # Materialize the directory listing once: the original iterated
    # labels.iterdir() for tqdm and re-scanned the directory again for len().
    label_paths = list(labels.iterdir())
    for label in tqdm(label_paths, total=len(label_paths)):
        arr = nib.load(label).get_fdata()
        # sum(axis=0) collapses x; the following sum(axis=1) collapses the
        # original last axis, leaving one value per axis-1 index.
        axis_sums = arr.sum(axis=0).sum(axis=1)
        for i, axis_sum in enumerate(axis_sums):
            counter.set(axis_sum, f'{label} at y index {i}.')
    print(counter.get())


if __name__ == '__main__':
    # Ad-hoc driver: uncomment the task to run; currently only check_dataset()
    # is active.
    # nn_dice_roi()
    # nn_empty_data()
    # nn_dice_big()

    # folder = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C'
    # splits_final_filepath = '/home/yusongli/_dataset/shidaoai/img/_out/nn/DATASET/nnUNet_raw_data_base/nnUNet_raw_data/Task606_C/splits_final_5fold.pkl'
    # nn_split(folder, splits_final_filepath, folds=5)
    check_dataset()
