import os
import random
import time
from copy import deepcopy
from glob import glob

import numpy as np
import torch
from scipy.ndimage.filters import gaussian_filter
from torch.utils.data import Dataset

from utils.ShangJiao_Utils import load_itk_image, lumTrans, load_pickle


class VesselData(Dataset):
    """
    In-memory patch dataset for artery/vein vessel segmentation.

    For every case, four volumes are loaded fully into memory at
    construction time: the CT image, the artery/vein annotation, an airway
    context map and an airway distance-transform map.  Each volume is cut
    into cube coordinates by ``split_comber.split_id``; ``__getitem__``
    crops one cube, attaches the auxiliary channels plus a normalized
    coordinate grid, and returns tensors together with the metadata needed
    to recombine per-cube predictions into a full volume.
    """

    def __init__(self, config, phase='train', split_comber=None, debug=False, random_select=False):
        """
        :param config: configuration dict from the model; must contain the keys
            'augtype', 'rawpath', 'refpath', 'airwaypath', 'lungairwaypath',
            'fullpath' and 'dataset_split'
        :param phase: 'train', 'val' or 'test'
        :param split_comber: split-combination-er used to cut volumes into cubes
        :param debug: debug mode to check few data (keeps only the first case)
        :param random_select: use partly, randomly chosen data for training;
            cubes stay grouped per case (see __len__ / __getitem__)
        """
        assert (phase == 'train' or phase == 'val' or phase == 'test')
        self.phase = phase
        self.augtype = config['augtype']  # flags: flip/swap/smooth/jitter/split_jitter
        self.split_comber = split_comber
        self.rand_sel = random_select
        self.patch_per_case = 5  # patches used per case (only when random_select)
        self.debug_flag = debug
        """
		specify the paths
		"""
        self.rawpath = config['rawpath']
        self.refpath = config['refpath']
        self.airwaypath = config['airwaypath']
        self.lungairwaypath = config['lungairwaypath']
        self.fullpath = config['fullpath']
        self.dataset = load_pickle(config['dataset_split'])  # dict with 'train'/'val'/'test' case-name lists

        print("-------------------------Load all data into memory---------------------------")
        """
		count the number of cases
		"""
        labellist = []  # vessel-voxel count per candidate cube (train only)
        cubelist = []
        self.caseNumber = 0
        allimgdata_memory = {}
        alllabeldata_memory = {}
        allcontext_memory = {}
        alldt_memory = {}

        if self.phase == 'train':
            data_file_names = self.dataset['train']
            file_num = len(data_file_names)
            if self.debug_flag:
                data_file_names = data_file_names[:1]
                file_num = len(data_file_names)
            self.caseNumber += file_num

            print("total %s case number: %d" % (self.phase, self.caseNumber))

            for data_name in data_file_names:
                raw_path = glob(os.path.join(self.rawpath, data_name + '*.mhd'))
                assert (len(raw_path) == 1)
                raw_path = raw_path[0]
                assert (os.path.exists(raw_path) == True)
                # prefer the full manual annotation; fall back to semi-automatic results
                label_path = os.path.join(self.fullpath, data_name + '_fullAnnotations.mhd')
                if not os.path.exists(label_path):
                    label_path = os.path.join(self.refpath, data_name + '_semiAuto_results.mhd')
                assert (os.path.exists(label_path) == True)

                # load original CT image
                imgs, origin, spacing = load_itk_image(raw_path)
                # lumTrans: intensity transform — presumably maps to [0, 255],
                # given the /255.0 normalization in __getitem__; verify in utils
                imgs = lumTrans(imgs)
                splits, nzhw, orgshape = self.split_comber.split_id(imgs)
                print("Name: %s, # of splits: %d" % (data_name, len(splits)))
                allimgdata_memory[data_name] = [imgs, origin, spacing]

                # load artery-vein label
                labels, _, _ = load_itk_image(label_path)
                # only 4 labels: 0 for bg, 1 for artery, 2 for vein, 3 for non-label
                labels[labels < 0] = 3
                labels[labels > 3] = 3
                alllabeldata_memory[data_name] = labels

                # load airway context map
                awlabel = glob(os.path.join(self.lungairwaypath, data_name + '*mask.nii.gz'))
                assert (len(awlabel) == 1)
                awlabel = awlabel[0]
                curairway, _, _ = load_itk_image(awlabel)
                allcontext_memory[data_name] = curairway

                # load airway distance transform map
                awdt = glob(os.path.join(self.airwaypath, data_name + '*-pred-dilation-dt.nii.gz'))
                assert (len(awdt) == 1)
                awdt = awdt[0]
                curairwaydt, _, _ = load_itk_image(awdt)
                alldt_memory[data_name] = curairwaydt

                cube_train = []

                for j in range(len(splits)):
                    """
					check if this cube is suitable
					"""
                    cursplit = splits[j]
                    labelcube = labels[cursplit[0][0]:cursplit[0][1], cursplit[1][0]:cursplit[1][1],
                                cursplit[2][0]:cursplit[2][1]]
                    curnumlabel = np.sum(labelcube == 1) + np.sum(labelcube == 2)

                    labellist.append(curnumlabel)
                    if curnumlabel > 0:  # keep only cubes containing at least one artery/vein voxel
                        # [case name, crop coords, split index, nzhw, original shape, augment flag]
                        curlist = [data_name, cursplit, j, nzhw, orgshape, 'Y']
                        cube_train.append(curlist)

                random.shuffle(cube_train)

                if self.rand_sel:
                    """
					only chooses random number of patches for training
					"""
                    # keep cubes grouped per case; __getitem__ samples within a case
                    cubelist.append(cube_train)
                else:
                    cubelist += cube_train

        else:
            if self.phase == 'val':
                data_file_names = self.dataset['val']
                file_num = len(data_file_names)
                if self.debug_flag:
                    data_file_names = data_file_names[:1]
                    file_num = len(data_file_names)
            else:
                data_file_names = self.dataset['test']
                file_num = len(data_file_names)

            # in case of memory limit, split into testing subsets
            # if self.debug_flag==0:
            # 	data_file_names = data_file_names[:2]
            # elif self.debug_flag==1:
            # 	data_file_names = data_file_names[2:4]
            # elif self.debug_flag==2:
            # 	data_file_names = data_file_names[4:6]
            # elif self.debug_flag==3:
            # 	data_file_names = data_file_names[6:8]
            # else:
            # 	data_file_names = data_file_names[8:]

            self.caseNumber += file_num
            print("total %s case number: %d" % (self.phase, self.caseNumber))

            for data_name in data_file_names:
                raw_path = glob(os.path.join(self.rawpath, data_name + '*.mhd'))
                assert (len(raw_path) == 1)
                raw_path = raw_path[0]
                assert (os.path.exists(raw_path) is True)

                # prefer the full manual annotation; fall back to semi-automatic results
                label_path = os.path.join(self.fullpath, data_name + '_fullAnnotations.mhd')
                if not os.path.exists(label_path):
                    label_path = os.path.join(self.refpath, data_name + '_semiAuto_results.mhd')
                assert (os.path.exists(label_path) is True)

                # load original CT image
                imgs, origin, spacing = load_itk_image(raw_path)
                imgs = lumTrans(imgs)
                splits, nzhw, orgshape = self.split_comber.split_id(imgs)
                allimgdata_memory[data_name] = [imgs, origin, spacing]
                print("Name: %s, # of splits: %d" % (data_name, len(splits)))

                # load artery-vein label
                labels, _, _ = load_itk_image(label_path)
                # only 4 labels: 0 for bg, 1 for artery, 2 for vein, 3 for non-label
                # NOTE(review): float literals (3.) here vs ints (3) in the train
                # branch — harmless for the comparisons below, but inconsistent
                labels[labels < 0] = 3.
                labels[labels > 3] = 3.
                alllabeldata_memory[data_name] = labels

                # load airway context map
                awlabel = glob(os.path.join(self.lungairwaypath, data_name + '*mask.nii.gz'))
                assert (len(awlabel) == 1)
                awlabel = awlabel[0]
                curairway, _, _ = load_itk_image(awlabel)
                allcontext_memory[data_name] = curairway

                # load airway distance transform map
                awdt = glob(os.path.join(self.airwaypath, data_name + '*-pred-dilation-dt.nii.gz'))
                assert (len(awdt) == 1)
                awdt = awdt[0]
                curairwaydt, _, _ = load_itk_image(awdt)
                alldt_memory[data_name] = curairwaydt

                # keep every cube ('N' = no augmentation at inference time)
                for j in range(len(splits)):
                    cursplit = splits[j]
                    curlist = [data_name, cursplit, j, nzhw, orgshape, 'N']
                    cubelist.append(curlist)

        self.allimgdata_memory = allimgdata_memory
        self.alllabeldata_memory = alllabeldata_memory
        self.allcontext_memory = allcontext_memory
        self.alldt_memory = alldt_memory

        if self.rand_sel and self.phase == 'train':
            # one entry per case (each entry is that case's cube list)
            assert (len(cubelist) == self.caseNumber)
            mean_labelnum = np.mean(np.array(labellist))
            print('mean label number: %d' % (mean_labelnum))
            print('total patches: ', self.patch_per_case * self.caseNumber)

        random.shuffle(cubelist)
        self.cubelist = cubelist

        print('---------------------Initialization Done---------------------')
        print('Phase: %s total cubelist number: %d' % (self.phase, len(self.cubelist)))
        print()

    def __len__(self):
        """
        :return: epoch length — ``patch_per_case * caseNumber`` when random
            per-case selection is active (train), otherwise the number of cubes
        """
        if self.phase == 'train' and self.rand_sel:
            return self.patch_per_case * self.caseNumber
        else:
            return len(self.cubelist)

    def __getitem__(self, idx):
        """
        :param idx: index of the patch within the epoch
        :return: tuple of tensors (input cube with airway-context and
            distance-transform channels, label, coordinate grid, binary vessel
            mask, loss mask, airway cube, origin, spacing) plus name, split id,
            nzhw and original shape for recombination
        """
        t = time.time()
        # re-seed numpy per item so dataloader workers do not share RNG streams
        # NOTE(review): str(t % 1) can be in scientific notation for tiny
        # fractions; the digit-slice seed derivation assumes a plain decimal —
        # confirm this never produces a negative/invalid seed
        np.random.seed(int(str(t % 1)[2:7]))  # seed according to time

        if self.phase == 'train' and self.rand_sel:
            # pick a random cube from the case this index maps to
            caseID = idx // self.patch_per_case
            caseSplit = self.cubelist[caseID]
            np.random.shuffle(caseSplit)
            curlist = caseSplit[0]
            assert (len(curlist) == 6)
        else:
            curlist = self.cubelist[idx]

        # [case name, crop coords, split index, nzhw, original shape, augment flag]
        curNameID = curlist[0]
        cursplit = curlist[1]
        curSplitID = curlist[2]
        curnzhw = curlist[3]
        curShapeOrg = curlist[4]
        curtransFlag = curlist[5]

        if self.phase == 'train' and curtransFlag == 'Y' and self.augtype['split_jitter'] is True:
            # random jittering during the training
            cursplit = augment_split_jittering(cursplit, curShapeOrg)

        imginfo = self.allimgdata_memory[curNameID]
        imgs, origin, spacing = imginfo[0], imginfo[1], imginfo[2]

        curairway = self.allcontext_memory[curNameID]
        # context-map value 3 appears to encode the lung field — TODO confirm
        # against the map-generation pipeline
        curlungregion = (curairway == 3)
        curlungregion = curlungregion.astype('float')
        curairwaydt = self.alldt_memory[curNameID]
        # distance map presumably stored in [0, 255]; normalize to [0, 1]
        curairwaydt = (curairwaydt.astype('float')) / 255.0
        curairwaydt *= curlungregion  # only mask the lung field region of distance map
        ####################################################################
        curcube = imgs[cursplit[0][0]:cursplit[0][1], cursplit[1][0]:cursplit[1][1], cursplit[2][0]:cursplit[2][1]]
        curairway = curairway[cursplit[0][0]:cursplit[0][1], cursplit[1][0]:cursplit[1][1],
                    cursplit[2][0]:cursplit[2][1]]
        curairwaydt = curairwaydt[cursplit[0][0]:cursplit[0][1], cursplit[1][0]:cursplit[1][1],
                      cursplit[2][0]:cursplit[2][1]]
        ####################################################################
        curcube = (curcube.astype(np.float32)) / 255.0
        curairway = curairway.astype('float')
        ####################################################################
        ###calculate the coordinate for coordinate-aware convolution
        # each voxel gets its position in the ORIGINAL volume, normalized to [-1, 1]
        start = [float(cursplit[0][0]), float(cursplit[1][0]), float(cursplit[2][0])]
        normstart = ((np.array(start).astype('float') / np.array(curShapeOrg).astype('float')) - 0.5) * 2.0
        crop_size = [curcube.shape[0], curcube.shape[1], curcube.shape[2]]
        stride = 1.0  # NOTE(review): unused local
        normsize = (np.array(crop_size).astype('float') / np.array(curShapeOrg).astype('float')) * 2.0
        xx, yy, zz = np.meshgrid(np.linspace(normstart[0], normstart[0] + normsize[0], int(crop_size[0])), \
                                 np.linspace(normstart[1], normstart[1] + normsize[1], int(crop_size[1])), \
                                 np.linspace(normstart[2], normstart[2] + normsize[2], int(crop_size[2])),
                                 indexing='ij')
        coord = np.concatenate([xx[np.newaxis, ...], yy[np.newaxis, ...], zz[np.newaxis, :]], 0).astype('float')
        assert (coord.shape[0] == 3)
        ####################################################################
        label = self.alllabeldata_memory[curNameID]
        label = label.astype('float')
        label = label[cursplit[0][0]:cursplit[0][1], cursplit[1][0]:cursplit[1][1], cursplit[2][0]:cursplit[2][1]]
        ####################################################################
        curNameID = [curNameID]
        curSplitID = [curSplitID]
        curnzhw = np.array(curnzhw)
        curShapeOrg = np.array(curShapeOrg)
        #####################################################################
        if self.phase == 'train' and curtransFlag == 'Y':
            curcube, label, coord, curairway, curairwaydt = augment(curcube, label, coord, curairway, curairwaydt, \
                                                                    ifflip=self.augtype['flip'],
                                                                    ifswap=self.augtype['swap'], \
                                                                    ifsmooth=self.augtype['smooth'],
                                                                    ifjitter=self.augtype['jitter'])

        # process the label
        # Background: 0
        # Artery: 1
        # Vein: 2
        # Non-label: 3
        labelbw = (label > 0)  # binary "any vessel" mask
        labelbw = labelbw.astype('float')
        mask = deepcopy(label)
        mask = (mask != 3)  # loss mask: exclude the non-label region
        assert (mask.shape[1] == label.shape[1] and mask.shape[0] == label.shape[0] and mask.shape[2] == label.shape[2])
        mask = mask.astype('float')
        label[label == 3] = 0.  # fold non-label voxels into background

        curcube = curcube[np.newaxis, ...]
        curairway = curairway[np.newaxis, ...]
        curairwaydt = curairwaydt[np.newaxis, ...]
        # concatenate context transform map and distance transform map with ct image as inputs
        curcube = np.concatenate([curcube, curairway, curairwaydt], axis=0)
        label = label[np.newaxis, ...]
        labelbw = labelbw[np.newaxis, ...]
        mask = mask[np.newaxis, ...]

        return torch.from_numpy(curcube).float(), torch.from_numpy(label).long(), \
               torch.from_numpy(coord).float(), torch.from_numpy(labelbw).float(), \
               torch.from_numpy(mask).float(), torch.from_numpy(curairway).float(), \
               torch.from_numpy(origin), torch.from_numpy(spacing), curNameID, curSplitID, \
               torch.from_numpy(curnzhw), torch.from_numpy(curShapeOrg)


def _axis_jitter(start, end, axis_len, jitter_range, jitter_range_half):
    """Draw a random jitter offset for one axis (up to 10 attempts).

    Windows touching the lower volume border (start == 0) are only shifted
    forward; windows touching the upper border (end == axis_len) only
    backward; interior windows are shifted symmetrically around zero.

    :param start: window start index on this axis
    :param end: window end index (exclusive) on this axis
    :param axis_len: full volume extent on this axis
    :param jitter_range: magnitude of the uniform jitter draw
    :param jitter_range_half: offset used to center interior-window draws
    :return: the last drawn offset; it may still be out of range if no valid
        offset was found within 10 attempts (caller must re-check validity)
    """
    jitter = 0
    for _ in range(10):
        if start == 0:
            jitter = int(np.random.rand() * jitter_range)
        elif end == axis_len:
            jitter = -int(np.random.rand() * jitter_range)
        else:
            jitter = int(np.random.rand() * jitter_range) - jitter_range_half
        if (jitter + start >= 0) and (jitter + end < axis_len):
            break
    return jitter


def augment_split_jittering(cursplit, curShapeOrg):
    """Randomly shift the crop window ``cursplit`` while keeping it inside the volume.

    :param cursplit: [[zstart, zend], [hstart, hend], [wstart, wend]] crop
        coordinates; modified in place and also returned
    :param curShapeOrg: original volume shape [z, h, w]
    :return: the (possibly) jittered ``cursplit``
    """
    zstart, zend = cursplit[0][0], cursplit[0][1]
    # NOTE(review): the jitter range is derived from the z extent only and is
    # reused for all three axes — fine for cubic patches, verify otherwise.
    # Very thin windows (<= 3 slices) get a proportionally larger range.
    if zend - zstart <= 3:
        jitter_range = (zend - zstart) * 32
    else:
        jitter_range = (zend - zstart) * 2
    jitter_range_half = jitter_range // 2

    for axis in range(3):
        start, end = cursplit[axis][0], cursplit[axis][1]
        jitter = _axis_jitter(start, end, curShapeOrg[axis], jitter_range, jitter_range_half)
        # apply only when the shifted window still fits inside the volume
        if (jitter + start >= 0) and (jitter + end < curShapeOrg[axis]):
            cursplit[axis][0] = jitter + start
            cursplit[axis][1] = jitter + end
    return cursplit


def augment(sample, label, coord=None, curairway=None, curairwaydt=None, ifflip=True, ifswap=False, ifsmooth=False,
            ifjitter=False):
    """Apply random training-time augmentations to one cropped cube.

    :param sample: cropped CT cube (3D array, intensities in [0, 1])
    :param label: corresponding ground-truth cube
    :param coord: corresponding normalized-coordinate grid, shape (3, z, h, w)
    :param curairway: corresponding airway context cube
    :param curairwaydt: corresponding airway distance-transform cube
    :param ifflip: flag for random flipping along the last axis
    :param ifswap: flag for random axis permutation (cubic cubes only)
    :param ifsmooth: flag for occasional Gaussian smoothing of the CT cube
    :param ifjitter: flag for occasional intensity jittering inside vessels
    :return: tuple (sample, label, coord, curairway, curairwaydt)
    """
    # Axis permutation only makes sense when the cube is isotropic.
    if ifswap and sample.shape[0] == sample.shape[1] == sample.shape[2]:
        order = np.random.permutation(3)
        sample = np.transpose(sample, order)
        label = np.transpose(label, order)
        curairway = np.transpose(curairway, order)
        curairwaydt = np.transpose(curairwaydt, order)
        # the coordinate grid keeps its leading channel axis fixed
        coord = np.transpose(coord, np.concatenate([[0], order + 1]))

    if ifflip:
        # direction is +1 (identity) or -1 (reverse the last axis)
        direction = np.random.randint(2) * 2 - 1
        rev = slice(None, None, direction)
        sample = np.ascontiguousarray(sample[:, :, rev])
        label = np.ascontiguousarray(label[:, :, rev])
        curairway = np.ascontiguousarray(curairway[:, :, rev])
        curairwaydt = np.ascontiguousarray(curairwaydt[:, :, rev])
        coord = np.ascontiguousarray(coord[:, :, :, rev])

    # Lumen intensity jittering: add small random noise inside labeled vessels
    # (applied ~20% of the time).
    roll = random.random()
    if ifjitter and roll > 0.8:
        noise = (np.random.rand(sample.shape[0], sample.shape[1], sample.shape[2]) * 2 - 1) * 10
        noise = noise.astype('float')
        vessel = (label > 0.)
        vessel = vessel.astype('float')
        sample += vessel * noise / 255.0
        sample[sample < 0] = 0
        sample[sample > 1] = 1

    # Occasional Gaussian smoothing of the CT cube (~10% of the time overall).
    roll = random.random()
    if ifsmooth and roll > 0.8:
        if np.random.rand() > 0.5:
            sample = gaussian_filter(sample, sigma=1.0)

    return sample, label, coord, curairway, curairwaydt
