import os
import time

import numpy as np
import torch
from skimage import measure
from torch.cuda import empty_cache
from tqdm import tqdm

from inner_utils import save_itk, combine_total_avg


def test_casenet(model, data_loader, args, data_dir, save_dir):
    """
    Run sliding-window inference over a test set and write segmentation volumes.

    For every case the loader yields pre-cropped cubes; predictions are gathered
    per case, stitched back together with ``combine_total_avg``, thresholded,
    post-processed (largest connected component), and written out both in the
    cropped coordinate frame and re-embedded into the original scan shape.

    :param model: CNNs model (called as ``model(x, coord)``; attention maps are
                  returned but unused here)
    :param data_loader: testing dataset loader yielding
                        (x, coord, org, spac, NameID, SplitID, nzhw, ShapeOrg)
    :param args: global arguments; uses ``threshold``, ``stridev``, ``cubesize``
    :param data_dir: input CT data directory (its parent holds the *_box.npy /
                     *_shapeorg.npy metadata files)
    :param save_dir: output save directory
    :return: None
    """
    model.eval()
    th_bin = args.threshold
    sidelen = args.stridev
    margin = args.cubesize
    starttime = time.time()
    p_total = {}
    # Metadata (*_box.npy, *_shapeorg.npy) lives in the parent of data_dir.
    workpath = data_dir.split('/')[:-1]
    workpath = '/'.join(workpath)
    #################
    with torch.no_grad():
        for i, (x, coord, org, spac, NameID, SplitID, nzhw, ShapeOrg) in enumerate(tqdm(data_loader)):
            ######Wrap Tensor##########
            # NameID/SplitID arrive wrapped in an extra batch dimension by the
            # default collate; unwrap once per batch.
            NameID = NameID[0]
            SplitID = SplitID[0]
            batchlen = x.size(0)
            x = x.cuda()
            coord = coord.cuda()
            casePred, attentions = model(x, coord)
            #######################################################################
            outdata = casePred.cpu().data.numpy()
            origindata = org.numpy()
            spacingdata = spac.numpy()
            #######################################################################
            #################REARRANGE THE DATA BY SPLIT ID########################
            for j in range(batchlen):
                segpred = outdata[j, 0]
                curorigin = origindata[j].tolist()
                curspacing = spacingdata[j].tolist()
                cursplitID = int(SplitID[j])
                curName = NameID[j]
                curnzhw = nzhw[j]
                curshape = ShapeOrg[j]

                if curName not in p_total:
                    p_total[curName] = []

                curpinfo = [segpred, cursplitID, curnzhw, curshape, curorigin, curspacing]
                p_total[curName].append(curpinfo)

    #################
    for curName in p_total.keys():
        curp = p_total[curName]
        ######################combine all splited cubes###############################
        p_combine, porigin, pspacing = combine_total_avg(curp, sidelen, margin)
        p_combine_bw = (p_combine > th_bin)
        # Keep a copy of the un-post-processed ("nopp") binary mask before the
        # largest-connected-component filter mutates p_combine_bw.
        p_combine_bw_raw = p_combine_bw.copy()

        curpredpath = os.path.join(save_dir, '%s_airway.nii.gz' % (curName.split('_clean')[0]))
        curpredorgpath = os.path.join(save_dir, '%s_airway_large.nii.gz' % (curName.split('_clean')[0]))
        curpredrawpath = os.path.join(save_dir, '%s_airway_nopp.nii.gz' % (curName.split('_clean')[0]))
        curpredorgrawpath = os.path.join(save_dir, '%s_airway_large_nopp.nii.gz' % (curName.split('_clean')[0]))
        curpredprobpath = os.path.join(save_dir, '%s_airway_prob.nii.gz' % (curName.split('_clean')[0]))
        curpredorgprobpath = os.path.join(save_dir, '%s_airway_prob.mhd' % (curName.split('_clean')[0]))

        save_itk(p_combine_bw_raw.astype(dtype='uint8'), porigin, pspacing, curpredrawpath)
        # Process the prediction results with the largest connected component
        labels, num_comp = measure.label(p_combine_bw, return_num=True, connectivity=3)
        if num_comp > 0:
            props = measure.regionprops(labels)
            maxID = max(props, key=lambda prop: prop.area).label
            p_combine_bw = (labels == maxID)
        # else: empty prediction — keep the all-zero mask instead of crashing
        # on an empty regionprops list.
        save_itk(p_combine_bw.astype(dtype='uint8'), porigin, pspacing, curpredpath)  # binary segmentation

        # Scale probabilities to [0, 255] so they survive the uint8 cast.
        p_combine256 = p_combine * 255
        save_itk(p_combine256.astype(dtype='uint8'), porigin, pspacing, curpredprobpath)  # probability segmentation

        box_path = os.path.join(workpath, curName.replace('_clean_hu', '_box.npy'))
        if not os.path.exists(box_path):
            # Explicit raise instead of assert: asserts vanish under `python -O`.
            raise FileNotFoundError('crop-box metadata not found: %s' % box_path)
        box = np.load(box_path)

        shape_org = os.path.join(workpath, curName.replace('_clean_hu', '_shapeorg.npy'))
        if os.path.exists(shape_org) is True:
            shape_org = np.load(shape_org)
            fullsize = np.zeros([shape_org[0], shape_org[1], shape_org[2]])
            fullsize2 = np.zeros([shape_org[0], shape_org[1], shape_org[2]])
        else:
            # Fall back to the original shape recorded in the crop box.
            fullsize = np.zeros([box[3, 1], box[4, 1], box[5, 1]])
            fullsize2 = np.zeros([box[3, 1], box[4, 1], box[5, 1]])

        # binary segmentation in original shape size
        fullsize[box[0, 0]:box[0, 1], box[1, 0]:box[1, 1], box[2, 0]:box[2, 1]] = p_combine_bw
        save_itk(fullsize.astype(dtype='ubyte'), porigin, pspacing, curpredorgpath)

        # probability segmentation in original shape size
        # BUGFIX: previously the probability volume was overwritten by the raw
        # binary mask BEFORE either save, so both outputs contained the mask.
        # Save the probability volume first, then reuse the buffer for the
        # raw (no post-processing) mask.
        fullsize2[box[0, 0]:box[0, 1], box[1, 0]:box[1, 1], box[2, 0]:box[2, 1]] = p_combine256
        save_itk(fullsize2.astype(dtype='uint8'), porigin, pspacing, curpredorgprobpath)

        # raw binary segmentation (no post-processing) in original shape size
        fullsize2[box[0, 0]:box[0, 1], box[1, 0]:box[1, 1], box[2, 0]:box[2, 1]] = p_combine_bw_raw
        save_itk(fullsize2.astype(dtype='uint8'), porigin, pspacing, curpredorgrawpath)

    endtime = time.time()
    print('Done, time %3.2f' % (endtime - starttime))
    print()
    empty_cache()
    return
