# -*- coding: utf-8 -*-

'''
Validate HDenseUNet (H-DenseUNet segmentation checkpoints on the 3Dircadb liver dataset).
'''


import sys

import click
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from pathlib2 import Path
from tensorboardX import SummaryWriter
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from tqdm import tqdm

import utils.checkpoint as cp
from dataset import KiTS19,KiTS19Path
from dataset.transform import CropTransform
from loss import GeneralizedDiceLoss
from loss.util import class2one_hot
from network.HDenseUnetV2 import DenseUNet
from utils.metrics import Evaluator
from utils.vis import imshow
from post_processing import select_tumor


def evaluation(net, dataset, batch_size: int, num_workers: int, type: str):
    """Run one full pass over a dataset split and print segmentation metrics.

    Slices are consumed in sequential order, buffered until every slice of a
    case (one 3-D volume) has been seen, and then scored per case.

    Args:
        net: CUDA segmentation network; called as ``net(imgs)`` and expected
            to return a dict whose ``"output"`` entry holds the class logits.
        dataset: KiTS19-style dataset exposing ``train_dataset`` /
            ``valid_dataset`` plus the matching ``*_case_slice_indices``.
        batch_size: number of slices per forward pass.
        num_workers: DataLoader worker processes.
        type: split selector, ``'train'`` or ``'valid'`` (case-insensitive).
            NOTE(review): any other value leaves ``subset`` unbound and the
            function fails with ``UnboundLocalError``; the name also shadows
            the ``type`` builtin.

    Returns:
        The mean cross-entropy loss over all batches (numpy scalar).
    """
    type = type.lower()
    if type == 'train':
        subset = dataset.train_dataset
        case_slice_indices = dataset.train_case_slice_indices
    elif type == 'valid':
        subset = dataset.valid_dataset
        case_slice_indices = dataset.valid_case_slice_indices

    # Sequential sampling is essential: slices of one case must arrive
    # contiguously so the buffers below can be cut into whole volumes.
    sampler = SequentialSampler(subset)
    data_loader = DataLoader(subset, batch_size=batch_size, sampler=sampler,
                             num_workers=num_workers, pin_memory=True)
    evaluator = Evaluator(dataset.num_classes)

    # Class-weighted CE over the 3 classes; the last (tumor) class is heavily
    # up-weighted -- presumably to counter class imbalance.  TODO confirm the
    # weights match the ones used at training time.
    criterion1 = torch.nn.CrossEntropyLoss(weight=torch.tensor((0.78, 0.65, 8.57), device='cuda'), reduction='mean').cuda()

    loss_list = []

    # Per-case accumulation state: batches are appended and flushed once all
    # slices of a case have been buffered.
    case = 0
    vol_label = []
    vol_output = []
    vol_images = []
    vol_prob = []
    result_map = {}  # ground-truth tumor voxel count -> [case id, metrics...]

    dice1_all = []  # per-case dice, classes 1 and 2 merged (whole liver)
    dice2_all = []  # per-case dice, class 2 only (tumor)

    with tqdm(total=len(case_slice_indices) - 1, ascii=True, desc=f'eval/{type:5}', dynamic_ncols=True) as pbar:
        for batch_idx, data in enumerate(data_loader):
            imgs, labels, idx = data['image'].cuda(), data['label'].cuda(), data['index']

            outputs = net(imgs)
            outputs = outputs["output"]
            loss = criterion1(outputs, labels)
            loss_list.append(loss.item())
            outputs = F.softmax(outputs, dim=1)
            prob = outputs[:, 2, :, :]  # per-pixel probability of class 2 (tumor)
            outputs = outputs.argmax(dim=1)  # hard per-pixel class prediction


            labels = labels.cpu().detach().numpy()
            outputs = outputs.cpu().detach().numpy()
            imgs = imgs.cpu().detach().numpy()
            prob = prob.cpu().detach().numpy()
            idx = idx.numpy()


            vol_label.append(labels)
            vol_output.append(outputs)
            vol_images.append(imgs)
            vol_prob.append(prob)

            # Flush every case whose last slice index is already covered by
            # the buffered batches (a batch can straddle case boundaries).
            while case < len(case_slice_indices) - 1 and idx[-1] >= case_slice_indices[case + 1] - 1:
                vol_output = np.concatenate(vol_output, axis=0)
                vol_label = np.concatenate(vol_label, axis=0)
                vol_images = np.concatenate(vol_images, axis=0)
                vol_prob = np.concatenate(vol_prob, axis=0)

                vol_num_slice = case_slice_indices[case + 1] - case_slice_indices[case]
                evaluator.add(vol_output[:vol_num_slice], vol_label[:vol_num_slice])

                output_case = vol_output[:vol_num_slice]
                label_case = vol_label[:vol_num_slice]
                images_case = vol_images[:vol_num_slice]
                prob_case = vol_prob[:vol_num_slice]

                # Post-process the tumor prediction for this case.
                output_case = select_tumor(output_case, label_case, prob_case)
                #### Collect some per-case metrics

                # Binary whole-liver masks: classes 1 and 2 merged.
                output_case1 = np.zeros(output_case.shape)
                label_case1 = np.zeros(label_case.shape)
                output_case1[output_case == 1] = 1
                output_case1[output_case == 2] = 1
                label_case1[label_case == 1] = 1
                label_case1[label_case == 2] = 1
                # NOTE(review): these ratios divide by zero (yielding NaN)
                # when the corresponding mask is empty for a case.
                dice1 = 2 * (output_case1*label_case1).sum() / (output_case1.sum() + label_case1.sum())
                precision1 = (output_case1[label_case1 == 1] == 1).sum() / (output_case1 == 1).sum()
                recall1 = (output_case1[label_case1 == 1] == 1).sum() / (label_case1 == 1).sum()

                # Binary tumor masks: class 2 only.
                output_case2 = np.zeros(output_case.shape)
                label_case2 = np.zeros(label_case.shape)
                output_case2[output_case == 2] = 1
                label_case2[label_case == 2] = 1
                dice2 = 2 * (output_case2 * label_case2).sum() / (output_case2.sum() + label_case2.sum())
                precision2 = (output_case2[label_case2 == 1] == 1).sum() / (output_case2 == 1).sum()
                recall2 = (output_case2[label_case2 == 1] == 1).sum() / (label_case2 == 1).sum()

                # Keyed by tumor voxel count so results can be ranked by tumor
                # size below.  NOTE(review): two cases with the same voxel
                # count overwrite each other.  The +35 offset presumably maps
                # the local case index to a global case id -- TODO confirm.
                result_map[label_case2.sum()] = [case+35,dice1, dice2, precision1, recall1,
                                                 precision2, recall2]

                dice1_all.append(dice1)
                dice2_all.append(dice2)



                # Keep only the leftover slices belonging to the next case.
                vol_output = [vol_output[vol_num_slice:]]
                vol_label = [vol_label[vol_num_slice:]]
                vol_images = [vol_images[vol_num_slice:]]
                vol_prob = [vol_prob[vol_num_slice:]]

                case += 1
                pbar.update(1)

    acc = evaluator.eval()


    # Print the scalar metrics produced by the Evaluator.
    for k in sorted(list(acc.keys())):
        if k == 'dc_each_case': continue
        print(f'{type}/{k}: {acc[k]:.5f}')


    # NOTE(review): this loop computes case_id/dc but never uses them --
    # effectively dead code kept for reference.
    for case_idx in range(len(acc['dc_each_case'])):
        case_id = dataset.case_idx_to_case_id(case_idx, type)
        dc_each_case = acc['dc_each_case'][case_idx]
        for cls in range(len(dc_each_case)):
            dc = dc_each_case[cls]


    # Per-case dice buckets by tumor voxel count (>=1e6, >=1e5, >=1e4, rest).
    # NOTE(review): the buckets are filled but never reported afterwards.
    tumor_dice_100w = []
    liver_dice_100w = []
    tumor_dice_10w = []
    liver_dice_10w = []
    tumor_dice_1w = []
    liver_dice_1w = []
    tumor_dice_small = []
    liver_dice_small = []


    # Dump per-case results ordered by decreasing tumor size.
    for key in sorted(result_map.keys(), reverse=True):
        print(key, result_map[key])
        tumor_dice = result_map[key][2]
        liver_dice = result_map[key][1]
        if key >= 1000000:
            tumor_dice_100w.append(tumor_dice)
            liver_dice_100w.append(liver_dice)
        elif key >= 100000:
            tumor_dice_10w.append(tumor_dice)
            liver_dice_10w.append(liver_dice)
        elif key >= 10000:
            tumor_dice_1w.append(tumor_dice)
            liver_dice_1w.append(liver_dice)
        else:
            tumor_dice_small.append(tumor_dice)
            liver_dice_small.append(liver_dice)


    # Mean per-case dice (liver, tumor) and the mean batch loss.
    print(np.mean(np.array(dice1_all)), np.mean(np.array(dice2_all)))
    print(type, "loss:", np.mean(np.array(loss_list)))
    # score = (acc['dc_per_case_1'] + acc["dc_per_case_2"]) / 2
    return np.mean(np.array(loss_list))

# Segmentation test based on the original (full-size) images
def validate_evaluation_origion_image():
    """Sweep the 2-D DenseUNet checkpoints of epochs 3-49 on the validation
    split and report the epoch with the lowest mean loss.
    """
    dataset = KiTS19("/datasets/3Dircadb/chengkun_only_liver", stack_num=0, spec_classes=[0, 1, 2], img_size=(512, 512),
                     use_roi=False, roi_file=None, roi_error_range=5,
                     train_transform=None, valid_transform=None)

    ckpt_template = ("/home/diaozhaoshuo/log/BeliefFunctionNN/chengkung/3dircad/hdenseunet/dense_unet_2d/epoll_{}.pkl")

    best_loss = 100000
    best_epoch = 0
    for epoch in range(3, 50):
        print(epoch)
        # Each checkpoint is a fully serialized model object, not a state_dict.
        model = torch.load(ckpt_template.format(str(epoch).zfill(3)))
        model.eval()
        epoch_loss = evaluation(model, dataset, 2, 1, type='valid')
        if epoch_loss < best_loss:
            best_epoch = epoch
            best_loss = epoch_loss

    print(best_epoch, best_loss)

def validate_evaluation_origion_imagev2():
    """Evaluate one fixed 2-D DenseUNet checkpoint (epoch 84) on the
    validation split of the 3Dircadb liver dataset.

    Prints the metrics produced by :func:`evaluation`; the returned loss is
    intentionally discarded.
    """
    dataset = KiTS19("/datasets/3Dircadb/chengkun_only_liver", stack_num=0, spec_classes=[0, 1, 2], img_size=(512, 512),
                     use_roi=False, roi_file=None, roi_error_range=5,
                     train_transform=None, valid_transform=None)

    # Fix: dropped the unused min_loss/min_idx locals left over from the
    # checkpoint-sweep variant of this function.
    # The checkpoint is a fully serialized model object, not a state_dict.
    model_01 = torch.load("/home/diaozhaoshuo/log/BeliefFunctionNN/chengkung/3dircad/hdenseunet/dense_unet_2d_old1/epoll_084.pkl")
    model_01.eval()
    evaluation(model_01, dataset, 2, 1, type='valid')



def validate_denseunet_3d():
    """Smoke-test the stacked-slice training pipeline by iterating the
    DataLoader and printing each input tensor's shape.

    NOTE(review): the two loaded models are never invoked below -- the
    function currently only exercises the data loading path.
    """
    # Fully serialized model objects loaded from disk (not state_dicts).
    model_3d = torch.load("/home/diaozhaoshuo/log/BeliefFunctionNN/chengkung/3dircad/hdenseunet/dense_unet_3d/epoll_006.pkl").cuda()
    model_2d = torch.load("/home/diaozhaoshuo/log/BeliefFunctionNN/chengkung/3dircad/hdenseunet/dense_unet_2d_old1/epoll_032.pkl").cuda()

    # stack_num=7: each sample bundles 7 neighbouring slices.
    dataset = KiTS19("/datasets/3Dircadb/chengkun_only_liver", stack_num=7, spec_classes=[0, 1, 2], img_size=(512, 512),
                     use_roi=False, roi_file=None, roi_error_range=5,
                     train_transform=None, valid_transform=None)

    subset = dataset.train_dataset
    case_slice_indices = dataset.train_case_slice_indices  # NOTE(review): unused

    sampler = SequentialSampler(subset)
    data_loader = DataLoader(subset, batch_size=1, sampler=sampler,
                             num_workers=1, pin_memory=True)
    for index,item in enumerate(data_loader):
           data = item["image"].cuda()
           print(data.shape)



def _binary_mask_metrics(pred_mask, true_mask):
    """Return (dice, precision, recall) for two binary {0, 1} float masks.

    NOTE: any of the three values is NaN when the corresponding denominator
    mask is empty (0/0), matching the original inline computation.
    """
    dice = 2 * (pred_mask * true_mask).sum() / (pred_mask.sum() + true_mask.sum())
    precision = (pred_mask[true_mask == 1] == 1).sum() / (pred_mask == 1).sum()
    recall = (pred_mask[true_mask == 1] == 1).sum() / (true_mask == 1).sum()
    return dice, precision, recall


def validate_hdenseunet():
    """Evaluate the fused H-DenseUNet checkpoint on the validation split.

    Runs the model over sliding 8-slice windows (stack_num=8), averages the
    overlapping per-class probabilities into one volume, and prints dice /
    precision / recall for the whole liver (classes 1+2) and the tumor
    (class 2).
    """
    dataset = KiTS19Path("/datasets/3Dircadb/chengkun_only_liver", stack_num=8, spec_classes=[0, 1, 2], img_size=(512, 512),
                     use_roi=False, roi_file=None, roi_error_range=5,
                     train_transform=None, valid_transform=None)
    subset = dataset.valid_dataset

    # Fully serialized model object (not a state_dict).
    model = torch.load("/home/diaozhaoshuo/log/BeliefFunctionNN/chengkung/3dircad/hdenseunet/dense_unet_fusion/epoll_005.pkl").cuda()
    model.eval()

    sampler = SequentialSampler(subset)
    data_loader = DataLoader(subset, batch_size=1, sampler=sampler,
                             num_workers=1, pin_memory=True)
    tbar = tqdm(data_loader, ascii=True, desc='train', dynamic_ncols=True)
    num_slices = len(tbar)

    # Sliding-window accumulators: per-class probability sums, how many
    # windows covered each slice, and the ground-truth masks.
    predict = np.zeros((3, num_slices, 512, 512))
    count = np.zeros((3, num_slices, 512, 512))
    mask_list = np.zeros((num_slices, 512, 512))

    for batch_idx, item in enumerate(tbar):
        # Skip positions whose 8-slice window would run past either end.
        if batch_idx < 4 or batch_idx > num_slices - 4:
            continue

        data = item["image"].cuda()
        mask = item["label"].squeeze()
        with torch.no_grad():
            # assumes the squeezed output is (3, 8, 512, 512) -- TODO confirm
            output = model(data).squeeze()

        output = torch.softmax(output, dim=0)
        # Index 4 is the window's central slice, aligned with batch_idx.
        mask_list[batch_idx] += mask[4].numpy()

        output = output.cpu().numpy()
        predict[:, batch_idx - 4:batch_idx + 4, :, :] += output
        count[:, batch_idx - 4:batch_idx + 4, :, :] += 1

    # Average the overlapping windows.  Fix: the boundary slices skipped
    # above have count == 0, so a plain predict/count produced 0/0 NaNs (and
    # a RuntimeWarning); clamping the divisor leaves their probabilities at
    # 0 and argmax falls back to background, as before.
    predict = predict / np.maximum(count, 1)

    output_case = predict.argmax(axis=0)
    label_case = mask_list
    prob_case = predict[2, :, :]  # tumor probability, for the disabled post-processing

    # output_case = select_tumor(output_case, label_case, prob_case)
    # output_case[output_case == 2] = 1
    # output_case[prob_case >= 0.7] = 2

    # Whole-liver metrics: classes 1 (liver) and 2 (tumor) merged.
    liver_pred = np.zeros(output_case.shape)
    liver_true = np.zeros(label_case.shape)
    liver_pred[output_case == 1] = 1
    liver_pred[output_case == 2] = 1
    liver_true[label_case == 1] = 1
    liver_true[label_case == 2] = 1
    dice1, precision1, recall1 = _binary_mask_metrics(liver_pred, liver_true)

    # Tumor metrics: class 2 only.
    tumor_pred = np.zeros(output_case.shape)
    tumor_true = np.zeros(label_case.shape)
    tumor_pred[output_case == 2] = 1
    tumor_true[label_case == 2] = 1
    dice2, precision2, recall2 = _binary_mask_metrics(tumor_pred, tumor_true)

    print(dice1, dice2, precision1, recall1, precision2, recall2)




if __name__ == '__main__':
    # Script entry point: evaluate the fused H-DenseUNet checkpoint.
    validate_hdenseunet()