# Evaluation script for SIFA
import os
# os.environ['CUDA_VISIBLE_DEVICES'] = '0'
import numpy as np
import matplotlib
matplotlib.use('Agg')
from torch.utils.data import DataLoader
import torch
from model import SIFA
from dataset.dataset import SingleDataset
from metrics import dice_eval,assd_eval,create_visual_anno
import cv2
from dataset.dataset import parse_config
from tqdm import tqdm

# Load evaluation settings; the same config file is shared with training,
# which is why exp_name comes from the ['train'] section.
config = "./config/train4label.cfg"
config = parse_config(config)
exp_name = config['train']['exp_name']

def norm_01(image):
    """Min-max normalise *image* into [0, 1] as float32.

    Fixes two defects in the original:
    - precedence bug: ``(image-mn)/(mx-mn).astype(np.float32)`` cast only the
      denominator, so the returned array kept the input dtype (e.g. float64);
    - division by zero (nan/inf output) when the image is constant — a
      constant image now normalises to all zeros.
    """
    image = np.asarray(image, dtype=np.float32)
    mn = np.min(image)
    mx = np.max(image)
    rng = mx - mn
    if rng == 0:
        # Constant image: no contrast to stretch; return zeros of same shape.
        return np.zeros_like(image)
    return (image - mn) / rng
    
def save_img(image):
    """Rescale *image* to the 0-255 uint8 range expected by cv2.imwrite."""
    scaled = norm_01(image) * 255
    return scaled.astype(np.uint8)

    
# --- model & data setup ---
device = torch.device('cuda:{}'.format(config['test']['gpu']))
test_path = config['test']['test_path']
num_classes = config['test']['num_classes']
sifa_model = SIFA(config).to(device)
# Restore trained weights, remapping storage onto the evaluation GPU.
sifa_model.load_state_dict(torch.load('{}'.format(config['test']['test_model']),map_location=torch.device('cuda:{}'.format(config['test']['gpu']))))
sifa_model.eval()
#test dataset — shuffle=False so output/gt image indices stay reproducible
test_dataset = SingleDataset(test_path)
batch_size = config['test']['batch_size']
test_loader = DataLoader(test_dataset,batch_size,shuffle=False)

#test
# --- inference loop: segment each slice, save visualisations, collect metrics ---
all_batch_dice = []
all_batch_assd = []

# Output directory is loop-invariant: create it once up front instead of
# re-checking os.path.exists on every iteration.
results = config['test']['predict_image_path']
os.makedirs(results, exist_ok=True)

with torch.no_grad():
    for it, (xt, xt_label) in tqdm(enumerate(test_loader)):
        xt = xt.to(device)
        xt_label = xt_label.numpy().squeeze().astype(np.uint8)

        # Forward pass; argmax over the channel dim gives per-pixel labels.
        output = sifa_model.test_seg(xt).detach()
        output = output.squeeze(0)
        output = torch.argmax(output, dim=0)
        output = output.cpu().numpy()
        print(set(output.flatten()))  # debug: which classes were predicted

        xt = xt.detach().cpu().numpy().squeeze()
        # NOTE(review): assumes 256x256 slices — confirm against the dataset.
        gt = xt_label.reshape(256, 256).astype(np.uint8)
        output = output.squeeze()

        xt = save_img(xt)

        # Colour-coded annotation overlays for prediction and ground truth.
        output_vis = create_visual_anno(output)
        gt_vis = create_visual_anno(gt)
        cv2.imwrite('{}/xt-{}.jpg'.format(results, it + 1), xt)
        cv2.imwrite('{}/gt-{}.jpg'.format(results, it + 1), gt_vis)
        cv2.imwrite('{}/output-{}.jpg'.format(results, it + 1), output_vis)

        one_case_dice = dice_eval(output, xt_label, num_classes) * 100
        all_batch_dice.append(one_case_dice)
        # ASSD is undefined when a class is absent from prediction or GT, so
        # skip such cases — but only catch Exception: the original bare
        # `except:` also swallowed KeyboardInterrupt/SystemExit and real bugs.
        try:
            one_case_assd = assd_eval(output, xt_label, num_classes)
        except Exception:
            continue
        all_batch_assd.append(one_case_assd)
        
'''
    Label colour legend (create_visual_anno):
    spleen: 1        red
    right kidney: 2  green
    left kidney: 3   yellow
    liver: 4         light blue
'''

# --- aggregate per-case metrics and print a summary ---
all_batch_dice = np.array(all_batch_dice)
all_batch_assd = np.array(all_batch_assd)
mean_dice = np.mean(all_batch_dice,axis=0) 
std_dice = np.std(all_batch_dice,axis=0) 
mean_assd = np.mean(all_batch_assd,axis=0)
# Print per-case dice truncated to integers for compact inspection
# (the means above were already computed from the full-precision values).
all_batch_dice = np.array(all_batch_dice,dtype=np.int16)
print(all_batch_dice)
std_assd = np.std(all_batch_assd,axis=0)
print('-----------')
# NOTE(review): this header names cardiac classes (MYO/LV/RV) while the
# legend above describes abdominal organs (spleen/kidneys/liver) — the
# header may be stale; confirm which dataset this config evaluates.
print('MYO||LV||RV')
print('Dice mean:{}'.format(mean_dice))
print('Dice std:{}'.format(std_dice))
print('total mean dice:',np.mean(mean_dice))
print('ASSD mean:{}'.format(mean_assd))
print('ASSD std:{}'.format(std_assd))
print('total mean assd:',np.mean(mean_assd))
print('-----------')

'''
Recorded per-class dice at various checkpoints (checkpoint: [class scores]):
60:  [40.75037337 56.46230896 25.46799964 19.78436507]
80:  [46.56454925 62.31222111 13.97301379 26.78729246]
100: [42.23531027 59.79586272 43.67719427 34.17413111]
120: [31.75980246 36.90693933 32.00057506 32.3421381 ]
140: [33.97719566 40.50739008 36.14413817 19.72479731]
160: [28.49582146 63.63636364 46.12647865 11.35176667]
200: [29.00035067 60.29238789 65.76656888 20.10472457]
220: [32.28604542 59.10023312 30.4001434  24.2296973 ]
240: [17.07839666 64.81667504 51.05290999 22.80802119]
260: [38.46276324 59.09090913 22.40526311 23.54856176]
280: [13.77519991  2.27272732 46.6237552  23.49558947]
'''

