# coding = utf-8

'''
Post-processing with Gabor filtering.
First multiply the Gabor filter response directly with the prediction,
then derive the relevant statistics for each predicted region.
'''

import cv2,os
import numpy as np
import matplotlib.pyplot as plt
import SimpleITK as sitk
from PIL import Image
import math
from skimage import measure
from scipy import ndimage

def liver_index_2_all_index(case_id, origion_id, index):
    """Map a liver-slice index to an absolute slice index in the full volume.

    The prediction PNGs are numbered over liver-containing slices only;
    this walks the liver label volume and returns the absolute index of
    the ``index``-th slice that contains liver.

    Args:
        case_id: Unused; kept for interface compatibility with callers.
        origion_id: Dataset id used to locate the liver label volume.
        index: 0-based index among liver-containing slices.

    Returns:
        The absolute slice index, or None when ``index`` exceeds the
        number of liver-containing slices.
    """
    liver_path = "E:\Dataset\Liver\qiye\DongBeiDaXue2\liver\\data2_{}_liver_label.mha".format(origion_id)
    big_liver = sitk.GetArrayFromImage(sitk.ReadImage(liver_path))

    for i in range(big_liver.shape[0]):
        if big_liver[i].sum() > 0:
            # BUG FIX: the original tested `index == 0` BEFORE checking the
            # slice for liver, so index 0 always mapped to absolute slice 0
            # even when slice 0 contained no liver, and every other index
            # mapped to the slice AFTER the matching liver slice.
            # NOTE(review): if the prediction files are numbered 1-based,
            # callers must pass index - 1 — confirm against the exporter.
            if index == 0:
                return i
            index -= 1
    return None  # index out of range of liver-containing slices

def garbor_filter4(image):
    """Filter ``image`` with a bank of Gabor kernels and equalize the result.

    Args:
        image: 2-D float array, assumed scaled to [0, 1] — TODO confirm
            against callers (gabor_dice_each_slice passes normalized CT).

    Returns:
        uint8 2-D array: the per-orientation responses averaged and then
        histogram-equalized.
    """
    image = (image * 255).astype(np.uint8)

    # Six orientations evenly spaced over [0, pi).
    thetas = [k * np.pi / 6 for k in range(6)]
    kernels = []
    for theta in thetas:
        kern = cv2.getGaborKernel((2, 2), sigma=1.0, theta=theta,
                                  lambd=np.pi / 2.0, gamma=0.5, psi=0,
                                  ktype=cv2.CV_32F)
        kern /= 1.5 * kern.sum()  # normalize so the response stays bounded
        kernels.append(kern)

    # BUG FIX: the original inner loop `for kern in filters3[i]:` iterated
    # over the ROWS of the 2x2 kernel ndarray, convolving with each 1-D row
    # separately and max-combining them, instead of applying the full 2-D
    # Gabor kernel once. Apply each complete kernel and average responses.
    result = np.zeros(image.shape, dtype=np.float64)
    for kern in kernels:
        fimg = cv2.filter2D(image, cv2.CV_8UC1, kern)
        result += fimg
    result = (result / len(kernels)).astype(np.uint8)

    # Spread the dynamic range before thresholding downstream.
    # clahe = cv2.createCLAHE(clipLimit=5, tileGridSize=(100, 100))
    result = cv2.equalizeHist(result)
    # result = clahe.apply(result)

    return result


# Compute gray-level co-occurrence matrix (GLCM) texture statistics
def cal_GLCM(image, label):
    """Compute GLCM texture statistics over the region selected by ``label``.

    The masked intensities (assumed in [0, 1]) are quantized to 32 gray
    levels, a co-occurrence matrix is built for the (1, 0) offset, and
    entropy, energy, contrast and the inverse difference moment are
    printed and returned.

    Args:
        image: 2-D float array, intensities assumed in [0, 1] — TODO
            confirm against callers.
        label: 2-D array; nonzero marks the region of interest. Not
            modified (the original mutated it in place).

    Returns:
        Tuple ``(entropy, energy, contrast, differ_moment)``.
    """
    def glgc(data, a, b, instance_level=32):
        """Count co-occurrences of gray levels at pixel offset (a, b),
        skipping background (level 0) on either end of the pair."""
        glgc_matrix = np.zeros((instance_level, instance_level))
        rows, cols = data.shape
        for i in range(rows):
            for j in range(cols):
                f1 = data[i][j]
                if f1 == 0:
                    continue
                ni, nj = i + a, j + b
                if ni < 0 or ni >= rows or nj < 0 or nj >= cols:
                    continue
                f2 = data[ni][nj]
                if f2 == 0:
                    continue
                glgc_matrix[f1][f2] += 1
        return glgc_matrix

    # First attempt: fill the region with its largest fitted ellipse.
    '''
    label = label * 255
    label = label.astype(np.uint8)
    contours, _ = cv2.findContours(label, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    retval = cv2.fitEllipse(contours[0])  # fit an ellipse to one contour
    label = cv2.ellipse(label, retval, 255, thickness=1) # draw on the image
    label = ndimage.binary_fill_holes(label).astype(int)
    '''

    # Work on a copy so the caller's array is not mutated.
    label = np.array(label).copy()
    label[label > 0] = 1
    label_image = label * image

    # Quantize to 32 levels: [0, 1] -> [0, 255] -> // 8 -> [0, 31].
    label_image = (label_image * 255).astype(np.int64)
    label_image = (label_image / 8).astype(np.uint8)

    glgc1 = glgc(label_image, a=1, b=0)
    glgc1 = glgc1 / glgc1.sum()
    glgc2 = glgc(label_image, a=-1, b=0)
    glgc2 = glgc2 / glgc2.sum()
    glgc3 = glgc(label_image, a=0, b=1)
    glgc3 = glgc3 / glgc3.sum()
    # BUG FIX: the fourth direction was a copy-paste duplicate of (0, 1);
    # the intended fourth offset is (0, -1).
    glgc4 = glgc(label_image, a=0, b=-1)
    glgc4 = glgc4 / glgc4.sum()

    #matrix = (glgc1 + glgc2 + glgc3 + glgc4) / 4
    matrix = glgc1

    # (i - j)^2 weights used by contrast and the inverse difference moment.
    supply_matrix = np.zeros((32, 32))
    for i in range(supply_matrix.shape[0]):
        for j in range(supply_matrix.shape[1]):
            supply_matrix[i][j] = math.pow((i - j), 2)

    entropy = (-matrix * np.log(matrix + 0.0000001)).sum()
    energy = np.power(matrix, 2).sum()
    contrast = (supply_matrix * matrix).sum()
    differ_moment = ((1 / (1 + supply_matrix)) * matrix).sum()

    print(entropy, energy, contrast, differ_moment)
    # Return the statistics so callers can use them (original only printed).
    return entropy, energy, contrast, differ_moment




def gabor_dice_each_slice(case_id, origion_id, image_index):
    """Post-process one predicted tumor slice with Gabor filtering and show it.

    Loads the CT volume, liver label and tumor label for ``origion_id``,
    finds the prediction PNG whose liver-slice index maps to
    ``image_index``, multiplies the prediction with a thresholded Gabor
    response, removes small and border-hugging components, prints GLCM
    statistics for large components, and plots prediction / post-processed
    labels / CT with ground-truth contours side by side.
    """

    # Hardcoded dataset paths; only usable on the author's machine.
    big_image = "E:\Dataset\Liver\qiye\DongBeiDaXue2\image_venous\\data2_{}_venous.mha".format(origion_id)
    big_liver = "E:\Dataset\Liver\qiye\DongBeiDaXue2\liver\\data2_{}_liver_label.mha".format(origion_id)
    big_tumor = "E:\Dataset\Liver\qiye\DongBeiDaXue2\lesion\\data2_{}_lesion_label.mha".format(origion_id)

    predict_tumor = "E:\predict\image_tumor_v3\case_{}\predict_tumor".format(str(case_id).zfill(5))

    # Clip HU values to [-200, 250] and rescale to [0, 1].
    big_image = sitk.GetArrayFromImage(sitk.ReadImage(big_image))
    big_image[big_image <= -200] = -200
    big_image[big_image > 250] = 250
    big_image = (big_image + 200) / 450

    big_liver = sitk.GetArrayFromImage(sitk.ReadImage(big_liver))
    big_liver[big_liver > 0] = 1

    mask = sitk.GetArrayFromImage(sitk.ReadImage(big_tumor))
    mask[mask > 0] = 1


    for item in sorted(os.listdir(predict_tumor)):
        # PNG name encodes the liver-slice index; map it to the volume index.
        index_old = int(item.split(".")[0])
        index = liver_index_2_all_index(case_id=case_id, origion_id=origion_id, index=index_old)
        if index != image_index:
            continue


        # 1. Load the original predicted tumor mask and binarize it.
        file_name = os.path.join(predict_tumor, item)
        data = Image.open(file_name).convert("L")
        data = np.array(data)
        data[data > 0] = 1

        # 2. Gabor response of the liver-masked slice; invert, median-blur,
        #    then binarize at 150 — presumably to keep dark (hypodense)
        #    regions; TODO confirm the threshold's origin.
        image = big_image[index] * big_liver[index]
        garbor_result = garbor_filter4(image)
        garbor_result = cv2.medianBlur((255 - garbor_result) * big_liver[index], ksize=5)
        garbor_result[garbor_result <= 150] = 0
        garbor_result[garbor_result > 150] = 1



        # 3. Intersect the prediction with the Gabor mask and fill holes.
        data_after = data * garbor_result
        data_after = ndimage.binary_fill_holes(data_after).astype(int)


        # 4. Build a liver border band (liver minus its 15x15 erosion).
        # NOTE(review): `liver = big_liver[index]` is a view, so the
        # `liver[liver > 0] = 1` write touches big_liver too; it is
        # idempotent here since big_liver was already binarized above.
        liver = big_liver[index]
        liver[liver > 0] = 1
        liver = liver * 255
        liver = liver.astype(np.uint8)
        kernel = np.ones((15, 15), np.uint8)
        erosion = cv2.erode(liver, kernel, iterations=1)
        erosion = liver - erosion
        erosion[erosion > 0] = 1






        # Connected components before and after post-processing.
        [data_labels, num_before] = measure.label(data, return_num=True)
        [after_labels, num_after] = measure.label(data_after, return_num=True)




        print("predict size:", num_before, ", postprecessing size:", num_after)

        # Drop components smaller than 20 px or mostly (>=60%) on the
        # liver border band.
        for i in range(num_after):
            temp = np.zeros(data_labels.shape)
            temp[after_labels == i+1] = 1
            if temp.sum() < 20:
                after_labels[after_labels == i+1] = 0
                continue
            count = float((erosion[temp == 1] == 1).sum()) / float((temp == 1).sum())
            if count >= 0.6:
                after_labels[after_labels == i+1] = 0
                continue
            print(i+1, (after_labels == i+1).sum())

            # Texture statistics for large surviving components only.
            if temp.sum() > 500:
                cal_GLCM(image=image, label=temp)


        # For display, overlay ground-truth tumor contours on the CT slice.
        image = image * 255
        image = image.astype(np.uint8)
        tumor = mask[index] * 255
        tumor = tumor.astype(np.uint8)
        contours, _ = cv2.findContours(tumor, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        image = cv2.cvtColor(image, cv2.COLOR_GRAY2BGR)
        for counter in contours:
            data_list = []
            for t in range(counter.shape[0]):
                j = counter[t][0]
                data_list.append(j)
            cv2.polylines(image, np.array([data_list], np.int32), True, [0, 255, 0], thickness=1)

        # Left: raw prediction labels; middle: post-processed labels;
        # right: CT with ground-truth contours.
        plt.subplot(1, 3, 1)
        plt.imshow(data_labels)
        plt.subplot(1, 3, 2)
        plt.imshow(after_labels)
        plt.subplot(1, 3, 3)
        plt.imshow(image)
        plt.show()







if __name__ == '__main__':
    # Demo run: post-process a single slice of a single case.
    demo_case = "72"
    demo_origin = "0628"
    demo_slice = 141
    gabor_dice_each_slice(case_id=demo_case, origion_id=demo_origin,
                          image_index=demo_slice)