# -*- coding: utf-8 -*-

'''
Per-pixel weighted cross-entropy losses for segmentation
(each pixel's log-likelihood is scaled by a per-pixel weight map).
'''


import torch
import torch.nn as nn

class CrossEntropyWeightLoss(nn.Module):
    """Per-pixel weighted cross-entropy over 3 labels (0=background, 1=liver, 2=tumor).

    For every pixel, the log-probability of its target class is scaled by the
    pixel's weight. Each sample's loss is the weighted sum normalized by that
    sample's total weight; the batch loss is the mean over samples, negated.
    """

    # Labels handled by this variant; pixels with any other target value
    # contribute zero loss (matches the original one-hot-mask behavior).
    NUM_CLASSES = 3

    def __init__(self):
        super(CrossEntropyWeightLoss, self).__init__()

    def forward(self, input, target, weight):
        """
        Args:
            input:  (B, C, *spatial) raw logits with C >= 3.
            target: (B, *spatial) label map; values in {0, 1, 2} are counted.
            weight: (B, *spatial) non-negative per-pixel weights. Each sample's
                    weights must not sum to zero (used as a divisor below).
        Returns:
            Scalar tensor: negative weighted mean log-likelihood.
        """
        log_prob = torch.log_softmax(input, dim=1)

        # Accumulate the target-class log-probability at every pixel.
        # (target == c) builds the mask on whatever device the inputs live on;
        # the original hard-coded .cuda(), which broke CPU runs and could
        # land masks on the wrong GPU in multi-GPU setups.
        per_pixel = torch.zeros_like(target, dtype=log_prob.dtype)
        for c in range(self.NUM_CLASSES):
            per_pixel = per_pixel + log_prob[:, c] * (target == c).to(log_prob.dtype)

        # Batched normalization replaces the original per-sample Python loop.
        batch = log_prob.shape[0]
        num = (per_pixel * weight).reshape(batch, -1).sum(dim=1)
        # NOTE: a sample whose weights sum to zero yields nan/inf, as in the original.
        den = weight.reshape(batch, -1).sum(dim=1)
        return -(num / den).mean()

class CrossEntropyWeightLossV2(nn.Module):
    """Per-pixel weighted cross-entropy over 2 labels (0=background, 1=tumor).

    Binary variant of CrossEntropyWeightLoss: each pixel's target-class
    log-probability is scaled by its weight, normalized per sample by the
    total weight, averaged over the batch, and negated.
    """

    # Labels handled by this variant; pixels with any other target value
    # contribute zero loss (matches the original one-hot-mask behavior).
    NUM_CLASSES = 2

    def __init__(self):
        super(CrossEntropyWeightLossV2, self).__init__()

    def forward(self, input, target, weight):
        """
        Args:
            input:  (B, C, *spatial) raw logits with C >= 2.
            target: (B, *spatial) label map; values in {0, 1} are counted.
            weight: (B, *spatial) non-negative per-pixel weights. Each sample's
                    weights must not sum to zero (used as a divisor below).
        Returns:
            Scalar tensor: negative weighted mean log-likelihood.
        """
        log_prob = torch.log_softmax(input, dim=1)

        # Build class masks on the inputs' own device instead of the original
        # hard-coded .cuda() (which broke CPU use / non-default GPUs).
        per_pixel = torch.zeros_like(target, dtype=log_prob.dtype)
        for c in range(self.NUM_CLASSES):
            per_pixel = per_pixel + log_prob[:, c] * (target == c).to(log_prob.dtype)

        # Batched normalization replaces the original per-sample Python loop.
        batch = log_prob.shape[0]
        num = (per_pixel * weight).reshape(batch, -1).sum(dim=1)
        # NOTE: a sample whose weights sum to zero yields nan/inf, as in the original.
        den = weight.reshape(batch, -1).sum(dim=1)
        return -(num / den).mean()

# Weighted loss over four labels (background + three foreground classes)
class CrossEntropyWeightLossV3(nn.Module):
    """Per-pixel weighted cross-entropy over 4 labels (0=background, 1-3=foreground).

    Four-label variant of CrossEntropyWeightLoss: each pixel's target-class
    log-probability is scaled by its weight, normalized per sample by the
    total weight, averaged over the batch, and negated.
    """

    # Labels handled by this variant; pixels with any other target value
    # contribute zero loss (matches the original one-hot-mask behavior).
    NUM_CLASSES = 4

    def __init__(self):
        super(CrossEntropyWeightLossV3, self).__init__()

    def forward(self, input, target, weight):
        """
        Args:
            input:  (B, C, *spatial) raw logits with C >= 4.
            target: (B, *spatial) label map; values in {0, 1, 2, 3} are counted.
            weight: (B, *spatial) non-negative per-pixel weights. Each sample's
                    weights must not sum to zero (used as a divisor below).
        Returns:
            Scalar tensor: negative weighted mean log-likelihood.
        """
        log_prob = torch.log_softmax(input, dim=1)

        # Build class masks on the inputs' own device instead of the original
        # hard-coded .cuda() (which broke CPU use / non-default GPUs).
        per_pixel = torch.zeros_like(target, dtype=log_prob.dtype)
        for c in range(self.NUM_CLASSES):
            per_pixel = per_pixel + log_prob[:, c] * (target == c).to(log_prob.dtype)

        # Batched normalization replaces the original per-sample Python loop.
        batch = log_prob.shape[0]
        num = (per_pixel * weight).reshape(batch, -1).sum(dim=1)
        # NOTE: a sample whose weights sum to zero yields nan/inf, as in the original.
        den = weight.reshape(batch, -1).sum(dim=1)
        return -(num / den).mean()