from numpy.lib.function_base import place
import paddle
import paddle.nn as nn
import paddle.nn.functional as F

import numpy as np

class SoftmaxFocalLoss(nn.Layer):
    """Focal loss computed on softmax class scores.

    Down-weights well-classified positions by the modulating factor
    (1 - p)^gamma before applying negative log-likelihood, so training
    focuses on hard examples.

    Args:
        gamma: focusing exponent; gamma == 0 reduces to plain cross-entropy.
        ignore_lb: label value excluded from the loss (default 255).
    """

    def __init__(self, gamma, ignore_lb=255, *args, **kwargs):
        super(SoftmaxFocalLoss, self).__init__()
        self.gamma = gamma
        self.nll = nn.NLLLoss(ignore_index=ignore_lb)

    def forward(self, logits, labels):
        # Modulating factor (1 - p)^gamma from the focal-loss formulation.
        probs = F.softmax(logits, axis=1)
        modulator = paddle.pow(1. - probs, self.gamma)
        # Scale the per-class log-probabilities, then take NLL w.r.t. labels.
        weighted_log_probs = modulator * F.log_softmax(logits, axis=1)
        return self.nll(weighted_log_probs, labels)

class ParsingRelationLoss(nn.Layer):
    """Row-smoothness loss: penalizes differences between logits of
    vertically adjacent rows with a smooth-L1 penalty toward zero."""

    def __init__(self):
        super(ParsingRelationLoss, self).__init__()

    def forward(self, logits):
        # logits: (n, c, h, w) — compare each row against the one below it.
        _, _, h, _ = logits.shape
        row_diffs = [
            logits[:, :, row, :] - logits[:, :, row + 1, :]
            for row in range(h - 1)
        ]
        # Each diff is (n, c, w); concat stacks them along axis 0.
        stacked = paddle.concat(row_diffs)
        return F.smooth_l1_loss(stacked, paddle.zeros_like(stacked))

class ParsingRelationDis(nn.Layer):
    """Shape loss: encourages consecutive row-to-row position changes to be
    similar (second-order smoothness of the predicted lane position)."""

    def __init__(self):
        super(ParsingRelationDis, self).__init__()
        self.l1 = nn.L1Loss()

    def forward(self, x):
        n, dim, num_rows, num_cols = x.shape
        # Soft-argmax over the first dim-1 channels (the last channel is
        # excluded — presumably a background/"no lane" bin; verify against
        # the model head).
        probs = F.softmax(x[:, :dim - 1, :, :], axis=1)
        bin_idx = paddle.to_tensor(
            np.arange(dim - 1), dtype='float32', place=x.place
        ).reshape([1, -1, 1, 1])
        pos = paddle.sum(probs * bin_idx, axis=1)  # (n, num_rows, num_cols)

        # First-order differences over the top half of the rows only.
        first_diffs = [
            pos[:, r, :] - pos[:, r + 1, :] for r in range(num_rows // 2)
        ]

        # Average L1 distance between consecutive first-order differences.
        # NOTE(review): assumes num_rows >= 4; with fewer rows the divisor
        # len(first_diffs) - 1 is zero and this raises — confirm with config.
        loss = 0
        for prev_diff, next_diff in zip(first_diffs[:-1], first_diffs[1:]):
            loss += self.l1(prev_diff, next_diff)
        loss /= len(first_diffs) - 1
        return loss

def get_loss_dict(config):
    """Assemble the training loss configuration.

    Returns a dict with parallel lists: loss names, the loss modules
    themselves, their scalar weights (sim/shape weights come from the
    config), and the batch keys each loss consumes.
    """
    names = ['cls_loss', 'relation_loss', 'relation_dis']
    ops = [SoftmaxFocalLoss(2), ParsingRelationLoss(), ParsingRelationDis()]
    weights = [1.0, config.TRAIN.LOSS.SIM_W, config.TRAIN.LOSS.SHP_W]
    data_srcs = [('cls_out', 'cls_label'), ('cls_out',), ('cls_out',)]
    return {
        'name': names,
        'op': ops,
        'weight': weights,
        'data_src': data_srcs,
    }