import paddle
from paddle import nn

def sparsity(arr, lamda2):
    """Sparsity regularizer.

    Takes the L2 norm of ``arr`` along axis 0, averages the resulting
    norms, and scales by ``lamda2``.
    """
    per_column_norm = paddle.norm(arr, axis=0)
    return lamda2 * paddle.mean(per_column_norm)


def smooth(arr, lamda1):
    """Temporal-smoothness regularizer.

    Sum of squared differences between consecutive entries of ``arr``
    along axis 0, scaled by ``lamda1``.  This is the direct form of the
    original shift-and-subtract computation: there, the last element was
    duplicated into the shifted copy, so its term was exactly zero and
    only the consecutive differences contributed to the sum.
    """
    step_diff = arr[1:] - arr[:-1]
    return lamda1 * paddle.sum(step_diff ** 2)


class RTFM_loss(nn.Layer):
    """RTFM (Robust Temporal Feature Magnitude) loss for weakly-supervised
    video anomaly detection.

    The total loss combines:
      * a BCE classification loss on the video-level scores of normal vs.
        abnormal videos,
      * a feature-magnitude margin term (weighted by ``alpha``) that pushes
        abnormal feature magnitudes toward ``margin`` and normal ones
        toward zero,
      * sparsity and temporal-smoothness regularizers on the abnormal
        snippet scores (fixed weights 8e-3 and 8e-4).

    Args:
        batch_size (int): number of normal videos per batch (assumed equal
            to the number of abnormal videos).
        alpha (float): weight of the feature-magnitude (RTFM) term.
        margin (float): target magnitude margin for abnormal features.
        seg_num (int): snippets (segments) per video; previously hard-coded
            to 32. ``predict["scores"]`` is reshaped assuming this count.
    """

    def __init__(self, batch_size, alpha=0.0001, margin=100, seg_num=32, **kwargs):
        super().__init__()
        self.alpha = alpha
        self.margin = margin
        self.batch_size = batch_size
        self.seg_num = seg_num
        self.sigmoid = nn.Sigmoid()
        self.criterion = nn.BCELoss()

    def forward(self, predict, **kwargs):
        """Compute the total RTFM loss from the model's prediction dict.

        Args:
            predict (dict): expected keys (shapes assumed from usage —
                TODO confirm against the model):
                "score_abnormal", "score_normal": per-video scores,
                "feat_select_abn", "feat_select_normal": selected top-k
                features, "scores": per-snippet scores for the whole batch.

        Returns:
            Tensor: scalar total loss.
        """
        score_abnormal = predict["score_abnormal"]
        score_normal = predict["score_normal"]
        feat_a = predict["feat_select_abn"]
        feat_n = predict["feat_select_normal"]

        # Flatten per-snippet scores; layout: first half normal videos,
        # second half abnormal videos.
        scores = predict["scores"].reshape([self.batch_size * self.seg_num * 2, -1])
        scores = scores.squeeze()
        abn_scores = scores[self.batch_size * self.seg_num:]

        # Video-level labels built in one concat (resolves the old TODO to
        # merge nlabel/alabel): 0 for normal, 1 for abnormal.
        label = paddle.concat(
            (paddle.zeros([self.batch_size, 1], dtype="float32"),
             paddle.ones([self.batch_size, 1], dtype="float32")), 0)

        score = paddle.concat((score_normal, score_abnormal), 0)
        loss_cls = self.criterion(score, label)  # BCE loss in the score space

        # Feature-magnitude margin: abnormal magnitudes pulled toward
        # `margin`, normal magnitudes pulled toward zero.
        loss_abn = paddle.abs(self.margin - paddle.norm(paddle.mean(feat_a, axis=1), p=2, axis=1))
        loss_nor = paddle.norm(paddle.mean(feat_n, axis=1), p=2, axis=1)
        loss_rtfm = paddle.mean((loss_abn + loss_nor) ** 2)
        loss_total = loss_cls + self.alpha * loss_rtfm

        # Regularizers on abnormal snippet scores (fixed weights).
        loss_sparse = sparsity(abn_scores, 8e-3)
        loss_smooth = smooth(abn_scores, 8e-4)
        return loss_total + loss_smooth + loss_sparse
