from mindspore import nn, ops
from .KL import KLLoss

class SPLoss(nn.LossBase):
    """Similarity-Preserving Knowledge Distillation loss (Tung & Mori, ICCV 2019).

    Matches the row-normalized batch-wise similarity (Gram) matrices of
    student and teacher features, so that samples the teacher treats as
    similar stay similar under the student.

    Args:
        reduction: reduction mode passed to ``nn.LossBase`` (default "mean",
            which divides the squared difference by batch_size**2 as in the
            original SP formulation).
    """

    def __init__(self, reduction="mean"):
        super(SPLoss, self).__init__(reduction)
        # Row-wise L2 normalization of the (batch, batch) similarity matrices.
        self.l2_normalize = ops.L2Normalize(axis=1)

    def construct(self, f_s, f_t):
        """Return the SP loss between student features ``f_s`` and teacher features ``f_t``.

        Args:
            f_s: student features with leading batch dimension; flattened to
                (batch, -1) internally, so 2-D activations and 4-D feature
                maps (N, C, H, W) are both accepted.
            f_t: teacher features with the same batch size as ``f_s``.

        Returns:
            Scalar loss tensor (with the configured reduction applied).
        """
        # Flatten BEFORE the matmul so multi-dimensional feature maps are
        # handled correctly; for 2-D inputs this reshape is a no-op, keeping
        # the previous behavior unchanged.
        f_s = f_s.view(f_s.shape[0], -1)
        f_t = f_t.view(f_t.shape[0], -1)

        # Batch-wise similarity (Gram) matrices, shape (batch, batch).
        G_s = ops.matmul(f_s, f_s.transpose())
        G_t = ops.matmul(f_t, f_t.transpose())

        G_s = self.l2_normalize(G_s)
        G_t = self.l2_normalize(G_t)

        # Squared Frobenius difference of the normalized similarity matrices.
        return self.get_loss((G_s - G_t) ** 2)

class SPLossCell(nn.Cell):
    """Training cell combining cross-entropy, KL distillation, and SP losses.

    total = CE(student_logits, label)
          + KL(student_logits, teacher_logits)
          + beta * SP(student_fm, teacher_fm)

    Args:
        student_net: student network; must support
            ``student_net(data, need_feature_map=True)`` returning a
            ``(feature_map, logits)`` pair.
        beta: weight of the similarity-preserving term.
        temperature: softmax temperature for the KL distillation loss.
            Defaults to 4, matching the previously hard-coded value.
    """

    def __init__(self, student_net, beta, temperature=4):
        super(SPLossCell, self).__init__(auto_prefix=False)
        self.student_net = student_net
        # Temperature is now configurable instead of hard-coded.
        self.kl_loss = KLLoss(temperature)
        self.ce_loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
        self.sp_loss = SPLoss()
        self.beta = beta

    def construct(self, teacher_fm, teacher_logits, data, label):
        """Return the combined distillation loss for one batch.

        Args:
            teacher_fm: precomputed teacher feature map.
            teacher_logits: precomputed teacher logits.
            data: input batch fed to the student network.
            label: sparse (integer) class labels for the CE term.
        """
        student_fm, student_logits = self.student_net(data, need_feature_map=True)
        ce = self.ce_loss(student_logits, label)
        kl = self.kl_loss(student_logits, teacher_logits)
        sp = self.sp_loss(student_fm, teacher_fm)
        return ce + kl + self.beta * sp

    @property
    def backbone_network(self):
        # Exposed so MindSpore training wrappers can locate the trainable net.
        return self.student_net
