import torch
from torch import nn
import numpy as np


class MaskedL1Loss(nn.Module):
    """Mean absolute error computed only over the positions selected by a mask.

    The element-wise L1 error is zeroed wherever ``mask`` is zero, scaled by
    ``weight``, and normalized by the mask sum (i.e. the number of active
    elements for a 0/1 mask) rather than by the tensor size.
    """

    def __init__(self, weight=1.0):
        super(MaskedL1Loss, self).__init__()
        # Scalar multiplier applied to the final loss value.
        self._weight = weight

    def forward(self, pred, label, mask):
        """Return the weighted, mask-normalized L1 loss.

        Args:
            pred: predicted tensor, same shape as ``label``.
            label: ground-truth tensor.
            mask: tensor broadcastable to ``pred`` selecting valid elements.

        Raises:
            ValueError: if ``pred`` and ``label`` shapes differ.
        """
        # Raise instead of assert: asserts are stripped under `python -O`,
        # which would let mismatched shapes silently broadcast.
        if pred.shape != label.shape:
            raise ValueError(
                'pred and label must have the same shape, got '
                f'{tuple(pred.shape)} and {tuple(label.shape)}')
        loss = torch.abs(label * mask - pred * mask)
        loss = loss * self._weight
        # Normalize by the mask sum; clamp to >= 1 avoids division by zero
        # when the mask is all zeros.
        norm = torch.sum(mask).clamp(1, 1e30)
        return torch.sum(loss) / norm


class WingLoss(nn.Module):
    """Masked Wing loss for landmark regression (Feng et al., CVPR 2018).

    For an absolute residual x:
        x < w:   w * ln(1 + x / eps)      (log region, amplifies small errors)
        x >= w:  x - C                    (linear region)
    where C = w - w*ln(1 + w/eps) makes the two pieces continuous at x == w.
    The result is normalized by the mask sum and scaled by ``weight``.
    """

    def __init__(self, w=10, eps=2, weight=1.0):
        super(WingLoss, self).__init__()
        self.w = w
        self.eps = eps
        # Continuity constant joining the log and linear branches at |x| == w.
        self.C = self.w - self.w * np.log(1 + self.w / self.eps)
        self.weight = weight

    def forward(self, pred, target, mask):
        """Return the weighted, mask-normalized Wing loss.

        Args:
            pred: predicted tensor.
            target: ground-truth tensor, same shape as ``pred``.
            mask: tensor broadcastable to ``pred`` selecting valid elements.
        """
        diff = (pred * mask - target * mask).abs()
        # BUGFIX: branch on the ORIGINAL |diff| with torch.where. The old
        # in-place version re-tested `loss >= w` AFTER the log transform;
        # since w*ln(1 + x/eps) can exceed w for x in [eps*(e-1), w), those
        # entries had C subtracted a second time, corrupting the loss.
        # torch.where also avoids in-place index assignment on the autograd
        # graph.
        loss = torch.where(
            diff < self.w,
            self.w * torch.log(1. + diff / self.eps),
            diff - self.C,
        )
        # Normalize by the mask sum; clamp to >= 1 guards the empty mask.
        norm = torch.sum(mask).clamp(1, 1e30)
        return torch.sum(loss) / norm * self.weight


class SmoothL1Loss(nn.Module):
    """Masked smooth-L1 (Huber) loss, normalized by the mask sum.

    Elements with absolute error below ``beta`` use the quadratic form
    ``0.5 * err^2 / beta``; the rest use the linear form ``err - 0.5*beta``.
    The two pieces match in value and slope at ``err == beta``.
    """

    def __init__(self, beta=1, weight=1.0):
        super(SmoothL1Loss, self).__init__()
        # Transition point between the quadratic and linear regimes.
        self.beta = beta
        # Scalar multiplier applied to the final loss value.
        self.weight = weight

    def forward(self, pred, target, mask):
        """Return the weighted, mask-normalized smooth-L1 loss."""
        abs_err = torch.abs(pred * mask - target * mask)
        quadratic = 0.5 * abs_err * abs_err / self.beta
        linear = abs_err - 0.5 * self.beta
        per_elem = torch.where(abs_err < self.beta, quadratic, linear)
        # Clamp keeps the denominator at least 1 for an all-zero mask.
        denom = torch.sum(mask).clamp(1, 1e30)
        return torch.sum(per_elem) / denom * self.weight