import torch
import torch.nn as nn
import torch.nn.functional as F

"""
自定义loss类型
由于求loss时只求一部分要求的，所以需要进行Mask操作
不携带Mask标志的是内置Mask
携带Mask标志的是先Mask再求loss
"""


class MAELoss(nn.Module):
    """Mean absolute error (L1) restricted to the unmasked entries.

    ``mask`` selects the positions that contribute to the loss; the selected
    values are reshaped back to ``(-1, last_dim)`` before the reduction.
    NOTE(review): this reshape assumes the mask keeps whole rows of the last
    dimension — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype (e.g. fp16 preds).
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        # Keep only the entries marked valid, then restore the feature dim.
        label_sel = torch.masked_select(label_f, mask).view(-1, label_f.size(-1))
        pred_sel = torch.masked_select(pred_f, mask).view(-1, pred_f.size(-1))
        return F.l1_loss(pred_sel, label_sel)


class MSELoss(nn.Module):
    """Mean squared error restricted to the unmasked entries.

    ``mask`` selects the positions that contribute to the loss; the selected
    values are reshaped back to ``(-1, last_dim)`` before the reduction.
    NOTE(review): this reshape assumes the mask keeps whole rows of the last
    dimension — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        # Keep only the entries marked valid, then restore the feature dim.
        label_sel = torch.masked_select(label_f, mask).view(-1, label_f.size(-1))
        pred_sel = torch.masked_select(pred_f, mask).view(-1, pred_f.size(-1))
        return F.mse_loss(pred_sel, label_sel)


class SmoothL1Loss(nn.Module):
    """Smooth-L1 (Huber, beta=1) loss restricted to the unmasked entries.

    ``mask`` selects the positions that contribute to the loss; the selected
    values are reshaped back to ``(-1, last_dim)`` before the reduction.
    NOTE(review): this reshape assumes the mask keeps whole rows of the last
    dimension — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        # Keep only the entries marked valid, then restore the feature dim.
        label_sel = torch.masked_select(label_f, mask).view(-1, label_f.size(-1))
        pred_sel = torch.masked_select(pred_f, mask).view(-1, pred_f.size(-1))
        return F.smooth_l1_loss(pred_sel, label_sel)


class MCRMSELoss(nn.Module):
    """Mean column-wise RMSE over the unmasked entries.

    Per column: RMSE across the selected rows; the final loss is the mean of
    the per-column RMSEs. NOTE(review): the masked_select + view assumes the
    mask keeps whole rows of the last dimension — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        # Keep only the entries marked valid, then restore the feature dim.
        label_sel = torch.masked_select(label_f, mask).view(-1, label_f.size(-1))
        pred_sel = torch.masked_select(pred_f, mask).view(-1, pred_f.size(-1))
        # RMSE per column (dim 0), then average the columns.
        per_column_rmse = ((pred_sel - label_sel) ** 2).mean(dim=0).sqrt()
        return per_column_rmse.mean()


def loss_mask(weights, mask, value=0):
    """Set entries of ``weights`` to ``value`` wherever ``mask`` is False.

    Mutates ``weights`` in place and returns it for convenient chaining.
    Used to zero out loss contributions from irrelevant sequence positions.
    """
    weights.masked_fill_(~mask, value)
    return weights


def smooth_l1_loss(input, target):
    """Element-wise smooth-L1 (Huber, beta=1): quadratic below 1, linear above."""
    diff = torch.abs(input - target)
    quadratic = 0.5 * diff * diff
    linear = diff - 0.5
    return torch.where(diff < 1, quadratic, linear)


class MaskedMAELoss(nn.Module):
    """MAE where masked-out positions contribute zero to a global mean.

    NOTE(review): the final mean divides by the TOTAL element count,
    including masked entries, so the loss is down-weighted by the masked
    fraction — confirm this is intended.
    NOTE(review): the mask is tiled 3x along dim 1, which assumes the label
    is exactly three mask-widths wide — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        abs_err = torch.abs(pred_f - label_f)
        # Expand the per-row mask across the three target columns.
        expanded = torch.cat((mask, mask, mask), dim=1)
        # Inlined loss_mask: 1.0 where kept, 0.0 where masked out.
        keep = torch.ones_like(label_f)
        keep[~expanded] = 0
        return torch.mean(abs_err * keep)


class MaskedMSELoss(nn.Module):
    """MSE where masked-out positions contribute zero to a global mean.

    NOTE(review): the final mean divides by the TOTAL element count,
    including masked entries, so the loss is down-weighted by the masked
    fraction — confirm this is intended.
    NOTE(review): the mask is tiled 3x along dim 1, which assumes the label
    is exactly three mask-widths wide — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        sq_err = (pred_f - label_f) ** 2
        # Expand the per-row mask across the three target columns.
        expanded = torch.cat((mask, mask, mask), dim=1)
        # Inlined loss_mask: 1.0 where kept, 0.0 where masked out.
        keep = torch.ones_like(label_f)
        keep[~expanded] = 0
        return torch.mean(sq_err * keep)


class MaskedSmoothL1Loss(nn.Module):
    """Smooth-L1 (Huber, beta=1) where masked positions contribute zero.

    NOTE(review): the final mean divides by the TOTAL element count,
    including masked entries, so the loss is down-weighted by the masked
    fraction — confirm this is intended.
    NOTE(review): the mask is tiled 3x along dim 1, which assumes the label
    is exactly three mask-widths wide — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        diff = torch.abs(pred_f - label_f)
        # Huber(beta=1): quadratic below 1, linear above.
        huber = torch.where(diff < 1, 0.5 * diff * diff, diff - 0.5)
        # Expand the per-row mask across the three target columns.
        expanded = torch.cat((mask, mask, mask), dim=1)
        # Inlined loss_mask: 1.0 where kept, 0.0 where masked out.
        keep = torch.ones_like(label_f)
        keep[~expanded] = 0
        return torch.mean(huber * keep)


class MaskedMCRMSELoss(nn.Module):
    """Mean column-wise RMSE with masked positions zeroed before averaging.

    NOTE(review): the per-column mean divides by the TOTAL row count,
    including masked rows, so columns with many masked rows are
    down-weighted — confirm this is intended.
    NOTE(review): the mask is tiled 3x along dim 1, which assumes the label
    is exactly three mask-widths wide — confirm with the caller.
    """

    def __init__(self):
        super().__init__()

    def forward(self, pred, label, mask):
        # Compute in float32 regardless of incoming dtype.
        pred_f = pred.to(torch.float32)
        label_f = label.to(torch.float32)
        sq_err = (pred_f - label_f) ** 2
        # Expand the per-row mask across the three target columns.
        expanded = torch.cat((mask, mask, mask), dim=1)
        # Inlined loss_mask: 1.0 where kept, 0.0 where masked out.
        keep = torch.ones_like(label_f)
        keep[~expanded] = 0
        # RMSE per column (dim 0) on the zero-filled errors, then average.
        per_column_rmse = (sq_err * keep).mean(dim=0).sqrt()
        return per_column_rmse.mean()


def my_loss(pred, label, mask, loss_type):
    """Dispatch to the loss implementation named by ``loss_type``.

    Args:
        pred: predictions; cast to float32 before the loss is computed.
        label: targets; cast to float32 before the loss is computed.
        mask: boolean mask forwarded to the chosen loss.
        loss_type: one of "MAE", "MSE", "SmoothL1", "MCRMSE" (built-in mask
            via masked_select) or their "Masked*" variants (mask applied as
            zero weights before reduction).

    Returns:
        The scalar loss tensor.

    Raises:
        ValueError: if ``loss_type`` is not a recognized name.
    """
    # Table dispatch replaces the long if/elif chain.
    loss_classes = {
        # Non-Masked Loss
        "MAE": MAELoss,
        "MSE": MSELoss,
        "SmoothL1": SmoothL1Loss,
        "MCRMSE": MCRMSELoss,
        # Masked Loss
        "MaskedMAE": MaskedMAELoss,
        "MaskedMSE": MaskedMSELoss,
        "MaskedSmoothL1": MaskedSmoothL1Loss,
        "MaskedMCRMSE": MaskedMCRMSELoss,
    }
    if loss_type not in loss_classes:
        # Bug fix: the original did `print(...)` followed by a bare `raise`
        # with no active exception, which itself raises
        # RuntimeError("No active exception to re-raise") and loses the
        # message. Raise a proper ValueError carrying the message instead.
        raise ValueError("不存在这种loss_type:" + loss_type)

    pred = pred.to(torch.float32)
    label = label.to(torch.float32)
    return loss_classes[loss_type]()(pred, label, mask)

# if __name__ == '__main__':
#     loss_fn = nn.L1Loss(reduce=False, size_average=False)
#     input = torch.autograd.Variable(torch.randn(3, 4))
#     target = torch.autograd.Variable(torch.randn(3, 4))
#     loss = loss_fn(input, target)
#     print(loss)
