#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created by PyCharm.

@Date    : Thu Feb 25 2021 
@Time    : 03:53:40
@File    : losses.py
@Author  : alpha
"""

import torch
import torch.nn as nn


def hard_samples_mining(norm_anchor, norm_pos, norm_neg, margin):
    """Return a boolean mask over the batch selecting triplets to keep.

    A triplet is selected when d(anchor, pos) - d(anchor, neg) < margin.
    NOTE(review): this also keeps very easy triplets (d_ap << d_an) —
    confirm the intended semi-hard mining semantics with the caller.
    """
    dist_pos = (norm_anchor - norm_pos).norm(dim=1)
    dist_neg = (norm_anchor - norm_neg).norm(dim=1)
    return dist_pos - dist_neg < margin


def renorm(x):
    """L2-normalize each row of ``x`` (rows with norm >= 1e-5 become unit).

    ``Tensor.renorm`` caps every row's L2 norm at 1e-5; multiplying back by
    1e5 leaves those rows exactly unit-length while avoiding division by a
    near-zero norm for degenerate rows.
    """
    capped = x.renorm(p=2, dim=0, maxnorm=1e-5)
    return capped * 1e5


class TripletLoss(nn.Module):
    """Exponential triplet loss on pre-normalized features.

    Pairwise distances are scaled by 1/sigma and passed through exp()
    before the hinge: sum over the batch of
    max(0, exp(d_ap / sigma) - exp(d_an / sigma) + margin).
    """

    def __init__(self, margin=0.2, sigma=0.3):
        super(TripletLoss, self).__init__()
        self.margin = margin  # additive hinge margin
        self.sigma = sigma    # temperature applied to distances

    def forward(self, norm_anchor, norm_pos, norm_neg):
        dist_pos = (norm_anchor - norm_pos).norm(dim=1).div(self.sigma)
        dist_neg = (norm_anchor - norm_neg).norm(dim=1).div(self.sigma)
        hinge = dist_pos.exp() - dist_neg.exp() + self.margin
        return hinge.clamp_min(0).sum()


class MetricSoftmaxLoss(nn.Module):
    """Softmax-style metric loss over triplet distances.

    Per triplet this is -log(exp(d_an) / (exp(d_an) + exp(d_ap))), summed
    over the batch. The original direct form overflows to inf/nan once the
    distances are moderately large (exp(d) blows up around d ~ 88 in
    float32); the mathematically identical softplus(d_ap - d_an) is
    numerically stable for all inputs.
    """

    def __init__(self):
        super(MetricSoftmaxLoss, self).__init__()

    def forward(self, norm_anchor, norm_pos, norm_neg):
        d_ap = torch.norm(norm_anchor - norm_pos, dim=1)
        d_an = torch.norm(norm_anchor - norm_neg, dim=1)
        # softplus(d_ap - d_an) == -log(exp(d_an) / (exp(d_an) + exp(d_ap)))
        return nn.functional.softplus(d_ap - d_an).sum()


class MetricLoss(nn.Module):
    """Combined metric loss: triplet term plus a weighted softmax term.

    Input features are row-normalized with ``renorm`` before both
    sub-losses are evaluated. ``l`` weights the softmax component (name
    kept for caller compatibility).
    """

    def __init__(self, margin=0.2, sigma=0.3, l=1.):
        super(MetricLoss, self).__init__()
        self.l = l
        self.margin = margin  # retained for the (disabled) mining path below
        self.loss_trip = TripletLoss(margin, sigma)
        self.loss_soft = MetricSoftmaxLoss()

    def forward(self, f_anchor, f_pos, f_neg):
        # Normalize every feature set before measuring distances.
        na, npos, nneg = (renorm(f) for f in (f_anchor, f_pos, f_neg))
        # Optional hard-sample mining, currently disabled: compute
        # idx = hard_samples_mining(na, npos, nneg, self.margin) under
        # torch.no_grad() and index na/npos/nneg with idx before the losses.
        trip = self.loss_trip(na, npos, nneg)
        soft = self.loss_soft(na, npos, nneg)
        return trip + self.l * soft


class TripleBCELoss(nn.Module):
    """Binary cross-entropy applied to all three triplet branches.

    Anchor and positive logits are scored against ``labels``; the negative
    branch is scored against the element-wise inverted labels
    (1.0 where labels == 0, else 0.0). Returns the sum of the three terms.
    """

    def __init__(self):
        super(TripleBCELoss, self).__init__()
        self.loss_bce = nn.BCEWithLogitsLoss()

    def forward(self, fa, fp, fn, labels):
        inverted = (labels == 0).to(torch.float)
        branch_losses = [
            self.loss_bce(fa, labels),
            self.loss_bce(fp, labels),
            self.loss_bce(fn, inverted),
        ]
        return sum(branch_losses)