# -*- coding: utf-8 -*-
"""
# @file name  : loss_function_2.py
# @author     : QuZhang
# @date       : 2020-12-17 21:36
# @brief      : demos of common PyTorch loss functions (L1, MSE, SmoothL1, KLDiv, MarginRanking, MultiLabelMargin):
                5. nn.L1Loss
                6. nn.MSELoss
                7. nn.SmoothL1Loss
                8. nn.PoissonNLLLoss
                9. nn.KLDivLoss
                10. nn.MarginRankingLoss
                11. nn.MultiLabelMarginLoss
                12. nn.SoftMarginLoss
                13. nn.MultiLabelSoftMarginLoss
                14. nn.MultiMarginLoss
                15. nn.TripletMarginLoss
                16. nn.HingeEmbeddingLoss
                17. nn.CosineEmbeddingLoss
                18. nn.CTCLoss
"""

from tools.common_tools import set_seed
import torch
import torch.nn as nn
import numpy as np
import matplotlib.pylab as plt
import os

# Allow duplicate OpenMP runtimes to coexist; works around the common
# libiomp5 crash when torch and matplotlib are used in the same process.
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"


# Project-local helper; presumably seeds random/numpy/torch for
# reproducible demo outputs -- TODO confirm against tools.common_tools.
set_seed(1)

if __name__ == "__main__":
    # ---------- 5. nn.L1Loss / 6. nn.MSELoss ----------
    # Flip the flag to run this demo section.
    # flag = True
    flag = False
    if flag:
        x = torch.ones((2, 2))
        y = torch.ones((2, 2)) * 3

        # Element-wise absolute error |x - y|; reduction='none' keeps the
        # per-element loss tensor instead of averaging.
        l1_criterion = nn.L1Loss(reduction='none')
        l1_loss = l1_criterion(x, y)
        print("input：{}\ntarget：{}\nL1 loss：{}".format(x, y, l1_loss))

        # Element-wise squared error (x - y)^2, again without reduction.
        mse_criterion = nn.MSELoss(reduction='none')
        mse_loss = mse_criterion(x, y)
        print("MSE Loss：{}".format(mse_loss))

    # -------------- 7. Smooth L1 loss -----------
    # flag = True
    flag = False
    if flag:
        # 500 evenly spaced "prediction" values on [-3, 3]; the target is
        # fixed at 0, so each per-element loss is a pure function of the input.
        inputs = torch.linspace(-3, 3, steps=500)
        target = torch.zeros_like(inputs)

        loss_f = nn.SmoothL1Loss(reduction='none')
        loss_smooth = loss_f(inputs, target)

        # Plain L1 reference curve |x|, computed in numpy for plotting.
        loss_l1 = np.abs(inputs.numpy())

        # Overlay both curves: Smooth L1 is quadratic near 0 and linear
        # further out, while L1 is linear everywhere.
        plt.plot(inputs.numpy(), loss_smooth.numpy(), label='Smooth L1 Loss')
        plt.plot(inputs.numpy(), loss_l1, label="L1 loss")
        plt.xlabel('x_i = y_i')
        plt.ylabel("loss value")
        plt.legend()
        plt.grid()
        plt.show()

    # ============= 9. nn.KLDivLoss ========
    # flag = True
    flag = False  # own toggle, consistent with the other demo sections
    if flag:
        inputs = torch.tensor([[0.5, 0.3, 0.2], [0.2, 0.3, 0.5]])
        inputs_log = torch.log(inputs)  # KLDivLoss expects log-probabilities as input
        target = torch.tensor([[0.9, 0.05, 0.05], [0.1, 0.7, 0.2]], dtype=torch.float)

        # Three reductions: per-element, mean over all elements, and
        # 'batchmean' (sum / batch size), which matches the mathematical
        # KL-divergence definition.
        loss_f_none = nn.KLDivLoss(reduction='none')
        loss_f_mean = nn.KLDivLoss(reduction='mean')
        loss_f_bs_mean = nn.KLDivLoss(reduction='batchmean')

        # forward -- bug fix: pass the log-probabilities (inputs_log), not the
        # raw probabilities. nn.KLDivLoss computes target * (log(target) - input)
        # and assumes `input` is already in log space; the original code built
        # inputs_log but then passed `inputs`, yielding wrong loss values.
        loss_none = loss_f_none(inputs_log, target)
        loss_mean = loss_f_mean(inputs_log, target)
        loss_bs_mean = loss_f_bs_mean(inputs_log, target)

        print("loss_none:\n{}\nloss_mean:\n{}\nloss_bs_mean:\n{}".format(loss_none, loss_mean, loss_bs_mean))

    # --------------- 10. Margin Ranking Loss ---------------
    # flag = True
    flag = False
    if flag:
        x1 = torch.tensor([[1], [2], [4]], dtype=torch.float)  # shape (3, 1)
        x2 = torch.tensor([[2], [2], [2]], dtype=torch.float)  # shape (3, 1)

        target = torch.tensor([1, 1, -1], dtype=torch.float)  # target=1 means x1 should rank above x2; -1 the opposite

        loss_f_none = nn.MarginRankingLoss(margin=0, reduction='none')

        # loss = max(0, -target * (x1 - x2) + margin). Since (x1 - x2) is
        # (3, 1) and target is (3,), broadcasting produces a (3, 3) result:
        # every difference is combined with every target entry.
        loss = loss_f_none(x1, x2, target)

        print(loss)

    # -------------- 11. Multi Label Margin Loss --------
    flag = True
    if flag:
        # One sample with 4 class scores; the target lists the positive
        # class indices (here 0 and 3) and is padded with -1.
        scores = torch.tensor([[0.1, 0.2, 0.4, 0.8]])
        labels = torch.tensor([[0, 3, -1, -1]], dtype=torch.long)

        # Hinge loss over (positive, non-positive) index pairs, averaged
        # over the number of classes; 'none' keeps one value per sample.
        criterion = nn.MultiLabelMarginLoss(reduction='none')
        print(criterion(scores, labels))
