# -*- coding: utf-8 -*-
#!/usr/bin/env python
"""
-------------------------------------------------
   File Name：     utils
   Description :   
   Author :       lth
   date：          2022/12/9
-------------------------------------------------
   Change Activity:
                   2022/12/9 9:32: create this script
-------------------------------------------------
"""
__author__ = 'lth'

import numpy as np
import torch
from torch import nn


class GaussianPriorNICELoss(nn.Module):
    """Negative log-likelihood loss for a NICE flow under a Gaussian prior.

    Wraps :func:`gaussian_nice_loglkhd` and negates it so the quantity can be
    minimised by gradient descent, then reduces over the batch dimension.
    """

    def __init__(self, size_average=True):
        # size_average: reduce per-sample losses with mean() when True,
        # with sum() otherwise (mirrors the classic torch loss flag).
        super(GaussianPriorNICELoss, self).__init__()
        self.size_average = size_average

    def forward(self, fx, diag):
        """Return the reduced negative log-likelihood of the batch ``fx``.

        Args:
        * fx: (N, D) tensor of flow outputs.
        * diag: (D,) scaling diagonal passed through to the likelihood.
        """
        per_sample = -gaussian_nice_loglkhd(fx, diag)
        reduce = torch.mean if self.size_average else torch.sum
        return reduce(per_sample)


# log(2*pi) as a plain Python float, computed once at import time. The
# original code rebuilt a 0-dim tensor and re-took its log on every call,
# which is loop-invariant work inside the training hot path.
_LOG_2PI = float(np.log(2.0 * np.pi))


def gaussian_nice_loglkhd(h, diag):
    """
    Definition of log-likelihood function with a Gaussian prior, as in the paper.

    Args:
    * h: float tensor of shape (N,D). First dimension is batch dim, second dim consists of components
      of a factorized probability distribution.
    * diag: scaling diagonal of shape (D,).

    Returns:
    * loss: torch float tensor of shape (N,).
    """
    # \sum^D_i s_{ii} - { (1/2) * \sum^D_i  h_i**2 + (D/2) * log(2\pi) }
    dim = h.size(1)
    quadratic_term = 0.5 * torch.sum(torch.pow(h, 2), dim=1)
    # torch.sum(diag) is a scalar; it broadcasts against the (N,) terms.
    return torch.sum(diag) - (quadratic_term + 0.5 * dim * _LOG_2PI)

