import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
import math
import torch.nn.functional as F
import pdb

# Compute Shannon entropy
# input_ is a tensor that has already been passed through softmax
def Entropy(input_):
    """Per-sample Shannon entropy of softmax probabilities.

    Args:
        input_: probability tensor (softmax output), shape (N, C).

    Returns:
        1-D tensor of length N holding each row's entropy.
    """
    eps = 1e-5  # guards log(0) for zero-probability classes
    weighted_log = input_ * torch.log(input_ + eps)
    return -weighted_log.sum(dim=1)

# Compute the information-maximization score
# input_ is a tensor that has already been passed through softmax
# Information = H_div - H_ent
# We want Information as large as possible, i.e. -Information as small as possible
# IM_loss = -Information = H_ent - H_div
def Information(input_):
    """Information-maximization score of a batch of softmax outputs.

    Returns H_div - H_ent, where H_div is the entropy of the batch-mean
    prediction (large when predictions are diverse across classes) and
    H_ent is the mean per-sample entropy (small when each prediction is
    confident). A larger value is better; -Information is the IM loss.

    Args:
        input_: probability tensor (softmax output), shape (N, C).

    Returns:
        Scalar tensor H_div - H_ent.
    """
    eps = 1e-5
    # Entropy of the average prediction: maximal when classes are used evenly.
    mean_pred = input_.mean(dim=0)
    h_div = -torch.sum(mean_pred * torch.log(mean_pred + eps))
    # Mean of per-sample entropies: minimal when each prediction is certain.
    per_sample = -torch.sum(input_ * torch.log(input_ + eps), dim=1)
    h_ent = per_sample.mean()
    return h_div - h_ent


# Compute domain entropy