# import math
# from collections import Counter

# def calculate_entropy(labels):
#     """
#     计算给定类别标签列表的熵。

#     参数:
#     labels (list): 类别标签列表。

#     返回:
#     float: 信息熵。
#     """
#     # 统计每个标签的出现次数
#     label_counts = Counter(labels)
#     total_count = len(labels)
    
#     # 计算每个标签的概率
#     probabilities = [count / total_count for count in label_counts.values()]
    
#     # 计算信息熵
#     entropy = -sum(p * math.log2(p) for p in probabilities)
    
#     return entropy

# # 示例数据
# labels = ['apple', 'banana', 'apple', 'apple', 'orange', 'banana', 'banana', 'banana', 'orange', 'apple']

# # 计算并打印信息熵
# entropy = calculate_entropy(labels)
# print(f"信息熵: {entropy:.4f}")




from numpy import log2,sum,array
def entropy(p):
    """Return the Shannon entropy H(p) = -sum(p_i * log2(p_i)) in bits.

    Parameters:
        p: sequence of probabilities (array-like of non-negative floats).

    Returns:
        float: the entropy of the distribution.

    Notes:
        Zero probabilities are handled by the standard convention
        0 * log2(0) == 0 (the limit as p -> 0), so inputs such as
        [1.0, 0.0] no longer trigger log2(0) warnings or NaN results.
    """
    p = array(p, dtype=float)
    # Mask out zero entries before taking log2; they contribute nothing.
    nonzero = p[p > 0]
    return float(-(nonzero * log2(nonzero)).sum())



if __name__ == "__main__":
    # Quick manual check: print the entropy of a sample probability vector.
    probs = [0.9, 0.01]
    print(entropy(probs))