'''
Created on Jul 4, 2011

@author: Martin Vegi Kysel
'''
import math

def Entropy(x):
    '''
    @param x: iterable of probabilities (expected to sum to 1)
    @return: Entropy value in bits, as a non-negative float
    @summary: Shannon Entropy - S = - [SUM] P(i) log2 P(i).
              Terms with P(i) == 0 are skipped, since log2(0) is
              undefined and lim p->0 of p*log2(p) is 0.
    '''
    # math.log2(p) is clearer and more accurate than math.log(p, 2),
    # which is evaluated as log(p)/log(2).
    # The 0.0 start value guarantees a float result even for empty
    # or all-zero input, matching the original fabs() return type.
    total = sum((p * math.log2(p) for p in x if p != 0), 0.0)

    # sum of p*log2(p) is <= 0 for valid probabilities; abs() mirrors
    # the original fabs(-S) and keeps the result non-negative.
    return abs(total)
    
    
# Example usage:
#   x = [0.5, 0.25, 0.125, 0.125]
#   print(Entropy(x))  # -> 1.75
#   x = [1, 0, 0, 0]
#   print(Entropy(x))  # -> 0.0