% Estimate the differential entropy (in bits) of a uniform random
% variable on [a,b] from a histogram density estimate.
M = 1e4; % Number of samples
a = 0;
b = 32;
x = a+rand(M,1)*(b-a); % M i.i.d. samples, uniform on [a,b)
delta = 0.1;           % Histogram bin width
edges = a:delta:b;
N = histc(x,edges);
% Normalize counts to a density estimate. histc's last element counts
% only values exactly equal to edges(end), so it is dropped.
p = N(1:end-1)/M/delta;
% Sum only over occupied bins: an empty bin gives 0*log2(0) = 0*(-Inf)
% = NaN, which would poison the whole estimate. The p*log2(p) -> 0
% limit as p -> 0 makes skipping empty bins the correct treatment.
nz = p > 0;
H = -p(nz)'*log2(p(nz))*delta
Ht = log2(b-a) % Theoretical value: log2(32) = 5 bits

% Estimate the differential entropy (in nats) of an exponential random
% variable with rate lambda, and compare with the closed form 1-log(lambda).
M = 1e5; % Number of samples
lambda = 8; % Rate parameter
x = exprnd(1/lambda,M,1); % exprnd is parameterized by the MEAN = 1/lambda
delta = 0.05/lambda; % Bin width scaled to the distribution's spread
% Truncate the histogram support at 7/lambda: rare samples far in the
% tail (3+ standard deviations out) land in nearly-empty bins and
% distort the estimated entropy if included.
edges = 0:delta:7/lambda;
N = histc(x,edges);
% Density estimate; drop histc's final edge-match element.
p = N(1:end-1)/M/delta;
% Empty tail bins are likely here (expected counts of only a few per bin
% near 7/lambda), and 0*log(0) = NaN would corrupt the sum, so restrict
% the summation to occupied bins (p*log(p) -> 0 as p -> 0).
nz = p > 0;
H = -p(nz)'*log(p(nz))*delta
Ht = 1 - log(lambda) % Theoretical differential entropy: 1 - ln(lambda) nats
% Look for approximate calculations of differential
% entropy