%% compute reduct from numerical data, categorical data and their mixtures with fuzzy information entropy. 
%% Please refer to the following papers: 
%% Qinghua Hu, Daren Yu, Zongxia Xie. Information-preserving hybrid data reduction based on fuzzy-rough techniques. Pattern recognition letters. 2006, 27 (5): 414-423
%% Qinghua Hu, Yu Daren, Zongxia Xie,Jinfu Liu.  Fuzzy probabilistic approximation spaces and their information measures. IEEE transactions on fuzzy systems. 2006, 14 (2): 191-201
%% Qinghua Hu, Daren Yu. Entropies of fuzzy indiscernibility relation and its operations. International Journal of uncertainty, fuzziness and knowledge-based systems. 12 (5):575-589. 2004
%% We compute a reduct with fuzzy information entropy if there are numerical attributes; otherwise, we search a reduct with Shannon's entropy. 
%%  In fact, Shannon's entropy and fuzzy entropy are unified in the same form in this model.  

function select_feature=fs_entropy(data,if_fuzzy,neighbor)
%FS_ENTROPY  Greedy attribute reduction based on (fuzzy) information entropy.
%
% Input:
%   data     - data matrix; rows are samples, columns are attributes.
%              Numerical attributes should be normalized into [0,1]; the
%              decision attribute occupies the last column.
%   if_fuzzy - neighborhood type flag: 0 means crisp neighborhoods,
%              1 means triangular fuzzy neighborhoods.
%   neighbor - radius of the neighborhood, usually in [0.05, 0.5].
%
% Output:
%   select_feature - column vector of indices of the selected attributes
%                    (a reduct), in the order they were chosen.

[row,column]=size(data);

% Choose the similarity kernel once; the crisp and fuzzy cases share the
% same relation-building loop and differ only in this function.
if if_fuzzy==0
    kernel=@kersim_crisp;    % crisp neighborhood relation
else
    kernel=@kersim;          % triangular fuzzy neighborhood relation
end

%%%%%%%%%%%%% compute the relation matrix of every attribute %%%%%%%%%
% ssr{col} is the row-by-row similarity (relation) matrix induced by
% attribute col. A cell array replaces the former eval-generated
% variables ssr1, ssr2, ... (dynamic variable names are a MATLAB
% anti-pattern and defeat preallocation and debugging).
ssr=cell(1,column);
for col=1:column
    r=zeros(row,row);        % preallocate instead of growing r element-wise
    x=data(:,col);
    for j=1:row
        a=data(j,col);
        for m=1:row
            r(j,m)=kernel(a,x(m),neighbor);
        end
    end
    ssr{col}=r;
end

%%%%%%%%%%% data reduction based on entropy %%%%%%%%%%%%%%%%%%%%%%%%%%
n=[];                  % indices of selected attributes (the reduct so far)
x=0;                   % history of best significance values, seeded with 0
base=ones(row);        % relation matrix of the currently selected subset
r=ssr{column};         % relation matrix of the decision attribute
entropyd=entropy(r);   % entropy of the decision
attrinu=column-1;      % number of condition attributes

% Greedy forward selection: at most attrinu rounds, stop early when the
% best significance no longer changes by more than 0.001.
for j=attrinu:-1:1
    sig=zeros(1,attrinu);
    for i=1:attrinu
        r1=ssr{i};
        % Significance of attribute i given the current subset base:
        % H(D) + H(B ∪ {i}) - H(B ∪ {i} ∪ D), where min() of relation
        % matrices plays the role of set/relation intersection.
        sig(i)=entropyd+entropy(min(r1,base))-entropy(min(min(r1,r),base));
    end
    [x1,n1]=max(sig);
    x=[x;x1];
    len=length(x);
    if abs(x(len)-x(len-1))>0.001
        % Fold the chosen attribute's relation into the subset relation.
        base=min(base,ssr{n1});
        n=[n;n1];
    else
        break
    end
end
select_feature=n;
