function [feaScore, feaIdx] = LLEscore(X, K, gamma)
% LLESCORE  LLE score for filter-based unsupervised feature selection.
%   [feaScore, feaIdx] = LLEscore(X, K, gamma)
%
%   Input:
%     X     - nSmp x nFea data matrix (rows are samples, columns features)
%     K     - number of nearest neighbors for LLE (default 5)
%     gamma - regularizer used by LLE when the embedded data is
%             one-dimensional (default 1e-5)
%   Output:
%     feaScore - nFea x 1 weight per feature; the SMALLER, the better
%     feaIdx   - feature indices sorted by ascending score (best first)
%
% Reference:
%   Chao Yao, Ya-Feng Liu, Bo Jiang, Jungong Han, and Junwei Han,
%   "LLE score: a new filter-based unsupervised feature selection method
%   based on nonlinear manifold embedding and its application to image
%   recognition"
%
% implemented by Liang Du (csliangdu@gmail.com)
%

if ~exist('K', 'var')
    K = 5;
end

if ~exist('gamma', 'var')
    gamma = 1e-5;
end

[~, nFea] = size(X);   % sample count is not needed below

% LLE cost matrix built from ALL features (gamma is forwarded so a
% caller-supplied value is honored consistently with the per-feature calls;
% LLE only uses it when D == 1, i.e. when nFea == 1 here).
M = LLE(X', K, gamma);
M = full(M);

% Score each feature by the squared Frobenius distance between its
% single-feature LLE cost matrix and the all-feature one.
z = zeros(nFea, 1);
for iDim = 1:nFea
    Mi = LLE(X(:, iDim)', K, gamma);
    z(iDim) = sum(sum((M - Mi).^2));
end
feaScore = z;
[~, feaIdx] = sort(feaScore, 'ascend');
end


function M = LLE(X, K, gamma)
% LLE  Reconstruction cost matrix M = (I-W)'(I-W) of locally linear embedding.
%   X     - data as D x N matrix (D = dimensionality, N = #points)
%   K     - number of neighbors (default 5)
%   gamma - regularizer used when D == 1 (default 1e-5)
%
% Regularization of the local Gram matrix C:
%   if D >= K    : no regularization (tol = 0)
%   if K > D > 1 : C + eye(K) * tol * trace(C), tol = 1e-3
%   if D == 1    : C + eye(K) * gamma
%
% Returns M as a sparse, symmetric N x N matrix.
%
% code inherited from Prof. Sam Roweis
%

if ~exist('K', 'var')
    K = 5;
end

if ~exist('gamma', 'var')
    gamma = 1e-5;
end
[D, N] = size(X);

% STEP 1: COMPUTE PAIRWISE DISTANCES & FIND NEIGHBORS
% Squared Euclidean distances via the expansion |a-b|^2 = |a|^2+|b|^2-2a'b.
X2 = sum(X.^2, 1);
distance = repmat(X2, N, 1) + repmat(X2', 1, N) - 2*(X'*X);
[~, index] = sort(distance);        % sorted values themselves are unused
% Column 1 of the sort order is each point itself (distance ~0); skip it.
neighborhood = index(2:(1+K), :);

% STEP 2: SOLVE FOR RECONSTRUCTION WEIGHTS
% The two original branches differed only in the regularizer applied to C,
% so a single loop with a branch on D is used.
if K > D
    %fprintf(1,'   [note: K>D; regularization will be used]\n');
    tol = 1e-3; % regularlizer in case constrained fits are ill conditioned
else
    tol = 0;
end
W = zeros(K, N);
for ii = 1:N
    z = X(:, neighborhood(:, ii)) - repmat(X(:, ii), 1, K); % shift ith pt to origin
    C = z'*z;                                               % local covariance
    if D > 1
        C = C + eye(K, K)*tol*trace(C);                     % scale-aware regularizer (K>D)
    else
        C = C + eye(K, K)*gamma;                            % fixed regularizer, D == 1
    end
    W(:, ii) = C\ones(K, 1);                                % solve Cw=1
    W(:, ii) = W(:, ii)/sum(W(:, ii));                      % enforce sum(w)=1
end

% STEP 3: COMPUTE COST MATRIX M=(I-W)'(I-W)
% Preallocate a sparse identity with room for ~4KN nonzeros.
M = sparse(1:N, 1:N, ones(1, N), N, N, 4*K*N);
for ii = 1:N
    w = W(:, ii);
    jj = neighborhood(:, ii);
    M(ii, jj) = M(ii, jj) - w'; %#ok
    M(jj, ii) = M(jj, ii) - w;  %#ok
    M(jj, jj) = M(jj, jj) + w*w'; %#ok
end
% Symmetrize; M stays sparse, so no explicit sparse() conversion is needed.
M = max(M, M');
end

