%
% Copyright (c) 2009 Liang Tang
% 
%  SLICE_HI.m
%  written by Liang Tang
%  tangl99@gmail.com
% 
% Version history:
% 
% This is version 1.00, released March 16, 2009
%

% Find the top k linear correlations from the dataset
% @param data: The data matrix. Each row is a data instance.
% @param k: The number of dimensions that are strong correlated.
% @param fmax: The maximum allowed ratio of the k smallest eigenvalues to the total eigenvalue sum for a strongly correlated data subset.
% @param minsup: The minimum proportion of the found subsets
function [corrplanes, corratts, sups]=SLICE_HI(data, k, fmax, minsup)
    % Top-level driver: enumerate attribute subsets of increasing size and
    % search each subset for strongly correlated hyperplanes.
    % Returns:
    %   corrplanes - cell array of hyperplane vectors [coefficients, constant]
    %   corratts   - cell array; the attribute subset used for each plane
    %   sups       - support (cluster size / M) of each plane
    % Just for debugging
    if nargin < 1
        test_SLICE_HI();
        return;
    end
    % Shared state used by every helper in this file
    global gData;      % full data matrix; each row is an instance
    global gLabels;    % per-point label: 0 unprocessed, -1 noise, >0 cluster id
    global gClusterId; % next cluster id to assign
    corrplanes={};
    corratts={};
    sups=[];
    [M,N] = size(data);
    gData = data;
    gLabels = zeros(M,1);
    gClusterId=1;
    message = sprintf('The dataset is %d*%d.', M,N);
    disp(message);
    minsup_count = M*minsup;
    for i=2:N
        % genSubSets is defined elsewhere; presumably returns all i-element
        % attribute subsets of 1..N as a cell array -- TODO confirm
        atts=genSubSets(i,N);
        for j=1:length(atts)
            % Reset noise points (-1) back to unprocessed (0) so they can be
            % reconsidered under the next attribute subset, and count how
            % many points remain available for clustering.
            num_noise_points = 0;
            for t=1:M
                if gLabels(t) == -1 || gLabels(t) ==0
                    gLabels(t) = 0; % Marked as unprocessed
                    num_noise_points = num_noise_points +1;
                end
            end
            % Too few free points to seed an i-dimensional plane or to reach
            % minimum support: abort the whole search (exits the function).
            if num_noise_points <= i || num_noise_points < minsup_count
                return;
            end
            [localcorrs, localsups]=SLICE_Subrotine(k,fmax, minsup, atts{j}, num_noise_points);
            corrplanes={corrplanes{:},localcorrs{:}};
            % Record the attribute subset once per plane found on it
            for t=1:length(localcorrs)
                corratts={corratts{:}, atts{j}};
            end
            sups=[sups, localsups];
        end
    end
end

function [corrplanes, sups]=SLICE_Subrotine(k, fmax, minsup, atts, unlabelled_points_count)
    % Search one attribute subset `atts` for correlation hyperplanes.
    % Phase 1 repeatedly seeds clusters from unprocessed points (via
    % checkpoint) until too few unlabelled points remain; phase 2 fits a
    % hyperplane (via PCA) to every cluster that meets minimum support.
    % Returns:
    %   corrplanes - cell array of [coefficients, constant] per plane
    %   sups       - support fraction (cluster size / M) per plane
    global gData;
    global gLabels;
    global gClusterId;
    corrplanes={};
    sups=[];
    clusterid = gClusterId; % next cluster id to hand out in this call
    [M, junk] = size(gData);
    N = length(atts);
    message = sprintf('search feature set %s', mat2str(atts));
    disp(message);
    minsup_count = M*minsup;
    labelled_counts = []; % size of each cluster created in this call
    
    % Greedy search all hyperplanes; more than N free points are needed to
    % seed an N-dimensional plane.
    while unlabelled_points_count > N
        disp(sprintf('labelled points %d', M-unlabelled_points_count));
        for i=1:M
            if gLabels(i) == 0
                numpoints = checkpoint([i], k, fmax, atts, clusterid, unlabelled_points_count, minsup_count);
                if numpoints < minsup_count
                    gLabels(i) = -1; % labelled as noise
                    unlabelled_points_count = unlabelled_points_count - 1;
                else
                    labelled_counts = [labelled_counts, numpoints];
                    clusterid = clusterid+1;
                    unlabelled_points_count = unlabelled_points_count - numpoints;
                end
                break; % restart the scan from the first unprocessed point
            end
        end
    end
    
    % Construct significant hyperplanes and filter out noises
    for id=gClusterId:clusterid-1
        % Clusters below minimum support are treated as noise
        point_count = labelled_counts(id-gClusterId+1);
        if point_count < minsup_count
            continue;
        end
        % Pick up all the labelled data subset: rows of this cluster,
        % restricted to the columns in atts
        corr_data = zeros(point_count, N);
        j = 1;
        for i=1:M
            if gLabels(i) == id
                corr_data(j,:) = getDataPoint(i,atts);
                j=j+1;
            end
        end
        % Get the hyperplane: the normal is the eigenvector of least
        % variance; the constant makes the plane pass through the mean.
        [eigenvectors, eigenvalues]=PCA(corr_data);
        corr_mean = mean(corr_data);
        coe= eigenvectors(:,1)';
        const = -coe*corr_mean';
        plane=[coe,const];
        sup = point_count/M;
        % Add this plane into return
        corrplanes{end+1} = plane;
        sups= [sups,sup];
    end
    gClusterId = clusterid;
    
%     % Find the local linear correlations in superset of attributes
%     [M,d]=size(gData);
%     for i=1:d
%         if any(atts==i)
%             continue;
%         end
%         
%         localatts=[atts,i];
%         [localcorrs, localatts, localsups]=SLICE_Subrotine(k, fmax, minsup, localatts);
%         corrplanes = {corrplanes{:},localcorrs{:}};
%         corratts = {corratts{:}, localatts{:}};
%         sups=[sups,localsups];
%     end
end

function [numpoints]=checkpoint(seedindice, k, fmax, atts, clusterid, unlabelled_count, minsup_count)
    % Recursively extend the seed index set until it holds N unprocessed
    % points, then build the N*N seed matrix and grow a cluster around it
    % with greedysearch. A cluster smaller than minsup_count is rolled back
    % (its labels reset to 0). Returns the cluster size, or 0 when no
    % unprocessed point is available.
    global gData;
    global gLabels;
    [M,junk] = size(gData);
    N = length(atts);
    % BUG FIX: the original tested `if i==M` after the loop to detect "no
    % unused point found", which (a) clobbered a valid numpoints with 0
    % whenever the point consumed happened to be row M, and (b) left
    % numpoints unassigned when M==0. Track success explicitly instead.
    found = false;
    for i=1:M
        if gLabels(i) == 0 && all(seedindice ~= i) % Not used yet
            found = true;
            seedindice = [seedindice, i];
            if length(seedindice) == N
                % Generate the seed to construct the hyperplane
                seed=zeros(N,N);
                for j=1:N
                    seed(j,:) = getDataPoint(seedindice(j),atts);
                    gLabels(seedindice(j)) = clusterid;
                end
                unlabelled_count = unlabelled_count - N;
                % Search the hyperplane by this seed
                numpoints = greedysearch(k, fmax, atts, clusterid, unlabelled_count, seed);
                % If the number of points is very small, roll the labels back
                if numpoints < minsup_count
                    for j=1:M
                        if gLabels(j) == clusterid
                            gLabels(j) = 0;
                        end
                    end
                end
            else
                numpoints = checkpoint(seedindice, k, fmax, atts, clusterid, unlabelled_count, minsup_count);
                % If the found hyperplane is large enough, then return
                if numpoints >= minsup_count
                    return;
                end
            end
            break;
        end
    end
    
    % If no unused point remains in the dataset, report an empty cluster
    if ~found
        numpoints = 0;
        return;
    end
end

function [numpoints]=greedysearch(k, fmax, atts, clusterid, unlabelled_count, seed)
    % Grow a cluster around the seed: repeatedly absorb the unlabelled point
    % that keeps the hyperplane "thickness" smallest, stopping once the
    % thickness reaches fmax or every unlabelled point has been absorbed.
    % Returns the final cluster size.
    global gData;
    global gLabels;
    [M, junk] = size(gData);
    N = length(atts);
    % Running statistics of the cluster, seeded from the N*N seed matrix
    cur_cov = cov(seed);
    cur_mean = mean(seed);
    cur_f = 0;
    cur_size = N;
    while cur_f < fmax && cur_size < unlabelled_count + N
        % Absorb the point whose addition fattens the plane the least
        chosen = find_nearest_point(k, cur_f, atts, cur_cov, cur_mean, cur_size);
        gLabels(chosen) = clusterid;
        pt = getDataPoint(chosen, atts);
        % Incremental update of mean and covariance with the new point
        grown = cur_size + 1;
        upd_mean = (cur_mean*cur_size + pt) / grown;
        upd_cov = cur_size/grown*(cur_cov+cur_mean'*cur_mean) + pt'*pt/grown - upd_mean'*upd_mean;
        cur_size = grown;
        cur_mean = upd_mean;
        cur_cov = upd_cov;
        % Refresh the 'thickness' of the current hyperplane
        cur_f = thickness(upd_cov, k);
    end
    numpoints = cur_size;
end

function [nearest_index]=find_nearest_point(k, f, atts, d_cov, d_mean, d_size)
    % Among the unlabelled points, find the one whose absorption increases
    % the hyperplane "thickness" the least, and return its row index
    % (-1 when no unlabelled point exists).
    global gData;
    global gLabels;
    [M,junk] = size(gData);
    N = length(atts);
    best_delta = realmax;
    best_idx = -1;
    for row=1:M
        % Only unprocessed points are candidates
        if gLabels(row) ~= 0
            continue;
        end
        % Tentatively fold this point into the running statistics
        candidate = getDataPoint(row,atts);
        grown = d_size + 1;
        upd_mean = (d_mean*d_size + candidate) / grown;
        upd_cov = d_size/grown*(d_cov+d_mean'*d_mean) + candidate'*candidate/grown - upd_mean'*upd_mean;
        % Distance = growth in thickness caused by the candidate
        delta = thickness(upd_cov, k) - f;
        if delta < best_delta
            best_delta = delta;
            best_idx = row;
        end
    end
    nearest_index = best_idx;
end

function p=getDataPoint(index, atts)
    % Return row `index` of the global data matrix restricted to the column
    % indices in `atts`, as a 1-by-length(atts) row vector.
    global gData
    % Vectorized submatrix indexing replaces the former element-wise loop;
    % MATLAB 2-D subscripting yields the same 1-by-length(atts) row.
    p = gData(index, atts);
end

function [f]=thickness(covariance, k)
    % Fraction of the total eigenvalue mass carried by the k smallest
    % eigenvalues of `covariance`; small values indicate the data lies
    % close to a hyperplane.
    N = length(covariance);
    % Single-output eig returns the eigenvalues directly; sort ascending
    spectrum = sort(eig(covariance));
    f = sum(spectrum(1:k)) / sum(spectrum);
end

% Find the smallest component vector
% Find the smallest component vector
function [eigenvectors,eigenvalues]=PCA(data)
    % Eigen-decomposition of the (biased, 1/M-normalized) covariance of
    % `data`, with eigenvalues sorted ascending and the eigenvector columns
    % permuted to match, so column 1 is the direction of least variance.
    [M,N]=size(data);
    mn=mean(data,1);
    newdata=data - repmat(mn,M,1);
    covariance=1/M*newdata'*newdata; % biased normalization, kept as before
    [PC,V]=eig(covariance);
    [eigenvalues, rindices]=sort(diag(V));
    % Reorder the columns in one vectorized step (was an explicit loop)
    eigenvectors=PC(:,rindices);
end
