% Mengzi Zhang
% 27 Nov 2011

% Assumes data is loaded (X, Y, Xtest in the workspace)

%function [Z] = run_pca (X, k)
% Parameters:
%   X: training data, n x m
%   k: dimension to reduce training data to
% Returns Z, reduced data from X, n x k.
%


% Target reduced dimensionality for the random projection below.
% Timing notes: k = 1400 takes about 5 minutes; k = 2800 takes more than
%   an hour.
k = 1400;


%% USE WITH CAUTION. Use for run_cv. Comment out for run_all

%clear X;
%clear Y;



%% Should be done in run_submission before calling this script

% CV training / test sets

%XcvTrain = make_sparse(train(bsxfun(@gt, [train().category], 6)));
%YcvTrain = double([train(bsxfun(@gt, [train().category], 6)).rating])';

%XcvTest = make_sparse(train(bsxfun(@lt, [train().category], 7)));
%YcvTest = double([train(bsxfun(@lt, [train().category], 7)).rating])';


% run on a smaller subset to see if still get out of memory
%XcvTrain = make_sparse(train(bsxfun(@eq, [train().category], 9)),size(X, 2));
%YcvTrain = double([train(bsxfun(@eq, [train().category], 9)).rating])';

%XcvTest = make_sparse(train(bsxfun(@eq, [train().category], 3)),size(X, 2));
%YcvTest = double([train(bsxfun(@eq, [train().category], 3)).rating])';



%% USE WITH CAUTION. To save memory. If functions later use these, these
%   clear statements need to be commented out.
%clear train
%clear test
%clear vocab



%% PCA via SVD, bsxfun runs out of memory. Can't do

%{

% n x m. Mean subtraction. Center data to mean
%diff = bsxfun (@minus, X, mean(X,1));
Xmean = mean (XcvTrain, 1);
% Out of memory
diff = bsxfun (@minus, XcvTrain, Xmean);

% Compute SVD
% U is n x n, V is m x m, S depends on whether use 'econ' arg, if not, n x m,
%   else min(n,m) x min(n,m).
[U, S, V] = svd (diff, 'econ');

% m x k. v1, ..., vk column vecs. vi is m x 1.
% Take first k columns of V as Principal Components, same as
%   taking first k rows of V' as on Wiki.
%   Here taking columns to construct matrix for multiplication.
pc = V (:, 1:k);

% n x k. Project data (mean subtracted) onto principal component (axes of new
%   low-dim space).
% Now have the new k-dimension data matrix Z. n examples, k features.
Z = diff * pc;
%}


%% PCA - Break up training data, see if can get past out of memory errs

%{
% Mean subtraction
n = size (XcvTrain, 1);
m = size (XcvTrain, 2);

block_size = 1400;
nBlocks = ceil (n / block_size);
% both arrays should be size nBlocks
block_start = 1 : block_size : n;
block_end = block_size : block_size : n;
% If block_end is missing n, tack it on
if numel(block_end) < nBlocks
  block_end = [block_end n];
end
%}

%{
% Break up data and process on a small chunk at a time, save all the
%   differences in chunks
for i = 1 : nBlocks

  % Take block rows and operate on these
  Xblock = (XcvTrain (block_start(i) : block_end(i), :));
size(Xblock)
  Xmean = mean (Xblock, 1);

  diffBlock = bsxfun (@minus, Xblock, Xmean);
  % Makes loop suddenly super slow
  %diff (block_start(i) : block_end(i), :) = diffBlock (:, :);

  save (['temp/diffBlock' int2str(i) '.mat'], 'diffBlock');

end
%}

%{
% Load each mean subtraction chunk, concatenate into big matrix

for i = 1 : nBlocks
  
  % Load saved diffBlock var
  load (['temp/diffBlock' int2str(i) '.mat']);
  
  % Takes up too much memory, machine pretty much gets stuck
  % Need better way. Maybe we won't concatenate at all, just do everything
  %   in chunks? but how would we find the SVD then...?
  % Hmm okay I can't do SVD in chunks (maybe, but maybe I can?), but easily,
  %   random projection can be done in chunks. Let's try that.
  % Concatenate to big matrix
  %diff (block_start(i) : block_end(i), :) = diffBlock (:, :);
  
  clear diffBlock;

end
%}



%{
% SVD
[U, S, V] = svd (diff, 'econ');

% Take k largest as axes
pc = V (:, 1:k);

% Project
Z = diff * pc;
%}



%% Random projections. Supposed to be less costly than SVD.

%{

m = size(XcvTrain, 2);

% Out of memory
% Form random matrix
%R = rand ([m k]);

% Normalize columns to have unit length
n = size(XcvTrain, 1);
for i = 1:m
  r = rand (n, 1);
  R(:,i) = r / norm(r);
end

% Project to random new space
Z = XcvTrain * R;

%}



%% Random projections - Break up training data, see if can get past out of
%   memory errs

% n = size (XcvTrain, 1);
% m = size (XcvTrain, 2);
n = size (X, 1);
m = size (X, 2);

% For breaking R and Z into column chunks. A size that won't run out of memory
kblock_size = 1400;
% If k is smaller than the preset block size, there's only 1 block, size k.
if (k < kblock_size)
  kblock_size = k;
end
n_kblocks = ceil (k / kblock_size);
% Start/end column index of each chunk; both arrays have size n_kblocks.
% Clamping the ends with min() handles a final partial block (when k is
%   not a multiple of kblock_size) without a separate fix-up step.
kblock_start = 1 : kblock_size : k;
kblock_end = min (kblock_start + kblock_size - 1, k);

% For breaking X into row chunks. A size that won't run out of memory
nblock_size = 1400;
% If n is smaller than the preset block size, there's only 1 block, size n.
if (n < nblock_size)
  nblock_size = n;
end
n_nblocks = ceil (n / nblock_size);
% Start/end row index of each chunk; both arrays have size n_nblocks.
nblock_start = 1 : nblock_size : n;
nblock_end = min (nblock_start + nblock_size - 1, n);

% Projected data, filled in one block of columns at a time
Z = zeros (n, k);

% Progress timer over the column-chunk loop
t = CTimeleft(n_kblocks);

% Break up the random projection matrix and process a small chunk of
%   columns at a time, so we never materialize the full m x k matrix R.
for i = 1 : n_kblocks

  t.timeleft();

  % Number of columns in this chunk. The last chunk may be narrower than
  %   kblock_size when k is not a multiple of it; sizing Rblock by the
  %   actual chunk width avoids a dimension mismatch in the Z assignment.
  kblock_width = kblock_end(i) - kblock_start(i) + 1;

  % m x kblock_width. Make a small chunk of random columns to project onto
  Rblock = rand ([m kblock_width]);

  % Normalize columns to have unit length.
  % Norm is just sqrt of the sum of squares of each column.
  colNorm = sqrt (sum (Rblock .^ 2, 1));
  Rblock_norm = bsxfun (@rdivide, Rblock, colNorm);

  % Project training data onto this chunk of the new space, in row chunks
  %   to bound memory use
  for j = 1 : n_nblocks

    % (rows x m) * (m x kblock_width) = (rows x kblock_width)
    Z (nblock_start(j) : nblock_end(j), kblock_start(i) : kblock_end(i)) = ...
      X (nblock_start(j) : nblock_end(j), :) * Rblock_norm;

  end

end

% Convert the projected data into a sparse matrix for training.
% Build it from the nonzero triplets: per `help sparse`, going through
%   [i j s] = find(Z) then sparse(i, j, s, ...) never exceeds Matlab's
%   maximum variable length, unlike constructing the index arrays by hand
%   row by row.
[Zi, Zj, Zdata] = find (Z);
Zsparse = sparse (Zi, Zj, Zdata, n, k);

% Free the dense copies; only Zsparse is needed from here on.
clear Zi Zj Zdata Z;


% Pass new projected data Z into any training method
% Liblinear

% NOTE(review): solver = 7 presumably selects a liblinear solver type
%   (liblinear's '-s 7' is L2-regularized logistic regression, dual) --
%   confirm how lin_liblinear interprets this argument.
solver = 7;
% [results.intersect, info, yhat] = lin_liblinear(Zsparse, YcvTrain, ...
%   XcvTest, YcvTest, solver);
% Train on the projected training data with labels Y, predict on Xtest.
% True test labels are unknown at submission time, so a zero vector is
%   passed as a placeholder.
[results.intersect, info, yhat] = lin_liblinear(Zsparse, Y, ...
  Xtest, zeros(size(Xtest,1), 1), solver);


% Calc expected value of the rating under the model's class scores.
% Softmax over the per-class scores in info.vals, one row per example.
% Subtract the row-wise max before exponentiating so exp() cannot overflow
%   to Inf (which would turn rows into NaN after normalization); the
%   normalized probabilities are mathematically unchanged.
p = exp(bsxfun(@minus, info.vals, max(info.vals, [], 2)));
p = bsxfun(@times, p, 1./sum(p,2));

% NOTE(review): assumes the columns of info.vals correspond to these
%   rating values in this order -- confirm against lin_liblinear's label
%   ordering.
ratings = [1 2 4 5];
% Expected rating: sum_r r * P(rating = r), one value per example.
YhatE = sum(bsxfun(@times, p, ratings),2);

% Save predictions
save('-ascii', 'submit_pca.txt', 'YhatE');

    


%% PCA - Skip mean subtraction for sparse data
%{
[U, S, V] = svd (XcvTrain, 'econ');
% Skip largest eigenvec, that's the mean
pc = V (:, 2:k+1);
% Project
Z = diff * pc;

%}



%% Matlab built in PCA
%{
% Out of memory at bsxfun
%[pc, score] = princomp(X);
[pc, score] = princomp(XcvTrain);
%}


%end


