"""

PCA and other normalisation methods.


Copyright 2009 Michael Seiler
Rutgers University
miseiler@gmail.com

This file is part of ConsensusCluster.

ConsensusCluster is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

ConsensusCluster is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with ConsensusCluster.  If not, see <http://www.gnu.org/licenses/>.


"""

import numpy as N

def pca(M, frac = 1.):
    """
    Principal component analysis via SVD.

    M    - Data matrix (samples on rows, features on columns)
    frac - Fraction of the total variance the retained components must explain

    Returns the first i rows of V (right singular vectors of the centered M),
    where i is chosen by get_var_fractions so that the leading singular values
    explain frac of the variance.  M itself is left untouched.
    """

    avg = N.average(M, 0)

    # PCA requires a centered matrix.  Center a COPY: the previous version
    # subtracted avg from M in place and then added it back, which corrupts
    # the caller's data with floating-point round-off and raises a casting
    # error for integer-dtype input.
    centered = M - avg

    # full_matrices=0: the full square U would run out of memory otherwise
    u, s, v = N.linalg.svd(centered, 0)

    i = get_var_fractions(s, frac)

    return v[:i]

def mds(M, frac = 1.):
    """

    Takes a distance matrix M and returns the classical multidimensional scaling

    If a lower dimensional representation is needed (e.g., for a plot), frac is the accuracy of reconstruction

    Returns the configuration as a float32 array with one row per sample and
    one retained coordinate per column.

    """

    # Squared distances scaled by -1/2: standard setup for double centering
    Q = M.copy()**2 / -2.0

    avgrow = Q.mean(1)
    avgcol = Q.mean(0)
    avg    = Q.mean()

    # Column, then row centering, then restore the grand mean
    # Note that if Q is indeed symmetric, avgrow=avgcol

    Q  = Q - avgcol
    Q  = (Q.T - avgrow).T

    Q += avg

    # Again, symmetric matrices ensure u=v.T
    u, s, v = N.linalg.svd(Q, 0)

    # Compute each selection criterion ONCE; the previous version called
    # both helpers a second time just to print them, duplicating the
    # "Variance fractions:" diagnostic output and rescanning the spectrum.
    by_fraction     = get_var_fractions(s, frac)
    by_significance = eigval_significance_test(s)

    # XXX Oct 15, 2012 Testing new eigenvalue selection method
    i = max(min(by_fraction, by_significance), 2)
    if frac == 1.0:
        i = len(s)

    print('\nEigvals by variance fractions: %s' % by_fraction)
    print('\nEigvals by significance tests: %s\n' % by_significance)

    # u * sqrt(s) broadcasts across columns -- numerically identical to
    # N.dot(u, N.diag(N.sqrt(s))) without materializing the diagonal matrix
    R = u * N.sqrt(s)

    return R[:,:i].astype(N.float32)

def eigval_significance_test(s):
    """

    Return the first i eigenvalues which satisfy the following equation:

    s[i] > s[i+1:].mean() + s[i+1:].std() * 3

    In other words, is this eigenvalue within three standard deviations of the rest of the eigenvalues?
    If not, it is not significantly different, and therefore those eigenvalues represent noise.

    s - 1-D array of eigen/singular values, assumed sorted in decreasing order

    """

    # Guard: with fewer than two values there is nothing to compare against
    # (the loop below would never run and i would be unbound -> NameError)
    if len(s) < 2:
        return 0

    for i in range(len(s) - 1):     # range, not xrange: Python 3 compatible
        v = s[i+1:]
        if s[i] < (v.mean() + v.std() * 3):
            break

    return i
    
def get_var_fractions(s, frac):
    """
    Get the element that, when summed up to this element, frac
    of the percentage of the variance is explained by the elements
    summed in this way.

    s is the singular matrix (in 1-D)
    frac is the fraction to explain

    Returns the count of components retained (always at least 2, for
    plotting), and prints the variance fraction of each one.

    """

    variances = s**2 / len(s)
    variance_fractions = variances / variances.sum()

    # Smallest prefix whose cumulative fraction reaches frac; if rounding
    # keeps the cumulative sum below frac, i falls through as len(s)
    for i in range(1, len(variance_fractions) + 1):
        if N.sum(variance_fractions[:i], 0) >= frac:
            break

    if i < 2:
        i = 2   #Minimum 2 for plotting!

    print('Variance fractions:')
    # min() guards the diagnostic loop against the forced minimum of 2
    # exceeding len(s) (the old xrange(i) loop raised IndexError there)
    for k in range(min(i, len(variance_fractions))):
        print('PC%s: %s' % (k+1, variance_fractions[k]))

    return i

def get_pca_genes(M, pca_fraction=0.85, eigenvector_weight=0.15):
    """
    Convenience function.  Expects a matrix with samples on the rows and genes on the columns

    pca_fraction        - Fraction of eigenvalues which explain pca_fraction of the variance to accept
    eigenvector_weight  - The top eigenvector_weight (by absolute value) fraction of those genes which occur with high weights
                          in those eigenvectors which correspond to the eigenvalues explained by pca_fraction

    Returns a tuple: (pca_fraction eigenvectors, eigenvector_weight gene indices)

    """

    # FIXME: Rather hackish way to determine BaseParser identity
    data = M.M if hasattr(M, 'M') else M

    eigenvectors = pca(data, frac=pca_fraction)    #From SVD

    return eigenvectors, select_genes(eigenvectors, eigenvector_weight)

def select_genes(v, weight):
    """
    Returns a tuple of the indices of those genes which comprise the top weight% in each eigenvector

    v      - Sequence of eigenvectors, each a 1-D array of per-gene weights
    weight - Fraction defining how close to either extreme a gene's weight
             must be for the gene to be selected

    Raises TypeError when fewer than two genes pass the threshold.
    (Kept as TypeError for backward compatibility with existing callers.)
    """

    selected = set()

    for vec in v:
        # Selection thresholds scaled in from each extreme of this vector
        min_value = (1 - weight) * N.min(vec)
        max_value = (1 - weight) * N.max(vec)

        for i in range(len(vec)):   # range, not xrange: Python 3 compatible
            if vec[i] <= min_value or vec[i] >= max_value:
                selected.add(i)

    if len(selected) < 2:
        # Old "raise TypeError, msg" form was a SyntaxError under Python 3
        raise TypeError("Not enough genes at %s%% weight in eigenvectors" % (weight))

    # sorted() reproduces the original ascending-index order
    return tuple(sorted(selected))
