# coding=utf8

import numpy as np

'''
matrix = np.random.random([32, 5])  # 32*5 random
print matrix
ids = np.array([0, 5, 17, 14])
print '==============='
print matrix[ids]  # prints a matrix of shape [4, 5]
'''

from sklearn.manifold import TSNE
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt

# Demo matrix: 3 samples (rows) x 6 features (columns).
a = np.array([
    [1, 2, 3, 4, 5, 6],
    [6, 5, 4, 3, 2, 1],
    [2, 2, 2, 2, 2, 2],
])


def zero_mean(data_mat):
    """Center the data by removing the per-column (per-feature) mean.

    Returns the centered data together with the mean vector, so callers
    can undo the shift later (e.g. when reconstructing samples).
    """
    col_means = np.mean(data_mat, axis=0)
    centered = data_mat - col_means
    return centered, col_means


def pca(data_mat, n_components=2):
    """Project `data_mat` onto its `n_components` principal components.

    Parameters
    ----------
    data_mat : array-like, shape (n_samples, n_features)
        Input data, one sample per row.
    n_components : int, default 2
        Number of principal components to keep.

    Returns
    -------
    lowD_data_mat : np.matrix, shape (n_samples, n_components)
        Data projected into the low-dimensional space.
    recon_mat : np.matrix, shape (n_samples, n_features)
        Data reconstructed from the projection (lossy when
        n_components is smaller than the rank of the centered data).
    n_eig_vect : np.matrix, shape (n_features, n_components)
        Principal axes as columns; reuse with `transform` for new data.
    mean_val : np.ndarray, shape (n_features,)
        Per-feature mean that was removed before projection.
    """
    data, mean_val = zero_mean(data_mat)
    # rowvar=False: rows are observations, columns are variables.
    cov_mat = np.cov(data, rowvar=False)

    # The covariance matrix is symmetric, so eigh is the right solver:
    # it guarantees real eigenvalues and returns them in ascending order.
    # (Also avoids np.mat, which was removed in NumPy 2.0.)
    eig_vals, eig_vects = np.linalg.eigh(cov_mat)

    # Keep np.matrix semantics so `*` below is matrix multiplication,
    # matching existing callers that multiply the returned axes with `*`.
    eig_vects = np.asmatrix(eig_vects)

    # Columns for the n_components largest eigenvalues (eigh sorts ascending,
    # so reverse the column order first, then take the leading columns).
    n_eig_vect = eig_vects[:, ::-1][:, :n_components]

    lowD_data_mat = data * n_eig_vect                      # low-dimensional projection
    recon_mat = (lowD_data_mat * n_eig_vect.T) + mean_val  # reconstruct in original space

    return lowD_data_mat, recon_mat, n_eig_vect, mean_val


def percentage2n(eig_vals, percentage):
    """Return how many of the largest eigenvalues are needed so that their
    sum reaches `percentage` of the total eigenvalue sum.

    Useful for picking `n_components` to retain e.g. 95% of the variance.

    Parameters
    ----------
    eig_vals : array-like of eigenvalues (any order).
    percentage : float
        Fraction of the total to cover, e.g. 0.95.

    Returns
    -------
    int
        Count of eigenvalues needed; 0 for empty input. If float round-off
        keeps the running sum just short of the target (e.g. percentage ==
        1.0), all eigenvalues are counted instead of implicitly returning
        None — the original fell off the end of the loop in that case.
    """
    sorted_vals = np.sort(eig_vals)[::-1]  # descending order
    total = np.sum(sorted_vals)
    running = 0.0
    num = 0
    for val in sorted_vals:
        running += val
        num += 1
        if running >= total * percentage:
            return num
    # Fallback: target never reached (empty input or float round-off).
    return num

# Run the hand-rolled PCA on the demo matrix and show the projection.
# NOTE: `print x` is Python-2-only syntax; `print(x)` with a single
# argument behaves identically on Python 2 and 3.
lowD_data_mat, recon_mat, n_eig_vect, mean_val = pca(a)

print(lowD_data_mat)

# A second matrix to project using the axes learned from `a`.
b = np.array([
    [1, 2, 3, 4, 5, 6],
    [1, 2, 1, 2, 1, 2],
    [6, 5, 4, 3, 2, 1],
    [2, 2, 2, 2, 2, 2],
])

def transform(dat, n_eig_vect, mean_val):
    """Project new samples onto previously computed principal axes.

    Parameters
    ----------
    dat : array-like, shape (n_samples, n_features)
        Samples to project.
    n_eig_vect : np.matrix or ndarray, shape (n_features, n_components)
        Principal axes, e.g. as returned by `pca`.
    mean_val : ndarray, shape (n_features,)
        Feature means of the *training* data (also returned by `pca`).

    Returns
    -------
    The centered samples projected into the component space,
    shape (n_samples, n_components).
    """
    centered = dat - mean_val
    # np.dot is matrix multiplication for both np.matrix and plain
    # ndarray inputs, whereas `*` would be elementwise for ndarrays.
    return np.dot(centered, n_eig_vect)

# Project `b` with the axes learned from `a`.
print(transform(b, n_eig_vect, mean_val))

# Cross-check against scikit-learn's PCA. Bind the estimator to a
# distinct name so it does not shadow the `pca` function defined above.
sk_pca = PCA(2)

s = sk_pca.fit_transform(a)

print(s)

print(sk_pca.transform(b))