# Non-negative Matrix Factorization (NMF)
# Theory: a non-negative matrix V can be factored into the product of two
# non-negative matrices, V = W * H.
# W is the basis matrix (the extracted features, e.g. basis images);
# H is the coefficient matrix.
# NMF is therefore widely used in image processing and text mining.
# Optimization objective: minimize the difference (Euclidean distance)
# between the product W * H and the original matrix V.
# n_components: rank (dimensionality) of the factorization.
# init: initialization scheme for W and H.
import  matplotlib.pyplot as plt
from sklearn import decomposition  # decomposition algorithms (PCA, NMF)
from sklearn.datasets import fetch_olivetti_faces  # Olivetti face data

# Grid layout for the plots: 2 rows x 3 columns = 6 component images.
n_row,n_col = 2,3
n_components = n_row*n_col
# Each Olivetti face is a 64x64 grayscale image stored as a flat vector
# (plot_gallery reshapes rows back to this shape for display).
image_shape = (64,64)

# Download (on first use) and load the Olivetti faces dataset;
# shuffle=True randomizes the sample order.
dataset = fetch_olivetti_faces(shuffle=True)
faces = dataset.data


def plot_gallery(title, images, n_col=n_col, n_row=n_row):
    """Display a grid of ``n_row`` x ``n_col`` grayscale face images.

    Each element of *images* is a flat vector that is reshaped to
    ``image_shape`` before plotting.  The color scale is made symmetric
    around zero so positive and negative component values are rendered
    with equal visual weight.
    """
    plt.figure(figsize=(2. * n_col, 2.26 * n_row))
    plt.suptitle(title, size=16)

    for position, face in enumerate(images, start=1):
        plt.subplot(n_row, n_col, position)
        # Symmetric limit: largest absolute value in this image.
        limit = max(face.max(), -face.min())
        plt.imshow(face.reshape(image_shape), cmap=plt.cm.gray,
                   interpolation='nearest', vmin=-limit, vmax=limit)
        # Hide axis ticks — they carry no meaning for image data.
        plt.xticks(())
        plt.yticks(())

    plt.subplots_adjust(0.01, 0.05, 0.99, 0.94, 0.04, 0.)


# Show the first n_components raw faces for visual comparison with the
# learned components plotted below.
plot_gallery("First centered Olivetti faces", faces[:n_components])
###############################################################################

# Estimators to compare.  Use the shared `n_components` constant (instead of
# a hard-coded 6) so the decomposition rank always matches the plot grid,
# and request the randomized SVD solver explicitly so the PCA configuration
# matches its label.
estimators = [
    ('Eigenfaces - PCA using randomized SVD',
     decomposition.PCA(n_components=n_components, whiten=True,
                       svd_solver='randomized')),

    ('Non-negative components - NMF',
     decomposition.NMF(n_components=n_components, init='nndsvda', tol=5e-3))
]

###############################################################################

# Fit each estimator on the face data and display its learned components.
for name, estimator in estimators:
    print("Extracting the top %d %s..." % (n_components, name))
    print(faces.shape)
    # Learn the decomposition; components_ holds one basis image per row,
    # which plot_gallery reshapes back into 64x64 faces.
    estimator.fit(faces)
    components_ = estimator.components_
    plot_gallery(name, components_[:n_components])

plt.show()