from sklearn import decomposition
from sklearn.manifold import Isomap
import numpy as np
from sklearn import preprocessing


def pca_data(data, a):
    """Project *data* onto its first *a* principal components.

    Returns:
        tuple: (transformed data, explained-variance ratio per component).
    """
    model = decomposition.PCA(n_components=a)
    transformed = model.fit_transform(data)
    return transformed, model.explained_variance_ratio_


# Returns: 1) the dimensionality-reduced data, 2) the PCA explained-variance ratio

def isomap_data(data, a):
    """Embed *data* into *a* dimensions using Isomap.

    Returns:
        tuple: (embedded data, Isomap reconstruction error).
    """
    iso = Isomap(n_components=a)
    embedded = iso.fit_transform(data)
    return embedded, iso.reconstruction_error()


# Returns: 1) the dimensionality-reduced data, 2) the Isomap reconstruction error

def not_reduction(data, i):
    """Select column *i* of *data* without any dimensionality reduction.

    Returns:
        tuple: (the column reshaped to an (n, 1) array, '' as a placeholder score).
    """
    column = data[:, i]
    return column.reshape(-1, 1), ''


def reducer(temperature='0', feature='1', option="2"):
    """Load the saved feature array, reduce (or select from) it, persist and return the result.

    Args:
        temperature: '0' selects the first slice of the 3-D feature array along
            axis 0; any other value selects the second slice.
        feature: 1-based column index (as a string); used only when option == "2".
        option: "0" -> PCA to 2 components, "1" -> Isomap to 2 components,
            "2" -> keep a single column (no reduction).

    Returns:
        list: [features, score] where score is the PCA explained-variance
        ratio, the Isomap reconstruction error, or '' when not reducing.

    Raises:
        ValueError: if option is not one of "0", "1", "2".
    """
    feature_array = np.load('./predict_result/feature_array.npy')

    # Pick the slice matching the temperature condition (axis-0 index 0 or 1).
    if temperature == '0':
        feature_array_ = feature_array[0, :, :]
    else:
        feature_array_ = feature_array[1, :, :]

    # Standardize features (zero mean, unit variance) before reduction.
    scale_feature_array = preprocessing.scale(feature_array_)

    if option == "0":  # PCA reduction
        features, score = pca_data(scale_feature_array, 2)
    elif option == "1":  # Isomap reduction
        features, score = isomap_data(scale_feature_array, 2)
    elif option == "2":  # no reduction: keep a single (1-based) column
        features, score = not_reduction(scale_feature_array, int(feature) - 1)
    else:
        # Previously an unknown option fell through and raised NameError on
        # the save below; fail fast with a clear message instead.
        raise ValueError(f"unknown option {option!r}; expected '0', '1' or '2'")
    np.save('./predict_result/features.npy', features)
    return [features, score]


# Run directly with default arguments (no reduction, first column).
if __name__ == '__main__':
    reducer()
