#-*-coding:utf-8-*-
import numpy as np
from sklearn.feature_selection import VarianceThreshold
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import chi2
from sklearn.preprocessing import MinMaxScaler

def feature_filter(X, y):
    """Fit a two-stage preprocessing pipeline on the training data.

    Stage 1 removes zero-variance features (VarianceThreshold); stage 2
    fits a MinMaxScaler on the variance-filtered features.

    Args:
        X: training feature matrix.
        y: training labels (accepted by VarianceThreshold.fit but ignored).

    Returns:
        list of fitted transformers, in the order they must be applied
        by ``feature_transform``.
    """
    variance_selector = VarianceThreshold()
    variance_selector.fit(X, y)
    reduced = variance_selector.transform(X)

    scaler = MinMaxScaler()
    scaler.fit(reduced)
    return [variance_selector, scaler]

def feature_transform(selectors, X):
    """Apply a sequence of fitted transformers to X, in order.

    Args:
        selectors: iterable of fitted objects exposing ``transform``
            (e.g. the list returned by ``feature_filter``).
        X: feature matrix to transform.

    Returns:
        X after every transformer has been applied; X unchanged when
        ``selectors`` is empty.
    """
    for step in selectors:
        X = step.transform(X)
    return X

def down_sample(dataset, label, ratio=1.0):
    """Balance a binary-labeled dataset by down-sampling the majority class.

    Keeps every minority-class sample plus a random subset of the majority
    class of size ``int(minority_count * ratio)`` (clamped to the number of
    majority samples available), then shuffles the combined result.

    Args:
        dataset: 2-D np.ndarray of shape (n_samples, n_features).
        label: 1-D np.ndarray of 0/1 labels aligned with ``dataset`` rows;
            rows whose label is neither 0 nor 1 are silently dropped.
        ratio: desired majority/minority size ratio (default 1.0).

    Returns:
        (X, y): shuffled feature matrix and label vector. Both are float64
        because the combined buffer is allocated with ``np.zeros``.
    """
    # isinstance is the idiomatic check and also accepts ndarray subclasses.
    assert isinstance(dataset, np.ndarray)
    assert isinstance(label, np.ndarray)

    # Boolean indexing copies, so the shuffles below never mutate the
    # caller's arrays.
    pos_data = dataset[label == 1]
    neg_data = dataset[label == 0]
    np.random.shuffle(pos_data)
    np.random.shuffle(neg_data)

    # Pick minority/majority once instead of duplicating the whole branch.
    if len(pos_data) <= len(neg_data):
        minority, majority, minority_label = pos_data, neg_data, 1.0
    else:
        minority, majority, minority_label = neg_data, pos_data, 0.0

    n_min = len(minority)
    # Clamp: never request more majority samples than exist.
    keep = min(int(n_min * ratio), len(majority))

    # Last column carries the label so features and labels shuffle together.
    new_data = np.zeros((n_min + keep, dataset.shape[1] + 1))
    new_data[:n_min, :-1] = minority
    new_data[n_min:, :-1] = majority[:keep]
    new_data[:n_min, -1] = minority_label
    new_data[n_min:, -1] = 1.0 - minority_label

    np.random.shuffle(new_data)
    return new_data[:, :-1], new_data[:, -1]