from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.feature_selection import VarianceThreshold, SelectKBest, f_classif
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.feature_selection import RFE
from sklearn.linear_model import Lasso
from sklearn.metrics import accuracy_score
def variance_threshold_selector(X, y, threshold=0.0):
    """
    Select features by variance thresholding and report training accuracy.

    :param X: feature matrix
    :param y: target vector
    :param threshold: variance cutoff; features with variance at or below
        this value are dropped (default 0.0 removes only constant features)
    :return: accuracy of a Gaussian Naive Bayes classifier on its own
        training split
    """
    # Drop low-variance features. NOTE(review): the selector is fitted on the
    # full dataset before splitting, matching the original pipeline.
    reduced = VarianceThreshold(threshold).fit_transform(X)

    # Hold out 30% of the samples; fixed seed keeps the split reproducible.
    X_tr, X_te, y_tr, y_te = train_test_split(
        reduced, y, test_size=0.3, random_state=42
    )

    # Fit Gaussian Naive Bayes and score it on the data it was trained on,
    # as documented (this is train accuracy, not generalization accuracy).
    model = GaussianNB()
    model.fit(X_tr, y_tr)
    return model.score(X_tr, y_tr)

def correlation_selector(X, y, k=2):
    """
    Select the k best features by ANOVA F-score and report training accuracy.

    :param X: feature matrix
    :param y: target vector
    :param k: number of features to keep (default 2)
    :return: accuracy of a Gaussian Naive Bayes classifier on its own
        training split
    """
    # Keep the k features with the highest F-statistic w.r.t. the target.
    # NOTE(review): fitted on the full dataset before splitting, matching
    # the original pipeline.
    reduced = SelectKBest(score_func=f_classif, k=k).fit_transform(X, y)

    # 70/30 split with a fixed seed for reproducibility.
    X_tr, X_te, y_tr, y_te = train_test_split(
        reduced, y, test_size=0.3, random_state=42
    )

    # Fit Gaussian Naive Bayes and score on the training split, as documented.
    model = GaussianNB()
    model.fit(X_tr, y_tr)
    return model.score(X_tr, y_tr)

def chi_square_selector(X, y, k=2):
    """
    Select the k best features by chi-squared test and report training accuracy.

    :param X: feature matrix (must be non-negative for the chi2 test)
    :param y: target vector
    :param k: number of features to keep (default 2)
    :return: accuracy of a Gaussian Naive Bayes classifier on its own
        training split
    """
    # Keep the k features with the highest chi-squared statistic.
    # NOTE(review): fitted on the full dataset before splitting, matching
    # the original pipeline.
    reduced = SelectKBest(score_func=chi2, k=k).fit_transform(X, y)

    # 70/30 split with a fixed seed for reproducibility.
    X_tr, X_te, y_tr, y_te = train_test_split(
        reduced, y, test_size=0.3, random_state=42
    )

    # Fit Gaussian Naive Bayes and score on the training split, as documented.
    model = GaussianNB()
    model.fit(X_tr, y_tr)
    return model.score(X_tr, y_tr)


from sklearn.tree import DecisionTreeClassifier
from sklearn.feature_selection import RFE

def recursive_feature_elimination_selector(X, y, n_features_to_select=None):
    """
    Select features by recursive feature elimination (RFE) and report
    training accuracy.

    :param X: feature matrix
    :param y: target vector
    :param n_features_to_select: number of features to keep; None keeps
        half of the features (sklearn's RFE default)
    :return: accuracy of a decision-tree classifier on its own training split
    """
    # RFE repeatedly fits the estimator and prunes the weakest feature(s)
    # until only n_features_to_select remain. Fixed seeds keep both the
    # ranking estimator and the final classifier deterministic.
    # NOTE(review): fitted on the full dataset before splitting, matching
    # the original pipeline.
    ranker = DecisionTreeClassifier(random_state=42)
    reduced = RFE(ranker, n_features_to_select=n_features_to_select).fit_transform(X, y)

    # 70/30 split with a fixed seed for reproducibility.
    X_tr, X_te, y_tr, y_te = train_test_split(
        reduced, y, test_size=0.3, random_state=42
    )

    # Fit a fresh decision tree on the reduced features and score it on the
    # training split, as documented.
    model = DecisionTreeClassifier(random_state=42)
    model.fit(X_tr, y_tr)
    return model.score(X_tr, y_tr)



def lasso_feature_selector(X, y, alpha=0.1):
    """
    Select features whose Lasso coefficients are non-zero, then return the
    test-set accuracy of a Gaussian Naive Bayes classifier trained on them.

    :param X: feature matrix of shape (n_samples, n_features)
    :param y: target vector
    :param alpha: L1 regularization strength; larger values zero out more
        coefficients (default 0.1)
    :return: accuracy on the held-out 30% test split
    :raises ValueError: if alpha is so large that Lasso zeroes every
        coefficient, leaving no features to train on
    """
    # Fit Lasso on the full dataset; the L1 penalty drives coefficients of
    # uninformative features to exactly zero.
    lasso = Lasso(alpha=alpha)
    lasso.fit(X, y)

    # Boolean mask of surviving (non-zero) coefficients.
    mask = lasso.coef_ != 0
    if not mask.any():
        # Bug fix: previously a 0-column matrix fell through to GaussianNB,
        # which fails with an opaque sklearn error. Fail fast instead.
        raise ValueError(
            f"alpha={alpha} eliminated all features; use a smaller alpha"
        )
    selected_features = X[:, mask]

    # 70/30 train/test split with a fixed seed for reproducibility.
    X_train, X_test, y_train, y_test = train_test_split(
        selected_features, y, test_size=0.3, random_state=42
    )

    # Train Gaussian Naive Bayes on the selected features.
    clf = GaussianNB()
    clf.fit(X_train, y_train)

    # Evaluate on the held-out split.
    y_pred = clf.predict(X_test)
    return accuracy_score(y_test, y_pred)


def _run_demo():
    """Load the iris dataset and print the accuracy of each selector pipeline."""
    # Load the iris dataset (150 samples, 4 features, 3 classes).
    iris = load_iris()
    X, y = iris.data, iris.target

    accuracy1 = variance_threshold_selector(X, y)
    accuracy2 = correlation_selector(X, y)
    accuracy3 = chi_square_selector(X, y)
    accuracy4 = recursive_feature_elimination_selector(X, y, n_features_to_select=2)
    accuracy5 = lasso_feature_selector(X, y)

    # Print the accuracy of each method.
    print("方差阈值法——准确率：", accuracy1)
    print("相关系数法——准确率：", accuracy2)
    print("卡方检验——准确率：", accuracy3)
    print("递归特征消除——准确率：", accuracy4)
    print("Lasso回归——准确率：", accuracy5)


# Fix: guard the demo so importing this module no longer runs all five
# pipelines as a side effect; behavior when run as a script is unchanged.
if __name__ == "__main__":
    _run_demo()