import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import VarianceThreshold, SelectKBest, mutual_info_classif
from sklearn.linear_model import LassoCV, lasso_path, Lasso
import warnings
from sklearn.exceptions import ConvergenceWarning

from utils.utils import *
import numpy as np
# Suppress all ConvergenceWarning messages from scikit-learn solvers
warnings.filterwarnings("ignore", category=ConvergenceWarning)

class LassoUtil:
    """Lasso-based feature selection pipeline.

    For each feature-summary table: variance filter -> mutual-information
    SelectKBest -> standardization -> LassoCV.  Saves diagnostic plots
    (coefficient path, CV MSE) and writes the selected-feature subsets to CSV,
    accumulating them into ``total_df`` for a combined export.
    """

    def __init__(self, dp, pic_save_path, feature_save_path=lasso_feature_path, split_random_state=72, selectK_random_state=0):
        """
        Args:
            dp: data provider; must expose ``split_train_and_test`` — TODO confirm exact contract.
            pic_save_path: root directory for diagnostic plots.
            feature_save_path: root directory for selected-feature CSVs.
            split_random_state: seed used for the train/test split (also part of output paths).
            selectK_random_state: seed for mutual_info_classif (also part of output paths).
        """
        # Candidate alpha grids per data type.  Currently unused by LassoCV
        # below (the explicit ``alphas=`` argument is commented out), kept to
        # reproduce earlier experiments.
        self.alpha_dict = {
            'CollagenFeatureSummary': np.logspace(-4, -1, 70),
            'RegionFeatureSummary': np.logspace(-5, -1, 70),
            'RoiFeatureSummary': np.logspace(-4.5, -1, 70)
        }
        self.pic_save_path = opj(pic_save_path,str(split_random_state),str(selectK_random_state),'feature_extract','lasso')
        self.feature_save_path = opj(feature_save_path,str(split_random_state),str(selectK_random_state))
        self.split_random_state = split_random_state
        self.selectK_random_state = selectK_random_state
        self.total_df = None  # accumulated merge of per-type filtered frames
        self.dp = dp

    def draw_pic(self, X_train, y_train, data_type, lasso):
        """Save the Lasso coefficient-path plot and the cross-validation MSE plot.

        Args:
            X_train: scaled training matrix used to fit ``lasso``.
            y_train: training labels.
            data_type: feature-summary name; its 'FeatureSummary' suffix is
                stripped to build the plot sub-directory.
            lasso: a fitted ``LassoCV`` instance (``alphas_``, ``alpha_``,
                ``mse_path_`` are read).
        """
        alphas_lasso, coefs_lasso, _ = lasso_path(X_train, y_train, alphas=lasso.alphas_)

        # --- Lasso path: one coefficient trajectory per feature ---
        plt.figure(figsize=(8, 5))
        for coef in coefs_lasso:
            plt.plot(np.log10(alphas_lasso), coef)
        plt.axvline(np.log10(lasso.alpha_), linestyle='--', color='black', label='Best alpha')
        plt.xlabel("log10(alpha)")
        plt.ylabel("Coefficients")
        plt.title("Lasso Path")
        # plt.legend()
        plt.grid(False)
        plt.tight_layout()
        psp = opj(self.pic_save_path, data_type.replace('FeatureSummary', ''))
        md(psp)
        plt.savefig(opj(psp, 'lasso_path.pdf'))
        plt.close()

        # --- CV MSE across the alpha grid (mean +/- std over folds) ---
        mse_mean = np.mean(lasso.mse_path_, axis=1)
        mse_std = np.std(lasso.mse_path_, axis=1)
        log_alphas = np.log10(lasso.alphas_)

        # Non-zero coefficient counts per alpha, read directly off the path
        # computed above.  (Previously each alpha was refit with a fresh
        # Lasso model — ~70 redundant fits producing the same counts.)
        # coefs_lasso has shape (n_features, n_alphas), aligned with alphas_.
        nonzero_counts = np.count_nonzero(coefs_lasso, axis=0)

        fig, ax = plt.subplots(figsize=(12, 6))
        ax.errorbar(log_alphas, mse_mean, yerr=mse_std, fmt='-o', color='blue',
                    ecolor='gray', capsize=5, capthick=1.5, elinewidth=1.2,
                    label="CV MSE ± STD")
        ax.axvline(np.log10(lasso.alpha_), color='red', linestyle='--', label='Best alpha')
        ax.set_xlabel("log10(alpha)")
        ax.set_ylabel("Mean Squared Error")
        ax.set_title("LassoCV MSE with Top X-axis Showing Non-zero Coefficients")
        ax.grid(False)
        ax.legend(loc="upper right")

        # Secondary (top) x-axis: sparse ticks labelled with the number of
        # surviving coefficients at that alpha.
        ax_top = ax.twiny()
        ax_top.set_xlim(ax.get_xlim())
        ax_top.set_xticks(log_alphas[::8])
        ax_top.set_xticklabels(nonzero_counts[::8])
        ax_top.set_xlabel("Number of Non-zero Coefficients")

        plt.tight_layout()
        plt.savefig(opj(psp, 'lasso_MSE.pdf'))
        plt.close()


    def extract_feature(self, df, data_type):
        """Select features for one data type and persist the filtered frame.

        Args:
            df: full feature table including 'Patient_ID' and the target column.
            data_type: feature-summary name, used for paths and split routing.

        Returns:
            True when fewer than 2 features survive Lasso (signal to the
            caller that selection collapsed), False otherwise.
        """
        # Split train/test (only the training half is used for selection).
        X_train, X_test, y_train, y_test, _, _ = self.dp.split_train_and_test(data_type=data_type,X=df.copy(),flag=True,model_name="feature_extract")

        # Drop near-constant features.
        vt = VarianceThreshold(threshold=1e-5)
        X_train_vt = vt.fit_transform(X_train)

        # Mutual-information pre-selection.  When features outnumber samples,
        # cap at 200 — but never more than the available feature count, since
        # SelectKBest raises ValueError when k > n_features.
        n_features = X_train_vt.shape[1]
        k = 'all' if n_features < X_train_vt.shape[0] else min(200, n_features)
        skb = SelectKBest(score_func=lambda X, y: mutual_info_classif(X, y, random_state=self.selectK_random_state), k=k)
        X_train_selected = skb.fit_transform(X_train_vt, y_train)

        # Standardize before Lasso (coefficients are scale-sensitive).
        scaler = StandardScaler()
        X_train_scaled = scaler.fit_transform(X_train_selected)

        # Fit LassoCV on the default alpha grid.
        lasso = LassoCV(
            # alphas=self.alpha_dict.get(data_type, np.logspace(-4, -1, 70)),
            cv=5,
            max_iter=200000,
            tol=1e-4,
            random_state=100,
            n_jobs=-1
        )
        lasso.fit(X_train_scaled, y_train)

        # Map selected columns back through both filter stages to original names.
        original_feature_names = X_train.columns
        vt_mask = vt.get_support()
        skb_mask = skb.get_support()
        vt_features = original_feature_names[vt_mask]
        skb_features = vt_features[skb_mask]
        selected_features = skb_features[lasso.coef_ != 0]

        print(f"{data_type.replace('FeatureSummary', '')}:{df.shape[1]}->{len(selected_features)}")

        # Diagnostic plots.
        self.draw_pic(X_train_scaled, y_train, data_type, lasso)

        # Persist the filtered frame (IDs + target + surviving features),
        # dropping rows where every selected feature is missing.
        df_filtered = df.loc[:, ['Patient_ID', target_column] + selected_features.tolist()]
        df_filtered = df_filtered.dropna(how='all', subset=selected_features.tolist())

        md(self.feature_save_path)
        df_filtered.to_csv(opj(self.feature_save_path, data_type.replace('FeatureSummary', '') + '.csv'), index=False)

        # Accumulate into the combined frame, left-joining on ID and target.
        if self.total_df is None:
            self.total_df = df_filtered
        else:
            self.total_df = self.total_df.merge(df_filtered, how='left', on=['Patient_ID', target_column])
        if len(selected_features) < 2:
            return True
        return False

    def save_total_df(self):
        """Write the accumulated combined feature table to 'total.csv'."""
        md(self.feature_save_path)
        self.total_df.to_csv(opj(self.feature_save_path, 'total.csv'), index=False)

