"""
Week 12: 正则化方法详细实现
Regularization Methods Detailed Implementation
"""

from typing import Optional, Tuple

import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import Ridge, Lasso, ElasticNet, LassoCV, RidgeCV
from sklearn.model_selection import cross_val_score, GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, PolynomialFeatures

matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['axes.unicode_minus'] = False

class RegularizationAnalyzer:
    """Compare Ridge (L2), Lasso (L1) and ElasticNet regularization.

    Provides synthetic high-dimensional data generation, per-method
    regularization-path analysis with cross-validated alpha selection,
    and a 3x3 summary figure contrasting the three methods.
    """

    def __init__(self):
        # Display name only; the class is otherwise stateless.
        self.name = "Regularization Analyzer"

    def generate_high_dimensional_data(self, n_samples: int = 100, n_features: int = 50,
                                     n_informative: int = 10, noise: float = 0.1) -> Tuple[np.ndarray, np.ndarray]:
        """Generate a high-dimensional regression dataset.

        Args:
            n_samples: Number of rows.
            n_features: Total number of features (informative + noise).
            n_informative: Number of features actually used to build y.
            noise: Standard deviation of Gaussian noise added to y.

        Returns:
            Tuple (X, y) with X of shape (n_samples, n_features).
        """
        # Fixed random_state keeps the demo reproducible across runs.
        X, y = make_regression(
            n_samples=n_samples,
            n_features=n_features,
            n_informative=n_informative,
            noise=noise,
            random_state=42
        )
        return X, y

    def ridge_regression_analysis(self, X: np.ndarray, y: np.ndarray):
        """Ridge (L2) analysis: coefficient path and CV error over alphas.

        Returns:
            (alphas, ridge_coefs, ridge_scores, optimal_alpha) where
            ridge_coefs has shape (len(alphas), n_features) and
            ridge_scores are mean CV mean-squared errors (lower is better).
        """
        print("=== Ridge回归 (L2正则化) 分析 ===")

        # Standardize features so the penalty treats all coefficients equally.
        scaler = StandardScaler()
        X_scaled = scaler.fit_transform(X)

        # Regularization strengths spanning 8 orders of magnitude.
        alphas = np.logspace(-4, 4, 100)

        # Trace the regularization path: coefficients and CV error per alpha.
        ridge_coefs = []
        ridge_scores = []

        for alpha in alphas:
            ridge = Ridge(alpha=alpha)
            ridge.fit(X_scaled, y)

            ridge_coefs.append(ridge.coef_)

            # Negate sklearn's neg-MSE so that smaller score = better fit.
            cv_score = cross_val_score(ridge, X_scaled, y, cv=5,
                                     scoring='neg_mean_squared_error').mean()
            ridge_scores.append(-cv_score)

        ridge_coefs = np.array(ridge_coefs)

        # Optimal alpha minimizes the cross-validated MSE.
        optimal_idx = np.argmin(ridge_scores)
        optimal_alpha = alphas[optimal_idx]

        print(f"最优正则化强度: α = {optimal_alpha:.4f}")
        print(f"最优交叉验证得分: {ridge_scores[optimal_idx]:.4f}")

        return alphas, ridge_coefs, ridge_scores, optimal_alpha

    def lasso_regression_analysis(self, X: np.ndarray, y: np.ndarray):
        """Lasso (L1) analysis: path, CV error, and feature-selection counts.

        Returns:
            (alphas, lasso_coefs, lasso_scores, n_selected_features,
            optimal_alpha). optimal_alpha comes from LassoCV's own grid,
            not from the plotting grid `alphas`.
        """
        print("\n=== Lasso回归 (L1正则化) 分析 ===")

        # Standardize features so the penalty treats all coefficients equally.
        scaler = StandardScaler()
        X_scaled = scaler.fit_transform(X)

        # LassoCV picks the optimal alpha on its own automatic grid.
        lasso_cv = LassoCV(alphas=None, cv=5, random_state=42)
        lasso_cv.fit(X_scaled, y)

        optimal_alpha = lasso_cv.alpha_

        # Separate, coarser grid used only for plotting the path.
        alphas = np.logspace(-4, 1, 100)
        lasso_coefs = []
        lasso_scores = []
        n_selected_features = []

        for alpha in alphas:
            # Raise max_iter above the 1000 default: small alphas on
            # standardized high-dimensional data converge slowly.
            lasso = Lasso(alpha=alpha, max_iter=2000)
            lasso.fit(X_scaled, y)

            lasso_coefs.append(lasso.coef_)

            # Negate sklearn's neg-MSE so that smaller score = better fit.
            cv_score = cross_val_score(lasso, X_scaled, y, cv=5,
                                     scoring='neg_mean_squared_error').mean()
            lasso_scores.append(-cv_score)

            # Count features whose coefficient survived the L1 shrinkage.
            n_selected = np.sum(np.abs(lasso.coef_) > 1e-5)
            n_selected_features.append(n_selected)

        lasso_coefs = np.array(lasso_coefs)

        print(f"最优正则化强度: α = {optimal_alpha:.4f}")

        # Refit at the optimal alpha to report the selected feature set.
        # max_iter matches the path models above for consistent convergence.
        final_lasso = Lasso(alpha=optimal_alpha, max_iter=2000)
        final_lasso.fit(X_scaled, y)
        selected_features = np.where(np.abs(final_lasso.coef_) > 1e-5)[0]

        print(f"选择的特征数量: {len(selected_features)}/{X.shape[1]}")
        print(f"选择的特征索引: {selected_features[:10]}...")  # show first 10 only

        return alphas, lasso_coefs, lasso_scores, n_selected_features, optimal_alpha

    def elastic_net_analysis(self, X: np.ndarray, y: np.ndarray):
        """ElasticNet (L1+L2) analysis via grid search over alpha/l1_ratio.

        Returns:
            (elastic_results, optimal_alpha, optimal_l1_ratio), where
            elastic_results maps each l1_ratio to its coefficients,
            CV MSE and number of selected features at the optimal alpha.
        """
        print("\n=== 弹性网络 (L1+L2正则化) 分析 ===")

        # Standardize features so the penalty treats all coefficients equally.
        scaler = StandardScaler()
        X_scaled = scaler.fit_transform(X)

        # Joint grid search over penalty strength and L1/L2 mix.
        param_grid = {
            'alpha': np.logspace(-4, 1, 20),
            'l1_ratio': np.linspace(0.1, 0.9, 9)
        }

        elastic_net = ElasticNet(max_iter=2000, random_state=42)
        grid_search = GridSearchCV(elastic_net, param_grid, cv=5,
                                 scoring='neg_mean_squared_error')
        grid_search.fit(X_scaled, y)

        optimal_alpha = grid_search.best_params_['alpha']
        optimal_l1_ratio = grid_search.best_params_['l1_ratio']

        print(f"最优正则化强度: α = {optimal_alpha:.4f}")
        print(f"最优L1比例: l1_ratio = {optimal_l1_ratio:.2f}")

        # Sweep l1_ratio at the optimal alpha to show the L1/L2 trade-off.
        l1_ratios = np.linspace(0, 1, 11)
        elastic_results = {}

        for l1_ratio in l1_ratios:
            elastic_net = ElasticNet(alpha=optimal_alpha, l1_ratio=l1_ratio, max_iter=2000)
            elastic_net.fit(X_scaled, y)

            cv_score = cross_val_score(elastic_net, X_scaled, y, cv=5,
                                     scoring='neg_mean_squared_error').mean()
            n_selected = np.sum(np.abs(elastic_net.coef_) > 1e-5)

            elastic_results[l1_ratio] = {
                'coef': elastic_net.coef_,
                'cv_score': -cv_score,
                'n_selected': n_selected
            }

        return elastic_results, optimal_alpha, optimal_l1_ratio

    def visualize_regularization_comparison(self, X: np.ndarray, y: np.ndarray):
        """Run all three analyses and render a 3x3 comparison figure.

        Panels: coefficient paths (Ridge/Lasso), Lasso feature counts,
        CV-error curves, ElasticNet l1_ratio sweep, constraint geometry,
        final coefficients, sparsity, and final CV performance.
        """
        # Run the per-method analyses (these also print their summaries).
        ridge_alphas, ridge_coefs, ridge_scores, ridge_optimal = self.ridge_regression_analysis(X, y)
        lasso_alphas, lasso_coefs, lasso_scores, n_selected, lasso_optimal = self.lasso_regression_analysis(X, y)
        elastic_results, elastic_alpha, elastic_l1_ratio = self.elastic_net_analysis(X, y)

        fig, axes = plt.subplots(3, 3, figsize=(18, 15))

        # 1. Ridge coefficient path (first 10 features only, for legibility).
        ax1 = axes[0, 0]
        for i in range(min(10, ridge_coefs.shape[1])):
            ax1.semilogx(ridge_alphas, ridge_coefs[:, i], linewidth=1, alpha=0.7)
        ax1.axvline(x=ridge_optimal, color='red', linestyle='--', label=f'最优α={ridge_optimal:.3f}')
        ax1.set_xlabel('正则化强度 α')
        ax1.set_ylabel('系数值')
        ax1.set_title('Ridge回归系数路径')
        ax1.legend()
        ax1.grid(True, alpha=0.3)

        # 2. Lasso coefficient path.
        ax2 = axes[0, 1]
        for i in range(min(10, lasso_coefs.shape[1])):
            ax2.semilogx(lasso_alphas, lasso_coefs[:, i], linewidth=1, alpha=0.7)
        ax2.axvline(x=lasso_optimal, color='red', linestyle='--', label=f'最优α={lasso_optimal:.3f}')
        ax2.set_xlabel('正则化强度 α')
        ax2.set_ylabel('系数值')
        ax2.set_title('Lasso回归系数路径')
        ax2.legend()
        ax2.grid(True, alpha=0.3)

        # 3. Feature-selection count vs alpha (Lasso).
        ax3 = axes[0, 2]
        ax3.semilogx(lasso_alphas, n_selected, 'b-', linewidth=2)
        ax3.axvline(x=lasso_optimal, color='red', linestyle='--', label=f'最优α={lasso_optimal:.3f}')
        ax3.set_xlabel('正则化强度 α')
        ax3.set_ylabel('选择的特征数量')
        ax3.set_title('Lasso特征选择效果')
        ax3.legend()
        ax3.grid(True, alpha=0.3)

        # 4. Cross-validation error curves, Ridge vs Lasso.
        ax4 = axes[1, 0]
        ax4.semilogx(ridge_alphas, ridge_scores, 'b-', linewidth=2, label='Ridge')
        ax4.semilogx(lasso_alphas, lasso_scores, 'r-', linewidth=2, label='Lasso')
        ax4.axvline(x=ridge_optimal, color='blue', linestyle='--', alpha=0.7)
        ax4.axvline(x=lasso_optimal, color='red', linestyle='--', alpha=0.7)
        ax4.set_xlabel('正则化强度 α')
        ax4.set_ylabel('交叉验证误差')
        ax4.set_title('Ridge vs Lasso 性能比较')
        ax4.legend()
        ax4.grid(True, alpha=0.3)

        # 5. ElasticNet l1_ratio sweep: error (left axis) vs #features (right).
        ax5 = axes[1, 1]
        l1_ratios = list(elastic_results.keys())
        cv_scores = [elastic_results[ratio]['cv_score'] for ratio in l1_ratios]
        n_selected_elastic = [elastic_results[ratio]['n_selected'] for ratio in l1_ratios]

        ax5_twin = ax5.twinx()

        line1 = ax5.plot(l1_ratios, cv_scores, 'b-o', linewidth=2, label='交叉验证误差')
        line2 = ax5_twin.plot(l1_ratios, n_selected_elastic, 'r-s', linewidth=2, label='选择特征数')

        ax5.axvline(x=elastic_l1_ratio, color='green', linestyle='--',
                   label=f'最优l1_ratio={elastic_l1_ratio:.2f}')

        ax5.set_xlabel('L1比例')
        ax5.set_ylabel('交叉验证误差', color='blue')
        ax5_twin.set_ylabel('选择特征数', color='red')
        ax5.set_title('ElasticNet L1比例效果')

        # Merge legends from both y-axes into one box.
        lines = line1 + line2
        labels = [l.get_label() for l in lines]
        ax5.legend(lines, labels, loc='upper right')
        ax5.grid(True, alpha=0.3)

        # 6. Geometric interpretation of the L1/L2 constraint regions.
        ax6 = axes[1, 2]
        self.plot_regularization_geometry(ax6)

        # 7. Final-model coefficients side by side.
        ax7 = axes[2, 0]

        # Refit each method at its CV-selected parameters.
        scaler = StandardScaler()
        X_scaled = scaler.fit_transform(X)

        ridge_final = Ridge(alpha=ridge_optimal)
        ridge_final.fit(X_scaled, y)

        # max_iter matches the analysis models so the final fits converge.
        lasso_final = Lasso(alpha=lasso_optimal, max_iter=2000)
        lasso_final.fit(X_scaled, y)

        elastic_final = ElasticNet(alpha=elastic_alpha, l1_ratio=elastic_l1_ratio, max_iter=2000)
        elastic_final.fit(X_scaled, y)

        feature_indices = range(min(20, X.shape[1]))  # show first 20 features only

        ax7.plot(feature_indices, ridge_final.coef_[:20], 'bo-', label='Ridge', alpha=0.7)
        ax7.plot(feature_indices, lasso_final.coef_[:20], 'ro-', label='Lasso', alpha=0.7)
        ax7.plot(feature_indices, elastic_final.coef_[:20], 'go-', label='ElasticNet', alpha=0.7)

        ax7.set_xlabel('特征索引')
        ax7.set_ylabel('系数值')
        ax7.set_title('不同正则化方法的系数比较')
        ax7.legend()
        ax7.grid(True, alpha=0.3)

        # 8. Sparsity: fraction of (near-)zero coefficients per method.
        ax8 = axes[2, 1]

        methods = ['Ridge', 'Lasso', 'ElasticNet']
        sparsity = [
            np.sum(np.abs(ridge_final.coef_) < 1e-5) / len(ridge_final.coef_),
            np.sum(np.abs(lasso_final.coef_) < 1e-5) / len(lasso_final.coef_),
            np.sum(np.abs(elastic_final.coef_) < 1e-5) / len(elastic_final.coef_)
        ]

        bars = ax8.bar(methods, sparsity, color=['blue', 'red', 'green'], alpha=0.7)
        ax8.set_ylabel('稀疏性比例')
        ax8.set_title('不同方法的稀疏性比较')
        ax8.grid(True, alpha=0.3)

        # Annotate each bar with its value.
        for bar, sparse in zip(bars, sparsity):
            height = bar.get_height()
            ax8.text(bar.get_x() + bar.get_width()/2., height,
                    f'{sparse:.2f}', ha='center', va='bottom')

        # 9. Final CV performance of the three tuned models.
        ax9 = axes[2, 2]

        cv_scores_final = []
        for model in [ridge_final, lasso_final, elastic_final]:
            cv_score = cross_val_score(model, X_scaled, y, cv=5,
                                     scoring='neg_mean_squared_error').mean()
            cv_scores_final.append(-cv_score)

        bars = ax9.bar(methods, cv_scores_final, color=['blue', 'red', 'green'], alpha=0.7)
        ax9.set_ylabel('交叉验证误差')
        ax9.set_title('最终模型性能比较')
        ax9.grid(True, alpha=0.3)

        # Annotate each bar with its value.
        for bar, score in zip(bars, cv_scores_final):
            height = bar.get_height()
            ax9.text(bar.get_x() + bar.get_width()/2., height,
                    f'{score:.3f}', ha='center', va='bottom')

        plt.tight_layout()
        plt.show()

    def plot_regularization_geometry(self, ax):
        """Draw the classic geometric picture of L1 vs L2 constraints.

        Contours of a toy quadratic loss, an L1 diamond and an L2 circle,
        plus the unconstrained / L1 / L2 optima, onto the given axes.
        """
        # Grid for the loss contours.
        theta1 = np.linspace(-2, 2, 100)
        theta2 = np.linspace(-2, 2, 100)
        Theta1, Theta2 = np.meshgrid(theta1, theta2)

        # Toy quadratic objective centered at (0.5, -0.3).
        Z = 0.5 * (Theta1 - 0.5)**2 + 0.3 * (Theta2 + 0.3)**2

        contour = ax.contour(Theta1, Theta2, Z, levels=10, colors='gray', alpha=0.6)

        # L1 constraint region boundary: a diamond |θ₁|+|θ₂| = c.
        l1_constraint = 1.0
        l1_x = [-l1_constraint, 0, l1_constraint, 0, -l1_constraint]
        l1_y = [0, l1_constraint, 0, -l1_constraint, 0]
        ax.plot(l1_x, l1_y, 'r-', linewidth=3, label='L1约束 (|θ₁|+|θ₂|≤c)')

        # L2 constraint region boundary: a circle.
        l2_constraint = 1.0
        circle = plt.Circle((0, 0), l2_constraint, fill=False, color='blue', linewidth=3)
        ax.add_patch(circle)
        # Invisible proxy line so the circle appears in the legend.
        ax.plot([], [], 'b-', linewidth=3, label='L2约束 (θ₁²+θ₂²≤c)')

        # Unconstrained minimum of the toy objective.
        ax.plot(0.5, -0.3, 'ko', markersize=10, label='无约束最优解')

        # Illustrative L1 solution: typically lands on an axis (sparse).
        ax.plot(0, -0.3, 'ro', markersize=8, label='L1最优解')

        # Illustrative L2 solution: the unconstrained optimum projected
        # radially onto the circle.
        unconstrained_opt = np.array([0.5, -0.3])
        l2_opt = unconstrained_opt / np.linalg.norm(unconstrained_opt) * l2_constraint
        ax.plot(l2_opt[0], l2_opt[1], 'bo', markersize=8, label='L2最优解')

        ax.set_xlim(-2, 2)
        ax.set_ylim(-2, 2)
        ax.set_xlabel('θ₁')
        ax.set_ylabel('θ₂')
        ax.set_title('正则化几何解释')
        ax.legend()
        ax.grid(True, alpha=0.3)
        ax.set_aspect('equal')

def demonstrate_regularization_methods():
    """Demo driver: build high-dimensional data, run the full comparison,
    then print a short textual summary of the three regularization methods."""
    print("=== 正则化方法详细分析 ===\n")

    # 50 features, of which only 10 carry signal — a natural setting
    # for comparing shrinkage vs. feature selection.
    analyzer = RegularizationAnalyzer()
    X, y = analyzer.generate_high_dimensional_data(
        n_samples=100, n_features=50, n_informative=10, noise=0.1
    )

    print(f"数据维度: {X.shape}")
    print("信息特征数: 10")
    print(f"噪声特征数: {X.shape[1] - 10}")

    # Runs all analyses and renders the 3x3 comparison figure.
    analyzer.visualize_regularization_comparison(X, y)

    print("\n=== 正则化方法总结 ===")
    summary = (
        "1. Ridge (L2): 系数收缩，保留所有特征",
        "2. Lasso (L1): 特征选择，产生稀疏解",
        "3. ElasticNet: 结合L1和L2的优点",
        "4. L1正则化在高维稀疏数据中特别有用",
        "5. L2正则化在特征相关性高时表现更好",
        "6. 正则化强度需要通过交叉验证选择",
    )
    for line in summary:
        print(line)

if __name__ == "__main__":
    demonstrate_regularization_methods()