"""
Week 1: 机器学习基本概念实现
Basic Concepts Implementation for Machine Learning
"""

import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams['font.sans-serif'] = ['SimHei']  # 支持中文显示
matplotlib.rcParams['axes.unicode_minus'] = False

class MLBasics:
    """Machine learning basic concepts: data generation, hypothesis, cost, visualization."""

    def __init__(self):
        # Human-readable name of this demo component.
        self.name = "Machine Learning Basics"

    def generate_sample_data(
        self,
        n_samples: int = 100,
        noise: float = 0.1,
        slope: float = 2.0,
        intercept: float = 1.0,
        seed: int = 42,
    ) -> tuple[np.ndarray, np.ndarray]:
        """
        Generate a noisy linear dataset: y = slope * x + intercept + noise * N(0, 1).

        Args:
            n_samples: Number of samples to draw.
            noise: Scale of the additive Gaussian noise.
            slope: True slope of the underlying line (default 2.0, matching the original).
            intercept: True intercept of the underlying line (default 1.0).
            seed: Random seed for reproducibility (default 42, matching the original).
        Returns:
            X: Feature matrix of shape (n_samples, 1).
            y: Label vector of shape (n_samples,).
        """
        np.random.seed(seed)
        X = np.random.randn(n_samples, 1)
        y = slope * X.flatten() + intercept + noise * np.random.randn(n_samples)
        return X, y

    def hypothesis_function(self, X: np.ndarray, theta: np.ndarray) -> np.ndarray:
        """
        Hypothesis function h(x) = θ₀ + θ₁x, computed as a matrix-vector product.

        Args:
            X: Feature matrix (must already include the bias column).
            theta: Parameter vector.
        Returns:
            Predicted values, shape (n_samples,).
        """
        return X @ theta

    def cost_function(self, X: np.ndarray, y: np.ndarray, theta: np.ndarray) -> float:
        """
        Mean squared error cost J(θ) = 1/(2m) * Σ(h(x) - y)².

        Args:
            X: Feature matrix (with bias column).
            y: True labels.
            theta: Parameter vector.
        Returns:
            Scalar cost value (0.0 for an empty dataset).
        """
        m = len(y)
        # Guard against division by zero on an empty dataset.
        if m == 0:
            return 0.0
        predictions = self.hypothesis_function(X, theta)
        cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2)
        return cost

    def add_bias_term(self, X: np.ndarray) -> np.ndarray:
        """
        Prepend a column of ones (the intercept term) to the feature matrix.

        Args:
            X: Raw feature matrix of shape (n_samples, n_features).
        Returns:
            Feature matrix of shape (n_samples, n_features + 1).
        """
        return np.column_stack([np.ones(X.shape[0]), X])

    def visualize_data_and_hypothesis(self, X: np.ndarray, y: np.ndarray, theta: np.ndarray):
        """
        Plot the training data as a scatter plot together with the hypothesis line.

        Args:
            X: Feature matrix with bias column (data is taken from column 1).
            y: True labels.
            theta: Parameter vector [θ₀, θ₁] defining the line to draw.
        """
        plt.figure(figsize=(10, 6))

        # Scatter the raw data points (column 1 is the actual feature).
        plt.scatter(X[:, 1], y, alpha=0.6, color='blue', label='训练数据')

        # Evaluate the hypothesis on an evenly spaced grid spanning the data.
        X_line = np.linspace(X[:, 1].min(), X[:, 1].max(), 100)
        X_line_with_bias = self.add_bias_term(X_line.reshape(-1, 1))
        y_line = self.hypothesis_function(X_line_with_bias, theta)
        plt.plot(X_line, y_line, 'r-', linewidth=2, label=f'假设函数: h(x) = {theta[0]:.2f} + {theta[1]:.2f}x')

        plt.xlabel('特征 x')
        plt.ylabel('标签 y')
        plt.title('机器学习基本概念：数据与假设函数')
        plt.legend()
        plt.grid(True, alpha=0.3)
        plt.show()

    def cost_function_surface(
        self,
        X: np.ndarray,
        y: np.ndarray,
        theta0_range: tuple[float, float] = (-2, 4),
        theta1_range: tuple[float, float] = (-1, 4),
        n_grid: int = 50,
    ):
        """
        Plot the cost function over a grid of (θ₀, θ₁) as a 3D surface and a contour map.

        Args:
            X: Feature matrix with bias column.
            y: True labels.
            theta0_range: (min, max) of the θ₀ axis (default matches the original -2..4).
            theta1_range: (min, max) of the θ₁ axis (default matches the original -1..4).
            n_grid: Number of grid points per axis.
        """
        # Build the parameter grid.
        theta0_vals = np.linspace(theta0_range[0], theta0_range[1], n_grid)
        theta1_vals = np.linspace(theta1_range[0], theta1_range[1], n_grid)

        J_vals = np.zeros((len(theta0_vals), len(theta1_vals)))

        # Evaluate the cost at every grid point; J_vals[i, j] = J(θ₀=theta0_vals[i], θ₁=theta1_vals[j]).
        for i, theta0 in enumerate(theta0_vals):
            for j, theta1 in enumerate(theta1_vals):
                theta = np.array([theta0, theta1])
                J_vals[i, j] = self.cost_function(X, y, theta)

        fig = plt.figure(figsize=(12, 5))

        # 3D surface view. meshgrid uses 'xy' indexing, hence the transpose of J_vals.
        ax1 = fig.add_subplot(121, projection='3d')
        theta0_mesh, theta1_mesh = np.meshgrid(theta0_vals, theta1_vals)
        ax1.plot_surface(theta0_mesh, theta1_mesh, J_vals.T, alpha=0.8, cmap='viridis')
        ax1.set_xlabel('θ₀')
        ax1.set_ylabel('θ₁')
        ax1.set_zlabel('代价 J(θ)')
        ax1.set_title('代价函数3D表面')

        # Contour view of the same surface.
        ax2 = fig.add_subplot(122)
        contour = ax2.contour(theta0_mesh, theta1_mesh, J_vals.T, levels=20)
        ax2.clabel(contour, inline=True, fontsize=8)
        ax2.set_xlabel('θ₀')
        ax2.set_ylabel('θ₁')
        ax2.set_title('代价函数等高线')

        plt.tight_layout()
        plt.show()

def demonstrate_ml_basics():
    """Walk through the core ML concepts step by step: data, hypothesis, cost, visualization, math."""
    basics = MLBasics()

    print("=== 机器学习基本概念演示 ===\n")

    # Step 1: build a small synthetic dataset and add the intercept column.
    print("1. 生成示例数据...")
    features, labels = basics.generate_sample_data(n_samples=50, noise=0.2)
    design = basics.add_bias_term(features)

    print(f"   特征矩阵形状: {features.shape}")
    print(f"   标签向量形状: {labels.shape}")
    print(f"   添加偏置项后特征矩阵形状: {design.shape}\n")

    # Step 2: fix the hypothesis parameters by hand.
    print("2. 定义假设函数参数...")
    theta = np.array([1.0, 2.0])  # θ₀=1, θ₁=2
    print(f"   参数 θ = {theta}\n")

    # Step 3: evaluate the hypothesis and its cost on the dataset.
    print("3. 计算预测值和代价...")
    preds = basics.hypothesis_function(design, theta)
    current_cost = basics.cost_function(design, labels, theta)

    print(f"   前5个预测值: {preds[:5]}")
    print(f"   前5个真实值: {labels[:5]}")
    print(f"   当前代价: {current_cost:.4f}\n")

    # Step 4: show the fitted line and the cost landscape.
    print("4. 可视化结果...")
    basics.visualize_data_and_hypothesis(design, labels, theta)
    basics.cost_function_surface(design, labels)

    # Step 5: a few linear-algebra sanity checks with plain numpy.
    print("5. 数学基础验证...")
    print("   向量运算示例:")
    vec_a, vec_b = np.array([1, 2, 3]), np.array([4, 5, 6])
    print(f"   a = {vec_a}")
    print(f"   b = {vec_b}")
    print(f"   a · b = {np.dot(vec_a, vec_b)}")
    print(f"   ||a|| = {np.linalg.norm(vec_a):.4f}")

    print("\n   矩阵运算示例:")
    mat_a = np.array([[1, 2], [3, 4]])
    mat_b = np.array([[5, 6], [7, 8]])
    print(f"   A = \n{mat_a}")
    print(f"   B = \n{mat_b}")
    print(f"   A @ B = \n{mat_a @ mat_b}")

# Run the full demonstration only when executed as a script, not on import.
if __name__ == "__main__":
    demonstrate_ml_basics()