#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
微积分基础 - AI应用开发必备数学知识
重点：理解梯度、导数概念，为深度学习优化算法打基础
"""

import numpy as np
import matplotlib.pyplot as plt
import sympy as sp
from scipy.optimize import approx_fprime
# Configure matplotlib so CJK labels and minus signs render correctly.
plt.rcParams.update({
    'font.sans-serif': ['Microsoft YaHei'],  # font with Chinese glyphs
    'axes.unicode_minus': False,             # use ASCII '-' so minus renders
})
print("📈 微积分基础教学开始")
print("=" * 50)

# Section 1: the derivative — definition and geometric meaning.
print("1. 导数概念与几何意义")
print("-" * 30)

# Symbolic variable reused throughout the rest of the script.
x = sp.Symbol('x')

# Example quadratic and its symbolic derivative (method form of diff).
f = x**2 + 3*x + 2
f_prime = f.diff(x)

for line in (
    f"函数: f(x) = {f}",
    f"导数: f'(x) = {f_prime}",
    f"在x=2处的导数: {f_prime.subs(x, 2)}",
):
    print(line)

# 数值方法求导
def func(x):
    """Numeric counterpart of the symbolic f: evaluate x**2 + 3*x + 2.

    Works on scalars and on numpy arrays (used below for plotting).
    """
    return x**2 + 3*x + 2

# Numerical differentiation via the symmetric (central) difference quotient.
def numerical_derivative(f, x, dx=1e-6):
    """Approximate f'(x) with a central difference of half-width dx.

    The central quotient has O(dx**2) truncation error, versus O(dx)
    for a one-sided forward difference.
    """
    ahead = f(x + dx)
    behind = f(x - dx)
    return (ahead - behind) / (2 * dx)

# Cross-check the symbolic result (f'(2) = 7) with the numeric estimate.
slope_estimate = numerical_derivative(func, 2.0)
print(f"数值方法求导结果: {slope_estimate:.6f}")

# Section 2: derivative table for common elementary functions.
print("2. 常用函数的导数")
print("-" * 30)

# (symbolic expression, display label) pairs for the demo table.
examples = [
    (x**2, "x²"),
    (sp.sin(x), "sin(x)"),
    (sp.exp(x), "e^x"),
    (sp.log(x), "ln(x)"),
    (1/x, "1/x"),
]

for expr, label in examples:
    print(f"{label} 的导数是: {sp.diff(expr, x)}")

# Section 3: the gradient — partial derivatives of a multivariable function.
print("3. 梯度概念")
print("-" * 30)

y = sp.Symbol('y')                 # second symbolic variable
f_multi = x**2 + y**2 + 2*x*y

# The gradient is the vector of first partial derivatives.
partial_x = f_multi.diff(x)
partial_y = f_multi.diff(y)

print(f"多变量函数: f(x,y) = {f_multi}")
print(f"对x的偏导数: ∂f/∂x = {partial_x}")
print(f"对y的偏导数: ∂f/∂y = {partial_y}")
print(f"梯度向量: ∇f = [{partial_x}, {partial_y}]")

# Evaluate both components of the gradient at the point (1, 2).
at_point = {x: 1, y: 2}
gradient_at_point = [partial_x.subs(at_point), partial_y.subs(at_point)]
print(f"在点(1,2)处的梯度: {gradient_at_point}")

# Section 4: visual intuition for derivatives and gradients.
print("4. 可视化理解")
print("-" * 30)

fig = plt.figure(figsize=(15, 10))

# Panel 1: f(x) together with its tangent line at x = 2.
ax1 = plt.subplot(2, 3, 1)
x_vals = np.linspace(-2, 4, 100)
y_vals = func(x_vals)

# Tangent in point-slope form: y = f'(a)(x - a) + f(a), with a = 2.
tangent_x = 2
tangent_y = func(tangent_x)
tangent_slope = f_prime.subs(x, tangent_x)   # symbolic slope at the point
tangent_line = tangent_slope * (x_vals - tangent_x) + tangent_y

ax1.plot(x_vals, y_vals, 'b-', linewidth=2, label='f(x) = x² + 3x + 2')
ax1.plot(x_vals, tangent_line, 'r--', linewidth=2, label=f'切线 (斜率={tangent_slope})')
ax1.scatter([tangent_x], [tangent_y], color='red', s=100, zorder=5)
ax1.set_xlabel('x')
ax1.set_ylabel('f(x)')
ax1.set_title('函数图像与切线')
ax1.legend()
ax1.grid(True)

# Panel 2: the derivative f'(x) = 2x + 3 over the same x range.
ax2 = plt.subplot(2, 3, 2)
# Evaluate the symbolic derivative at each sample point.
derivative_vals = [f_prime.subs(x, val) for val in x_vals]
ax2.plot(x_vals, derivative_vals, 'g-', linewidth=2, label="f'(x) = 2x + 3")
ax2.set_xlabel('x')
ax2.set_ylabel("f'(x)")
ax2.set_title('导数图像')
ax2.legend()
ax2.grid(True)

# Panel 3: gradient descent on the bowl f(x, y) = x^2 + y^2.
ax3 = plt.subplot(2, 3, 3)
X, Y = np.meshgrid(np.linspace(-3, 3, 20), np.linspace(-3, 3, 20))
Z = X**2 + Y**2  # simple convex quadratic surface

# Trace 20 descent steps from (2.5, 2.5); here grad f = (2x, 2y).
learning_rate = 0.1
current_point = [2.5, 2.5]
path = [list(current_point)]

for _ in range(20):
    step_x = 2 * current_point[0]
    step_y = 2 * current_point[1]
    current_point = [current_point[0] - learning_rate * step_x,
                     current_point[1] - learning_rate * step_y]
    path.append(list(current_point))

path = np.array(path)
ax3.contour(X, Y, Z, levels=20, alpha=0.6)
ax3.plot(path[:, 0], path[:, 1], 'ro-', markersize=4, linewidth=2)
ax3.set_xlabel('x')
ax3.set_ylabel('y')
ax3.set_title('梯度下降路径')

# Panel 4: level curves of f(x, y) = x^2 + y^2 + 2xy, with gradient arrows.
ax4 = plt.subplot(2, 3, 4)
X, Y = np.meshgrid(np.linspace(-2, 2, 30), np.linspace(-2, 2, 30))
Z = X**2 + Y**2 + 2*X*Y

ax4.contour(X, Y, Z, levels=15)
ax4.set_xlabel('x')
ax4.set_ylabel('y')
ax4.set_title('多变量函数 f(x,y) = x² + y² + 2xy')

# Gradient is (2x + 2y, 2x + 2y) — both components equal for this f,
# so a single value serves for both arrow coordinates.
for px, py in [(1, 0), (0, 1), (-1, -1)]:
    grad_component = 2 * px + 2 * py
    ax4.quiver(px, py, grad_component, grad_component, color='red', scale=10)

# Panel 5: the chain rule on the composite sin(x^2).
# (Removed two dead locals: g_x and f_u were assigned but never used, and
# f_u = sp.sin(x) contradicted its own description f(u) = sin(u).)
plt.subplot(2, 3, 5)
# Composite f(g(x)) with g(x) = x^2 and f(u) = sin(u).
composite = sp.sin(x**2)

# Chain rule: d/dx sin(x^2) = cos(x^2) * 2x.
composite_derivative = sp.diff(composite, x)
print("\n链式法则示例:")
print("复合函数: sin(x^2)")
print(f"导数: {composite_derivative}")

# Plot the composite numerically over [-2, 2].
x_composite = np.linspace(-2, 2, 100)
y_composite = np.sin(x_composite**2)
plt.plot(x_composite, y_composite, 'purple', linewidth=2)
plt.xlabel('x')
plt.ylabel('f(g(x))')
plt.title('复合函数: sin(x^2)')
plt.grid(True)

# Panel 6: machine-learning connection — squared-error loss vs. prediction.
ax6 = plt.subplot(2, 3, 6)
y_true = 2                            # ground-truth value
x_loss = np.linspace(-2, 2, 100)      # candidate predictions y_pred
loss = (x_loss - y_true)**2           # squared error for each prediction

ax6.plot(x_loss, loss, 'orange', linewidth=2, label='损失函数 L = (y_pred - y_true)²')
ax6.axvline(x=2, color='green', linestyle='--', label='最优解 (y_pred = y_true)')
ax6.set_xlabel('预测值 y_pred')
ax6.set_ylabel('损失 L')
ax6.set_title('损失函数与梯度下降')
ax6.legend()
ax6.grid(True)

# Finalize the figure: layout, export to PNG, and display.
plt.tight_layout()
plt.savefig('calculus_visualization.png', dpi=300, bbox_inches='tight')
plt.show()

# Section 5: where calculus shows up in deep learning.
print("5. 深度学习中的应用")
print("-" * 30)

# Example 1: backpropagation is the chain rule applied layer by layer.
print("示例1: 神经网络反向传播中的链式法则")

# One-neuron "network": input -> hidden (ReLU) -> linear output.
input_val = 2.0
w1, b1 = 0.5, 0.1        # input-to-hidden weight and bias
w2, b2 = 1.2, 0.3        # hidden-to-output weight and bias
target = 3.0             # desired output

# Forward pass.
hidden = w1 * input_val + b1
hidden_activated = max(0, hidden)            # ReLU activation
output = w2 * hidden_activated + b2
loss = 0.5 * (output - target)**2            # squared-error loss

print(f"输入: {input_val}")
print(f"隐藏层输出: {hidden_activated:.3f}")
print(f"网络输出: {output:.3f}")
print(f"目标值: {target}")
print(f"损失: {loss:.3f}")

# Backward pass: multiply the local derivatives along the chain.
d_loss_d_output = output - target            # dL/dy_pred
d_output_d_hidden = w2                       # dy_pred/dh
d_hidden_d_w1 = input_val                    # dh/dw1
d_loss_d_w1 = d_loss_d_output * d_output_d_hidden * d_hidden_d_w1

print("梯度计算:")
print(f"损失对输出的梯度: ∂L/∂y_pred = {d_loss_d_output:.3f}")
print(f"损失对w1的梯度: ∂L/∂w1 = {d_loss_d_w1:.3f}")

# Example 2: watch gradient descent converge on a 1-D quadratic (below).
print("示例2: 梯度下降优化过程")

def quadratic_function(x):
    """Objective for the descent demo: (x - 3)^2 + 2, minimized at x = 3."""
    return (x - 3)**2 + 2

def gradient(x):
    """Analytic derivative of quadratic_function: d/dx[(x-3)^2 + 2] = 2(x - 3)."""
    return 2 * (x - 3)

# Run plain gradient descent: x <- x - lr * f'(x), logging every 2nd step.
current_x = 0.0          # starting point
learning_rate = 0.1      # step size
iterations = 10

print("梯度下降过程:")
print(f"初始点: x = {current_x:.3f}, f(x) = {quadratic_function(current_x):.3f}")

for step in range(iterations):
    slope = gradient(current_x)
    current_x = current_x - learning_rate * slope
    if step % 2 == 0:
        print(f"迭代 {step+1}: x = {current_x:.3f}, f(x) = {quadratic_function(current_x):.3f}, 梯度 = {slope:.3f}")

print(f"最终结果: x = {current_x:.3f}, f(x) = {quadratic_function(current_x):.3f}")

# Section 6: practice exercises, verified symbolically with sympy.
print("6. 实战练习")
print("-" * 30)

print("练习1: 计算导数")
print("计算以下函数的导数:")
print("1. f(x) = 3x³ - 2x² + 5x - 1")
print("2. g(x) = e^x * sin(x)")
print("3. h(x) = ln(x² + 1)")

# The three exercise expressions, in the order listed above.
x_sym = sp.Symbol('x')
exercise_exprs = (
    3*x_sym**3 - 2*x_sym**2 + 5*x_sym - 1,
    sp.exp(x_sym) * sp.sin(x_sym),
    sp.log(x_sym**2 + 1),
)

print("验证答案:")
for idx, (fname, expr) in enumerate(zip("fgh", exercise_exprs), start=1):
    print(f"{idx}. {fname}'(x) = {sp.diff(expr, x_sym)}")

print("练习2: 梯度下降实现")
print("实现一个简单的梯度下降算法来最小化函数 f(x) = (x-2)² + 3")

# Reference implementation for exercise 2.
def simple_gradient_descent(start=0.0, lr=0.1, steps=20):
    """Minimize f(x) = (x - 2)**2 + 3 by gradient descent; return the final x.

    Improvements over the original: the hard-coded start point, learning
    rate, and iteration count are now parameters (defaults reproduce the
    original run exactly), and the final iterate is returned instead of
    being discarded.

    Args:
        start: initial value of x.
        lr: learning rate (step size).
        steps: number of update iterations.

    Returns:
        The final iterate; with the defaults it converges toward x = 2.
    """
    x = start
    for i in range(steps):
        grad = 2 * (x - 2)   # f'(x) = 2(x - 2)
        x -= lr * grad
        # Progress checkpoints chosen for the default 20-step run.
        if i in (0, 4, 9, 14, 19):
            print(f"迭代 {i+1}: x = {x:.3f}, f(x) = {(x-2)**2+3:.3f}")
    return x

simple_gradient_descent()

# Closing summary, next steps, and recommended resources.
# (A stale commented-out divider print was removed here.)
summary_lines = (
    "🎯 学习要点总结:",
    "1. 理解导数的几何意义和计算方法",
    "2. 掌握常见函数的导数公式",
    "3. 理解梯度和多变量函数的偏导数",
    "4. 学会链式法则在复合函数中的应用",
    "5. 重点：梯度下降算法和深度学习中的优化",
    "下一步学习:",
    "- 机器学习常用模型（线性回归、逻辑回归）",
    "- Scikit-learn库使用",
    "- 第一个机器学习项目：泰坦尼克生存预测",
    "📚 推荐资源:",
    "- 《微积分学教程》",
    "- 3Blue1Brown的微积分系列视频",
    "- Coursera: Mathematics for Machine Learning",
)
for summary_line in summary_lines:
    print(summary_line)