import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from matplotlib.patches import Circle
from matplotlib.lines import Line2D

# Fix the RNG seed for reproducibility.
# NOTE(review): no random draws appear in the visible code, so this seed
# currently has no observable effect — confirm whether it is still needed.
np.random.seed(42)


def objective_function(x):
    """Quadratic bowl f(x) = (x - 2)^2 + 1, minimized at x = 2 with f(2) = 1.

    Works elementwise on numpy arrays as well as on scalars.
    """
    shifted = x - 2
    return shifted ** 2 + 1


def gradient(x):
    """Analytic derivative f'(x) = 2(x - 2) of the objective, expanded form."""
    return 2 * x - 4


# Base class for learning-rate schedules: stores the initial rate and
# declares the get_lr(epoch) interface that concrete schedules implement.
class LearningRateScheduler:
    """Abstract learning-rate schedule.

    Parameters
    ----------
    initial_lr : float
        Learning rate at epoch 0; subclasses decay it over time.
    """

    def __init__(self, initial_lr):
        self.initial_lr = initial_lr

    def get_lr(self, epoch):
        """Return the learning rate to use at the given epoch.

        Fix: the original body was `pass`, so calling the base class
        silently returned None, which would only fail later with a
        confusing TypeError inside the gradient-descent update. Failing
        fast here makes the abstract contract explicit.
        """
        raise NotImplementedError("subclasses must implement get_lr")


class ConstantLR(LearningRateScheduler):
    """Schedule that keeps the learning rate fixed at its initial value."""

    def get_lr(self, epoch):
        # The epoch argument is accepted for interface compatibility only;
        # a constant schedule never decays.
        return self.initial_lr


class TimeDecayLR(LearningRateScheduler):
    """Time-based (1/t) decay: lr(t) = lr0 / (1 + decay_rate * t)."""

    def __init__(self, initial_lr, decay_rate):
        super().__init__(initial_lr)
        self.decay_rate = decay_rate

    def get_lr(self, epoch):
        # Hyperbolic falloff: halves roughly every 1/decay_rate epochs.
        denominator = 1 + self.decay_rate * epoch
        return self.initial_lr / denominator


class StepDecayLR(LearningRateScheduler):
    """Staircase decay: multiply the rate by drop_factor every epochs_drop epochs."""

    def __init__(self, initial_lr, drop_factor, epochs_drop):
        super().__init__(initial_lr)
        self.drop_factor = drop_factor
        self.epochs_drop = epochs_drop

    def get_lr(self, epoch):
        # Number of completed drop intervals so far.
        completed_drops = np.floor(epoch / self.epochs_drop)
        return self.initial_lr * self.drop_factor ** completed_drops


class ExponentialDecayLR(LearningRateScheduler):
    """Smooth exponential decay: lr(t) = lr0 * exp(-decay_rate * t)."""

    def __init__(self, initial_lr, decay_rate):
        super().__init__(initial_lr)
        self.decay_rate = decay_rate

    def get_lr(self, epoch):
        # Continuous analogue of step decay; never reaches exactly zero.
        decay = np.exp(-self.decay_rate * epoch)
        return self.initial_lr * decay


# Gradient-descent driver: runs `epochs` update steps, asking the scheduler
# for a fresh learning rate each epoch.
def gradient_descent(initial_x, scheduler, epochs=100):
    """Minimize the objective starting from `initial_x`.

    Returns a pair of numpy arrays of length epochs + 1:
    (positions visited, learning rate used at each epoch). Index 0 holds
    the starting point together with the epoch-0 learning rate.
    """
    position = initial_x
    positions = [position]
    lrs = [scheduler.get_lr(0)]

    for epoch in range(1, epochs + 1):
        step_lr = scheduler.get_lr(epoch)
        position -= step_lr * gradient(position)
        positions.append(position)
        lrs.append(step_lr)

    return np.array(positions), np.array(lrs)


# Experiment configuration shared by every scheduler run.
initial_x = 10.0
initial_lr = 0.1
epochs = 50

# One scheduler instance per decay strategy under comparison.
schedulers = {
    "Constant LR": ConstantLR(initial_lr),
    "Time Decay": TimeDecayLR(initial_lr, decay_rate=0.1),
    "Step Decay": StepDecayLR(initial_lr, drop_factor=0.5, epochs_drop=10),
    "Exponential Decay": ExponentialDecayLR(initial_lr, decay_rate=0.1)
}

# Run gradient descent once per scheduler, keeping both the position
# trajectory and the learning-rate trajectory for the plots below.
results = {}
lr_histories = {}
for name, scheduler in schedulers.items():
    results[name], lr_histories[name] = gradient_descent(initial_x, scheduler, epochs)

# Create the figure that holds both panels side by side.
plt.figure(figsize=(14, 6))

# Left panel: learning-rate value per epoch for each scheduler.
ax1 = plt.subplot(1, 2, 1)
lr_lines = []
for name, lr_history in lr_histories.items():
    line, = ax1.plot(lr_history, label=name)
    lr_lines.append(line)
ax1.set_xlabel('Epoch')
ax1.set_ylabel('Learning Rate')
ax1.set_title('Learning Rate Schedule')
ax1.legend()
ax1.grid(True)

# Right panel: the objective curve plus each optimizer's descent path.
ax2 = plt.subplot(1, 2, 2)
x_range = np.linspace(-2, 12, 100)
ax2.plot(x_range, objective_function(x_range), 'k-', label='Objective Function')

# Mark the common starting point shared by all optimizers.
ax2.plot(initial_x, objective_function(initial_x), 'ro', markersize=8, label='Initial Point')

# Per-optimizer artists, appended in results-iteration order (the animation
# functions below rely on these lists being parallel to `results`).
colors = ['g', 'b', 'c', 'm']
path_lines = []
step_points = []
step_circles = []
labels = list(results.keys())

for i, (name, history) in enumerate(results.items()):
    # Path line, filled in frame by frame by the animation.
    line, = ax2.plot([], [], colors[i] + '-', linewidth=1.5, label=name)
    path_lines.append(line)

    # Discrete markers for each completed step.
    points, = ax2.plot([], [], colors[i] + 'o', markersize=4)
    step_points.append(points)

    # Circle marking the current position (moved during the animation).
    # NOTE(review): labeling the first circle with its scheduler name puts
    # that name in the legend twice (the path line carries it as well) —
    # confirm whether this duplicate legend entry is intended.
    circle = Circle((history[0], objective_function(history[0])), 0.15,
                    color=colors[i], alpha=0.7, label=name if i == 0 else "")
    ax2.add_patch(circle)
    step_circles.append(circle)

ax2.set_xlabel('x')
ax2.set_ylabel('f(x)')
ax2.set_title('Optimization Path with Steps')
ax2.legend()
ax2.grid(True)

# Text artist showing the current epoch, positioned in axes coordinates.
iteration_text = ax2.text(0.02, 0.95, '', transform=ax2.transAxes)


# Animation init: reset every artist to the pre-descent state. blit=True
# requires returning all artists that will be redrawn.
def init():
    start = (initial_x, objective_function(initial_x))
    for trace in path_lines:
        trace.set_data([], [])
    for marker in step_points:
        marker.set_data([], [])
    for circle in step_circles:
        circle.center = start
    iteration_text.set_text('')
    return path_lines + step_points + step_circles + [iteration_text]


# Animation update: advance every optimizer's artists to iteration `frame`.
# The artist lists are parallel to `results` (built in the same iteration
# order), so they can be zipped together.
def update(frame):
    for trace, marker, circle, history in zip(
            path_lines, step_points, step_circles, results.values()):
        visited = history[:frame + 1]
        heights = objective_function(visited)

        # Path line and discrete step markers share the same data.
        trace.set_data(visited, heights)
        marker.set_data(visited, heights)

        # Move the circle to the current position.
        circle.center = (history[frame], objective_function(history[frame]))

    iteration_text.set_text(f'Epoch: {frame}')

    return path_lines + step_points + step_circles + [iteration_text]


# Build the animation: one frame per epoch, plus frame 0 for the start.
# With blit=True, init/update must return every artist they modify.
ani = FuncAnimation(plt.gcf(), update, frames=range(epochs + 1),
                    init_func=init, blit=True, interval=200, repeat=True)

plt.tight_layout()
plt.show()

# Print the final position and objective value reached by each scheduler.
print("\n最终优化结果:")
for name, history in results.items():
    final_x = history[-1]
    final_loss = objective_function(final_x)
    print(f"{name}: x = {final_x:.6f}, f(x) = {final_loss:.6f}")