import time

import numpy as np
import matplotlib.pyplot as plt
import streamlit as st


def one_sample_code():
    """Fit a hand-picked line (w=0.9, b=0) to 2-D scatter data and plot it.

    Left subplot: the scatter points and the fixed regression line.
    Right subplot: the MSE loss as a function of the slope w (b held at 0).
    The figure is rendered into the Streamlit app via ``st.pyplot``.
    """
    data = np.array([
        [0.8, 1.0],
        [1.7, 0.9],
        [2.7, 2.5],
        [3.2, 2.9],
        [3.7, 2.8],
        [4.1, 3.5],
        [4.5, 3.7],
        [4.9, 4.6],
    ])

    x_data = data[:, 0]
    y_data = data[:, 1]

    # 2. Forward pass -- predict y with a hand-picked line y = w * x + b.
    w = 0.9
    b = 0
    y_predict = w * x_data + b

    # 3. MSE (mean squared error) of the fixed line.
    esm_loss = np.mean(np.square(y_data - y_predict))
    print(esm_loss)

    # Build an explicit Figure instead of drawing on the global pyplot state:
    # st.pyplot(<module>) is deprecated, and global state leaks between reruns.
    fig, (ax_fit, ax_loss) = plt.subplots(1, 2)

    ax_fit.set_xlim(0, 5)
    ax_fit.set_ylim(0, 5)
    ax_fit.scatter(x_data, y_data, alpha=0.5, c='b')
    ax_fit.plot([0, 5], [w * 0 + b, w * 5 + b], c='r')

    # Loss curve over a range of slopes; use a distinct loop variable so the
    # outer slope `w` is not shadowed by the comprehension.
    w_values = np.linspace(-2, 4, 1000)
    loss_values = [np.mean((y_data - (wv * x_data + b)) ** 2) for wv in w_values]
    ax_loss.plot(w_values, loss_values)

    st.pyplot(fig)


def sample_code_two():
    """Train y = w*x + b by gradient descent on MSE and animate the fit.

    Renders three live panels into a single Streamlit placeholder:
      - ax1: the scatter data plus the current fitted line,
      - ax2: the 3-D loss surface over (w, b) with the descent path,
      - ax3: loss contours over (w, b) with the descent path.
    """
    # 1. Input scatter points (x, y pairs).
    data = np.array([
        [-0.5, 7.7],
        [1.8, 93.5],
        [0.9, 57.8],
        [0.4, 39.2],
        [-1.4, -15.7],
        [-1.4, -37.3],
        [-1.8, -49.1],
        [1.5, 75.6],
        [0.4, 34],
        [0.8, 62.3]
    ])
    x_data = data[:, 0]
    y_data = data[:, 1]

    # 2. Hypothesis (the line being fitted).
    def f(w, x, b):
        return w * x + b

    # 3. MSE (mean squared error) loss.
    def loss_fn(y_true, y_pre):
        return np.mean((y_true - y_pre) ** 2)

    # 4. Hyperparameters.
    lr = 0.01  # learning rate, e.g. lr=1e-2, lr=2e-5 in scientific notation
    Epochs = 400  # number of training epochs

    # 5. Parameter initialization.
    w = 0
    b = 0

    # Figure layout: scatter plot + 3-D loss surface + contour plot.
    fig = plt.figure("show Linear regression", figsize=(12, 6))
    # Scatter plot ----------------------------------------------------
    ax1 = fig.add_subplot(2, 2, 1)
    ax1.scatter(x_data, y_data, color="b")
    # 3-D surface of the loss landscape -------------------------------
    ax2 = fig.add_subplot(1, 2, 2, projection="3d")
    w_values = np.linspace(-20, 80, 100)
    b_values = np.linspace(-20, 80, 100)
    W, B = np.meshgrid(w_values, b_values)
    # Vectorized loss surface: entry [j, i] is the MSE for
    # (w_values[i], b_values[j]) — same layout as the original nested
    # Python double loop, computed in one broadcasted NumPy expression.
    preds = W[:, :, None] * x_data + B[:, :, None]
    loss_values = np.mean((y_data - preds) ** 2, axis=2)
    ax2.plot_surface(W, B, loss_values, cmap="viridis", alpha=0.8)
    # Contour plot ----------------------------------------------------
    ax3 = fig.add_subplot(2, 2, 3)
    ax3.contour(W, B, loss_values, cmap="viridis")
    ax3.scatter(w, b, color="black", s=20)

    # 6. Training loop. gd_path records (w, b) after every update so the
    # descent trajectory can be drawn over the surface and the contours.
    gd_path = []
    placeholder = st.empty()  # Streamlit slot that is re-rendered each refresh

    for epoch in range(Epochs):
        # Forward pass.
        y_pre = f(w, x_data, b)
        # Loss.
        loss = loss_fn(y_data, y_pre)
        # Backward pass: analytic gradients of the MSE,
        # d/dw mean((y - (wx+b))^2) and d/db mean((y - (wx+b))^2).
        dw = np.mean(-2 * (y_data - y_pre) * x_data)
        db = np.mean(-2 * (y_data - y_pre))
        # Gradient-descent update: new = old - lr * gradient.
        w = w - lr * dw
        b = b - lr * db
        # Record the trajectory.
        gd_path.append((w, b))

        # Refresh the plots on the first epoch and then every 10th epoch.
        if epoch == 0 or (epoch + 1) % 10 == 0:
            # Scatter + current fitted line.
            ax1.clear()
            ax1.scatter(x_data, y_data, color="b")
            x_min, x_max = x_data.min(), x_data.max()
            y_min, y_max = round(f(w, x_min, b), 3), round(f(w, x_max, b), 3)
            ax1.plot([x_min, x_max], [y_min, y_max], color="r")
            ax1.set_title(f"w:{round(w, 3)} b:{round(b, 3)} Loss:{round(loss, 3)}")
            # Current loss point on the 3-D surface.
            ax2.scatter(w, b, loss_fn(y_data, f(w, x_data, b)), color="black", s=20)
            # Contours with the current parameter point.
            ax3.clear()
            ax3.contour(W, B, loss_values, cmap="viridis")
            ax3.scatter(w, b, color="black", s=20)
            # Descent trajectory. (gd_path is never empty here — the append
            # above always runs first — so the original emptiness check was
            # dead code and has been dropped.)
            gd_w, gd_b = zip(*gd_path)
            loss_path = [loss_fn(y_data, f(w_val, x_data, b_val)) for w_val, b_val in zip(gd_w, gd_b)]
            ax2.plot(gd_w, gd_b, loss_path, color="black")
            ax3.plot(gd_w, gd_b)

            # Re-render through Streamlit instead of plt.pause.
            placeholder.pyplot(fig)


def sample_code():
    """Return the source-code listings for the two demos as strings.

    Returns:
        tuple[str, str]: ``(demo_one_src, demo_two_src)`` — standalone
        matplotlib versions of :func:`one_sample_code` and
        :func:`sample_code_two`, suitable for display (e.g. ``st.code``).
    """
    # NOTE: local names renamed — the original `one_sample_code` local
    # shadowed the module-level function of the same name.
    demo_one_src = """
import numpy as np
import matplotlib.pyplot as plt

data = np.array([
    [0.8, 1.0],
    [1.7, 0.9],
    [2.7, 2.5],
    [3.2, 2.9],
    [3.7, 2.8],
    [4.1, 3.5],
    [4.5, 3.7],
    [4.9, 4.6],
])

x_data = data[:, 0]
y_data = data[:, 1]

# 2. 前向传播 -- 拟合一条线，预测对应y的值
w = 0.9
b = 0
y_predict = w * x_data + b

# 3. 计算误差  ESM 均方误差
esm_loss = np.mean(np.square(y_data - y_predict))
print(esm_loss)

plt.subplot(121)
plt.xlim(0, 5)
plt.ylim(0, 5)
plt.scatter(x_data, y_data, alpha=0.5, c='b')
plt.plot([0, 5], [w * 0 + b, w * 5 + b], c='r')

plt.subplot(122)
w_values = np.linspace(-2, 4, 1000)
loss_values = [np.mean((y_data - (w * x_data + b))**2) for w in w_values]
plt.plot(w_values, loss_values)
plt.show()
"""
    demo_two_src = """
import numpy as np
import matplotlib.pyplot as plt
# 1. 散点输入
data = np.array([
    [-0.5, 7.7],
    [1.8, 93.5],
    [0.9, 57.8],
    [0.4, 39.2],
    [-1.4, -15.7],
    [-1.4, -37.3],
    [-1.8, -49.1],
    [1.5, 75.6],
    [0.4, 34],
    [0.8, 62.3]
])
x_data = data[:, 0]
y_data = data[:, 1]


# 2. 期望函数（拟合线）
def f(w, x, b):
    return w * x + b


# 3. ESM 均方误差损失函数
def loss_fn(y_true, y_pre):
    return np.mean((y_true - y_pre) ** 2)


# 4. 超参数
lr = 0.01  # 学习率 lr=1e-2  lr=2e-5 科学计数法
Epochs = 400  # 训练轮数

# 5. 初始化参数
w = 0
b = 0

# 画图显示 —— 散点图 + 预测线 + 梯度下降图
fig = plt.figure("show Linear regression", figsize=(12, 6))
# 散点图 ----------------------------------------------------------
ax1 = fig.add_subplot(2, 2, 1)
ax1.scatter(x_data, y_data, color="b")
# 梯度下降的3D曲面图 ------------------------------------------------
ax2 = fig.add_subplot(1, 2, 2, projection="3d")
w_values = np.linspace(-20, 80, 100)
b_values = np.linspace(-20, 80, 100)
W, B = np.meshgrid(w_values, b_values)
# print(w_values.shape)
# print(W.shape)
# print(W)    # W 的每一行都是 w_values
# print(B)    # B 的每一列都是 b_values， 每一行的值是重复值
loss_values = np.zeros_like(W)
for i, w_v in enumerate(w_values):
    for j, b_v in enumerate(b_values):
        loss_values[j, i] = loss_fn(y_data, f(w_v, x_data, b_v))
        # 注意这里：我写的是[j, i]
ax2.plot_surface(W, B, loss_values, cmap="viridis", alpha=0.8)
# 绘制等高线图 ------------------------------------------------------
ax3 = fig.add_subplot(2, 2, 3)
ax3.contour(W, B, loss_values, cmap="viridis")
ax3.scatter(w, b, color="black", s=20)

# 6. 循环训练
# 定义存储沿着梯度下降方向更新参数 w b 的列表
gd_path = []
for epoch in range(Epochs):
    # 前向传播
    y_pre = f(w, x_data, b)
    # 计算损失
    loss = loss_fn(y_data, y_pre)
    # 反向传播（计算梯度  梯度就是损失参数的偏导数）
    # ESM_loss = np.mean((y_true - y_pre)**2)
    dw = np.mean(-2 * (y_data - y_pre) * x_data)
    db = np.mean(-2 * (y_data - y_pre))
    # 更新参数  w新 = w旧 - 学习率 * 斜率(偏导数)
    w = w - lr * dw
    b = b - lr * db
    # 存储 w 和 b
    gd_path.append((w, b))
    # 打印信息，观察
    if epoch == 0 or (epoch + 1) % 10 == 0:
        print(f"[{epoch + 1}/{Epochs}] w:{round(w, 3)} b:{round(b, 3)} Loss:{round(loss, 3)}")
        # 更新散点图和拟合线
        ax1.clear()
        ax1.scatter(x_data, y_data, color="b")
        x_min, x_max = x_data.min(), x_data.max()
        y_min, y_max = round(f(w, x_min, b), 3), round(f(w, x_max, b), 3)
        ax1.plot([x_min, x_max], [y_min, y_max], color="r")
        ax1.set_title(f"w:{round(w, 3)} b:{round(b, 3)} Loss:{round(loss, 3)}")
        # 更新3D曲面上的损失点
        ax2.scatter(w, b, loss_fn(y_data, f(w, x_data, b)), color="black", s=20)
        # 更新等高线图
        ax3.clear()
        ax3.contour(W, B, loss_values, cmap="viridis")
        ax3.scatter(w, b, color="black", s=20)
        # 将沿着梯度下降方向的损失点连接起来 绘制损失下降线
        if len(gd_path) > 0:
            gd_w, gd_b = zip(*gd_path)
            loss_path = [loss_fn(y_data, f(w_val, x_data, b_val)) for w_val, b_val in zip(gd_w, gd_b)]
            ax2.plot(gd_w, gd_b, loss_path, color="black")
            ax3.plot(gd_w, gd_b)

        # 暂停一下
        plt.pause(1)

# 7. 画图
plt.show()
"""
    return demo_one_src, demo_two_src
