import torch
import matplotlib.pyplot as plt
from torch import nn

# ==================== Build the data distribution ===================
# 100 points of sin(x) + 2 over one full period, with Gaussian noise (std 0.2).
x = torch.linspace(0, 2 * torch.pi, 100)
noise = torch.normal(0, 0.2, x.shape)
y = torch.sin(x) + 2 + noise

# Plot the noisy ground-truth series against its sample index.
plt.plot(torch.arange(len(y)).numpy(), y.numpy(), "-", label="real")
# ==================== Build sliding-window sequence data ===================
input_size = 5   # length of each input sub-sequence (d)
output_size = 5  # length of each target sub-sequence
window = input_size + output_size
# Number of full windows that fit in the series.
steps = len(y) - window + 1
# Every contiguous window of the series, stacked row-wise: (steps, window) = (91, 10).
data = torch.stack([y[i:i + window] for i in range(steps)])
# Slice by the configured sizes (not hard-coded 5) so changing
# input_size/output_size keeps inputs and targets consistent.
inputs = data[:, :input_size].unsqueeze(-1)     # (91, 5, 1)  n x d x v, n=91 d=5 v=1
outputs = data[:, -output_size:].unsqueeze(-1)  # (91, 5, 1)

# ==================== RNN parameters ===================
num_steps = 91    # n: number of windowed samples
vocab_size = 1    # v: feature dimension per time step
hidden_size = 10  # h: hidden-state dimension

# Input-to-hidden / hidden-to-hidden weights and hidden bias.
w_xh = torch.randn(vocab_size, hidden_size, requires_grad=True)   # (1, 10)
w_hh = torch.randn(hidden_size, hidden_size, requires_grad=True)  # (10, 10)
b_h = torch.randn(hidden_size, requires_grad=True)                # (10,)

# Hidden-to-output weight and output bias.
w_hq = torch.randn(hidden_size, vocab_size, requires_grad=True)   # (10, 1)
b_q = torch.randn(vocab_size, requires_grad=True)                 # (1,)
# ==================== Training ===================
# inputs/outputs are (n, d, v) = (91, 5, 1); move the time dimension first
# so the forward pass can iterate over the 5 time steps.
inputs = inputs.transpose(0, 1)    # (5, 91, 1): (time step, sample, feature)
outputs = outputs.transpose(0, 1)  # (5, 91, 1)

criterion = nn.MSELoss()
optimizer = torch.optim.SGD([w_xh, w_hh, b_h, w_hq, b_q], lr=0.1, momentum=0.9)

epochs = 5000
for epoch in range(epochs):
    optimizer.zero_grad()
    # Fresh hidden state each epoch: one state row per sample.
    h = torch.zeros((num_steps, hidden_size))  # (91, 10)
    # Per-time-step predictions collected over the sequence.
    predicts = []
    # Iterate over the time dimension; x_t is (91, 1) at each step.
    # (Named x_t, not x, so the module-level series `x` is not shadowed.)
    for x_t in inputs:
        # Recurrent update: h_t = tanh(x_t @ W_xh + h_{t-1} @ W_hh + b_h).
        h = torch.tanh(torch.mm(x_t, w_xh) + torch.mm(h, w_hh) + b_h)  # (91, 10)
        y_hat = torch.mm(h, w_hq) + b_q  # (91, 1)
        predicts.append(y_hat)
    predicts = torch.stack(predicts, dim=0)  # (5, 91, 1)

    loss = criterion(predicts, outputs)
    loss.backward()
    optimizer.step()

    print(f"epoch:{epoch + 1}/{epochs} -- loss:{loss.item():.4f}")
# ==================== Prediction ===================
# Re-run the forward pass with the trained weights. No gradients are needed
# at inference, so skip building the autograd graph with torch.no_grad().
with torch.no_grad():
    h = torch.zeros((num_steps, hidden_size))  # (91, 10)
    predicts = []
    # Iterate over the time dimension; x_t is (91, 1) at each step
    # (named x_t so the module-level series `x` is not shadowed).
    for x_t in inputs:
        h = torch.tanh(torch.mm(x_t, w_xh) + torch.mm(h, w_hh) + b_h)  # (91, 10)
        y_hat = torch.mm(h, w_hq) + b_q  # (91, 1)
        predicts.append(y_hat)
    # Restore sample-major layout: (5, 91, 1) -> (91, 5).
    predicts = torch.stack(predicts, dim=0).squeeze(-1).permute([1, 0])
    # Keep only the last-step prediction for each sample.
    predicts = predicts[:, -1]
plt.plot(torch.arange(0, len(predicts), 1).numpy(), predicts.detach().numpy(), "-", label="predict")
plt.show()
