{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from tqdm import tqdm\n",
    "\n",
    "# Function to generate a sliding-window dataset from a sine wave\n",
    "def generate_dataset(size=280, timesteps=25):\n",
    "    \"\"\"Build a next-value prediction dataset from a sine wave.\n",
    "\n",
    "    Each x[i] holds `timesteps` consecutive sine samples and y[i] is the\n",
    "    sample that immediately follows that window.\n",
    "\n",
    "    Returns:\n",
    "        x: ndarray of shape (size - timesteps, timesteps, 1)\n",
    "        y: ndarray of shape (size - timesteps, 1)\n",
    "    \"\"\"\n",
    "    # The sine wave sampled at integer points 0..size-1\n",
    "    sin_wave = np.sin(np.arange(size))\n",
    "\n",
    "    x, y = [], []\n",
    "    # Slide a window of `timesteps` samples along the wave; the target is\n",
    "    # the sample right after the window.\n",
    "    for step in range(len(sin_wave) - timesteps):\n",
    "        x.append(sin_wave[step:step + timesteps])\n",
    "        y.append(sin_wave[step + timesteps])\n",
    "\n",
    "    # Reshape to (samples, timesteps, features=1) and (samples, 1)\n",
    "    return np.array(x).reshape(len(y), timesteps, 1), np.array(y).reshape(len(y), 1)\n",
    "\n",
    "\n",
    "# Quick sanity check of generate_dataset\n",
    "x, y = generate_dataset()\n",
    "print(x.shape, y.shape)\n",
    "plt.plot(x[-1])\n",
    "plt.plot(y)\n",
    "\n",
    "# Simple RNN class\n",
    "class SimpleRNN():\n",
    "    \"\"\"Minimal many-to-one RNN trained sample-by-sample with BPTT.\n",
    "\n",
    "    x: inputs, shape (samples, timesteps, 1); y: targets, shape (samples, 1).\n",
    "    \"\"\"\n",
    "    def __init__(self, x, y, hidden_units):\n",
    "        self.x = x\n",
    "        self.y = y\n",
    "        self.hidden_units = hidden_units\n",
    "        # Randomly initialize the weight matrices\n",
    "        self.Wx = np.random.randn(self.hidden_units, self.x.shape[2])\n",
    "        self.Wh = np.random.randn(self.hidden_units, self.hidden_units)\n",
    "        self.Wy = np.random.randn(self.y.shape[1], self.hidden_units)\n",
    "        # Print only the shapes: dumping full 100x100 matrices floods the output\n",
    "        print(\"RNN init finished, weight shapes:\", self.Wx.shape, self.Wh.shape, self.Wy.shape)\n",
    "\n",
    "    # Single RNN cell: one timestep of the recurrence.\n",
    "    # NOTE: must be named `cell` -- forward() calls self.cell(); the original\n",
    "    # defined __call__ instead, raising AttributeError at runtime.\n",
    "    def cell(self, xt, ht_1):\n",
    "        # Hidden state: ht = tanh(Wx @ xt + Wh @ ht_1)\n",
    "        ht = np.tanh(np.dot(self.Wx, xt.reshape(1, 1)) + np.dot(self.Wh, ht_1))\n",
    "        # Output: yt = Wy @ ht\n",
    "        yt = np.dot(self.Wy, ht)\n",
    "        return ht, yt\n",
    "\n",
    "    # Forward pass through the network for one sample\n",
    "    def forward(self, sample):\n",
    "        sample_x, sample_y = self.x[sample], self.y[sample]\n",
    "        ht = np.zeros((self.hidden_units, 1))\n",
    "        # Keep inputs and hidden states for use in backward()\n",
    "        self.hidden_states = [ht]\n",
    "        self.inputs = []\n",
    "        for step in range(len(sample_x)):\n",
    "            ht, yt = self.cell(sample_x[step], ht)\n",
    "            self.inputs.append(sample_x[step].reshape(1, 1))\n",
    "            self.hidden_states.append(ht)\n",
    "        # Error and squared-error loss on the final output only\n",
    "        self.error = yt - sample_y\n",
    "        self.loss = 0.5 * self.error ** 2\n",
    "        self.yt = yt\n",
    "\n",
    "    # Backward pass through the network (backpropagation through time)\n",
    "    def backward(self):\n",
    "        n = len(self.inputs)\n",
    "        # Gradient of the loss w.r.t. the output\n",
    "        dyt = self.error\n",
    "        # Gradient w.r.t. the output weights\n",
    "        dwy = np.dot(dyt, self.hidden_states[-1].T)\n",
    "        # Gradient w.r.t. the last hidden state\n",
    "        dht = np.dot(dyt, self.Wy).T\n",
    "        dwx = np.zeros(self.Wx.shape)\n",
    "        dwh = np.zeros(self.Wh.shape)\n",
    "        # Backpropagate through time\n",
    "        for step in reversed(range(n)):\n",
    "            # tanh derivative: 1 - tanh(z)^2, evaluated at the stored state\n",
    "            temp = (1 - self.hidden_states[step + 1] ** 2) * dht\n",
    "            dwx += np.dot(temp, self.inputs[step].T)\n",
    "            dwh += np.dot(temp, self.hidden_states[step].T)\n",
    "            dht = np.dot(self.Wh, temp)\n",
    "        # Clip to prevent exploding gradients\n",
    "        dwy = np.clip(dwy, -1, 1)\n",
    "        dwx = np.clip(dwx, -1, 1)\n",
    "        dwh = np.clip(dwh, -1, 1)\n",
    "        # Update weights\n",
    "        self.Wy -= self.lr * dwy\n",
    "        self.Wx -= self.lr * dwx\n",
    "        self.Wh -= self.lr * dwh\n",
    "\n",
    "    # Training function\n",
    "    def train(self, epochs, learning_rate):\n",
    "        self.Ovr_loss = []\n",
    "        self.lr = learning_rate\n",
    "        for epoch in tqdm(range(epochs)):\n",
    "            # Accumulate over all samples; the original divided only the\n",
    "            # LAST sample's loss by the sample count.\n",
    "            epoch_loss = 0.0\n",
    "            for sample in range(self.x.shape[0]):\n",
    "                self.forward(sample)\n",
    "                self.backward()\n",
    "                epoch_loss += self.loss\n",
    "            self.Ovr_loss.append(np.squeeze(epoch_loss / self.x.shape[0]))\n",
    "\n",
    "    # Testing function\n",
    "    def test(self, X, y):\n",
    "        # forward() reads self.x / self.y, so point them at the test data.\n",
    "        # The original assigned self.X, leaving forward() on the training set.\n",
    "        self.x = X\n",
    "        self.y = y\n",
    "        self.outputs = []\n",
    "        for sample in range(len(X)):\n",
    "            self.forward(sample)\n",
    "            self.outputs.append(self.yt)\n",
    "          \n",
    "# Generate dataset\n",
    "x, y = generate_dataset()\n",
    "# Take the last 25 windows of a longer wave as held-out test data\n",
    "x_test, y_test = generate_dataset(300)\n",
    "x_test = x_test[250:]\n",
    "y_test = y_test[250:]\n",
    "\n",
    "# Create SimpleRNN instance and train\n",
    "rnn = SimpleRNN(x, y, 100)\n",
    "rnn.train(25, 1e-2)\n",
    "\n",
    "# Test the trained SimpleRNN\n",
    "rnn.test(x_test, y_test)\n",
    "\n",
    "# Plot the results\n",
    "plt.figure(dpi=120)\n",
    "plt.subplot(121)\n",
    "plt.plot(rnn.Ovr_loss)\n",
    "plt.subplot(122)\n",
    "# Two separate plot calls: passing the prediction array as a third\n",
    "# positional argument would be parsed as a format string (TypeError).\n",
    "plt.plot(range(len(x_test)), y_test)\n",
    "plt.plot(range(len(x_test)), np.array(rnn.outputs).reshape(y_test.shape))\n",
    "# tight_layout must run after the subplots exist; the original called it\n",
    "# before plt.figure(), so it acted on the wrong figure.\n",
    "plt.tight_layout()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from tqdm import tqdm\n",
    "\n",
    "shape = 30\n",
    "arangeResult = np.arange(shape)\n",
    "print(arangeResult)  # this is the real-valued x axis\n",
    "\n",
    "# np.sin accepts a scalar or an ndarray and returns element-wise sine values\n",
    "sin_wave = np.sin(np.arange(shape))  # the sine values form the y axis\n",
    "print(sin_wave)\n",
    "\n",
    "# Draw the wave\n",
    "plt.plot(arangeResult, sin_wave)\n",
    "plt.title('Sine Wave')\n",
    "plt.xlabel('X Axis')\n",
    "plt.ylabel('Y Axis')\n",
    "plt.grid(True)\n",
    "plt.show()\n",
    "\n",
    "# Function to generate a sliding-window dataset from a sine wave\n",
    "def generate_dataset(size=200, timesteps=25):\n",
    "    \"\"\"Return (x, y): x[i] is `timesteps` consecutive sine samples and\n",
    "    y[i] is the sample that immediately follows that window.\n",
    "\n",
    "    Shapes: x is (size - timesteps, timesteps, 1); y is (size - timesteps, 1).\n",
    "    \"\"\"\n",
    "    sin_wave = np.sin(np.arange(size))\n",
    "    x, y = [], []\n",
    "\n",
    "    # Slide a window of `timesteps` samples along the wave\n",
    "    for step in range(len(sin_wave) - timesteps):\n",
    "        x.append(sin_wave[step:step + timesteps])\n",
    "        y.append(sin_wave[step + timesteps])\n",
    "\n",
    "    # Reshape to (samples, timesteps, 1) and (samples, 1)\n",
    "    return np.array(x).reshape(len(y), timesteps, 1), np.array(y).reshape(len(y), 1)\n",
    "\n",
    "# about reshape\n",
    "original = np.array([[2, 4, 1, 3],\n",
    "                     [1, 2, 5, 2]])\n",
    "print(original.shape)\n",
    "after = original.reshape(4, 2)\n",
    "print(after)\n",
    "\n",
    "# about the third parameter: splits each row into single-element columns\n",
    "thirdDimension = after.reshape(4, 2, 1)\n",
    "print(thirdDimension)\n",
    "\n",
    "# Test generate_dataset\n",
    "x, y = generate_dataset(50)\n",
    "# print(x) must come after x is defined; the original printed it above,\n",
    "# which raises NameError on a fresh kernel.\n",
    "print(x)\n",
    "print(x.shape, y.shape)\n",
    "# by default the index is used as the x axis\n",
    "plt.plot(x[1], color='red', label='x Data')  # each item in x is one window\n",
    "plt.plot(y, color='blue', label='y Data')\n",
    "\n",
    "\n",
    "# Simple RNN class\n",
    "class SimpleRNN():\n",
    "    \"\"\"Minimal many-to-one RNN trained sample-by-sample with BPTT.\n",
    "\n",
    "    x: inputs, shape (samples, timesteps, 1); y: targets, shape (samples, 1).\n",
    "    \"\"\"\n",
    "    def __init__(self, x, y, hidden_units):\n",
    "        self.x = x\n",
    "        self.y = y\n",
    "        self.hidden_units = hidden_units\n",
    "        # Randomly initialize the weight matrices\n",
    "        self.Wx = np.random.randn(self.hidden_units, self.x.shape[2])\n",
    "        self.Wh = np.random.randn(self.hidden_units, self.hidden_units)\n",
    "        self.Wy = np.random.randn(self.y.shape[1], self.hidden_units)\n",
    "\n",
    "    def checkDataFormat(self):\n",
    "        # TODO: validate x/y shapes and report a helpful error on mismatch\n",
    "        pass\n",
    "\n",
    "    def getInitResult(self):\n",
    "        \"\"\"Return the current (Wx, Wh, Wy) weight matrices.\"\"\"\n",
    "        return self.Wx, self.Wh, self.Wy\n",
    "\n",
    "    # Function for a single RNN cell (one timestep of the recurrence)\n",
    "    def cell(self, xt, ht_1):\n",
    "        # Hidden state: ht = tanh(Wx @ xt + Wh @ ht_1)\n",
    "        ht = np.tanh(np.dot(self.Wx, xt.reshape(1, 1)) + np.dot(self.Wh, ht_1))\n",
    "        # Output: yt = Wy @ ht\n",
    "        yt = np.dot(self.Wy, ht)\n",
    "        return ht, yt\n",
    "\n",
    "    # Forward pass through the network for one sample\n",
    "    def forward(self, sample):\n",
    "        sample_x, sample_y = self.x[sample], self.y[sample]\n",
    "        ht = np.zeros((self.hidden_units, 1))\n",
    "        # Keep inputs and hidden states for use in backward()\n",
    "        self.hidden_states = [ht]\n",
    "        self.inputs = []\n",
    "        for step in range(len(sample_x)):\n",
    "            ht, yt = self.cell(sample_x[step], ht)\n",
    "            self.inputs.append(sample_x[step].reshape(1, 1))\n",
    "            self.hidden_states.append(ht)\n",
    "        # Error and squared-error loss on the final output only\n",
    "        self.error = yt - sample_y\n",
    "        self.loss = 0.5 * self.error ** 2\n",
    "        self.yt = yt\n",
    "\n",
    "    # Backward pass through the network (backpropagation through time)\n",
    "    def backward(self):\n",
    "        # Length of the stored input sequence\n",
    "        n = len(self.inputs)\n",
    "        # Gradient of the loss w.r.t. the output\n",
    "        dyt = self.error\n",
    "        # Gradient w.r.t. the output weights\n",
    "        dwy = np.dot(dyt, self.hidden_states[-1].T)\n",
    "        # Gradient w.r.t. the last hidden state\n",
    "        dht = np.dot(dyt, self.Wy).T\n",
    "        dwx = np.zeros(self.Wx.shape)\n",
    "        dwh = np.zeros(self.Wh.shape)\n",
    "        # Backpropagate through time\n",
    "        for step in reversed(range(n)):\n",
    "            # tanh derivative: 1 - tanh(z)^2, evaluated at the stored state\n",
    "            temp = (1 - self.hidden_states[step + 1] ** 2) * dht\n",
    "            dwx += np.dot(temp, self.inputs[step].T)\n",
    "            dwh += np.dot(temp, self.hidden_states[step].T)\n",
    "            dht = np.dot(self.Wh, temp)\n",
    "        # Clip to prevent exploding gradients\n",
    "        dwy = np.clip(dwy, -1, 1)\n",
    "        dwx = np.clip(dwx, -1, 1)\n",
    "        dwh = np.clip(dwh, -1, 1)\n",
    "        # Update weights\n",
    "        self.Wy -= self.lr * dwy\n",
    "        self.Wx -= self.lr * dwx\n",
    "        self.Wh -= self.lr * dwh\n",
    "\n",
    "    # Training function\n",
    "    def train(self, epochs, learning_rate):\n",
    "        self.Ovr_loss = []\n",
    "        self.lr = learning_rate\n",
    "        for epoch in tqdm(range(epochs)):\n",
    "            # Accumulate over all samples; the original divided only the\n",
    "            # LAST sample's loss by the sample count.\n",
    "            epoch_loss = 0.0\n",
    "            for sample in range(self.x.shape[0]):\n",
    "                self.forward(sample)\n",
    "                self.backward()\n",
    "                epoch_loss += self.loss\n",
    "            self.Ovr_loss.append(np.squeeze(epoch_loss / self.x.shape[0]))\n",
    "\n",
    "    # Testing function\n",
    "    def test(self, x, y):\n",
    "        # forward() reads self.x / self.y, so rebind them to the test data\n",
    "        self.x = x\n",
    "        self.y = y\n",
    "        self.outputs = []\n",
    "        for sample in range(len(x)):\n",
    "            self.forward(sample)\n",
    "            self.outputs.append(self.yt)\n",
    "          \n",
    "          \n",
    "# Generate dataset\n",
    "x, y = generate_dataset()\n",
    "# Take the last 25 windows of a longer wave as held-out test data\n",
    "x_test, y_test = generate_dataset(300)\n",
    "x_test = x_test[250:]\n",
    "y_test = y_test[250:]\n",
    "\n",
    "# Create SimpleRNN instance\n",
    "rnn = SimpleRNN(x, y, 100)\n",
    "\n",
    "# train\n",
    "rnn.train(25, 1e-2)\n",
    "\n",
    "# Test the trained SimpleRNN\n",
    "rnn.test(x_test, y_test)\n",
    "\n",
    "# Plot the results\n",
    "plt.figure(dpi=120)\n",
    "plt.subplot(121)\n",
    "plt.plot(rnn.Ovr_loss)\n",
    "plt.subplot(122)\n",
    "# Two separate plot calls: passing the prediction array as a third\n",
    "# positional argument would be parsed as a format string (TypeError).\n",
    "plt.plot(range(len(x_test)), y_test)\n",
    "plt.plot(range(len(x_test)), np.array(rnn.outputs).reshape(y_test.shape))\n",
    "# tight_layout must run after the subplots exist; the original called it\n",
    "# before plt.figure(), so it acted on the wrong figure.\n",
    "plt.tight_layout()\n"
   ]
  }
 ],
 "metadata": {
  "language_info": {
   "name": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
