{
 "cells": [
  {
   "cell_type": "code",
   "id": "initial_id",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "ExecuteTime": {
     "end_time": "2025-07-09T12:57:41.208261Z",
     "start_time": "2025-07-09T12:57:41.203295Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "batch_size = 2\n",
    "seq_len = 5\n",
    "d_model = 512 # 词向量的长度\n",
    "vocab_size = 100\n",
    "\n",
    "# 生成两个句子，每个句子有5个词\n",
    "# 随机生成0-100的数字，形状为(batch_size, seq_len)，表示batch_size条sequence，每条sequence的长度为seq_len\n",
    "inputs = torch.randint(0, vocab_size, (batch_size, seq_len))\n",
    "inputs"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[24, 96, 79,  8, 90],\n",
       "        [73, 67, 52, 48, 30]])"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 20
  },
  {
   "cell_type": "code",
   "id": "d75a5f086f4eea9c",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T12:58:59.354834Z",
     "start_time": "2025-07-09T12:58:59.351002Z"
    }
   },
   "source": [
    "# 词嵌入层\n",
    "# 两个句子，每个句子有5个词，每个词的词向量长度为512\n",
    "embedding = nn.Embedding(vocab_size, d_model)\n",
    "embeddings = embedding(inputs)\n",
    "embeddings.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 23
  },
  {
   "cell_type": "code",
   "id": "b78fcbd2c1eeab9e",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T12:59:47.466522Z",
     "start_time": "2025-07-09T12:59:47.457648Z"
    }
   },
   "source": [
    "import math\n",
    "\n",
    "# 位置编码\n",
    "# 位置编码只跟位置有关，和具体位置上输入的数据无关\n",
    "pe = torch.zeros(seq_len, d_model)\n",
    "position = torch.arange(0, seq_len, dtype=torch.float).unsqueeze(1)\n",
    "div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))\n",
    "pe[:, 0::2] = torch.sin(position * div_term)\n",
    "pe[:, 1::2] = torch.cos(position * div_term)\n",
    "pe"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[ 0.0000e+00,  1.0000e+00,  0.0000e+00,  ...,  1.0000e+00,\n",
       "          0.0000e+00,  1.0000e+00],\n",
       "        [ 8.4147e-01,  5.4030e-01,  8.2186e-01,  ...,  1.0000e+00,\n",
       "          1.0366e-04,  1.0000e+00],\n",
       "        [ 9.0930e-01, -4.1615e-01,  9.3641e-01,  ...,  1.0000e+00,\n",
       "          2.0733e-04,  1.0000e+00],\n",
       "        [ 1.4112e-01, -9.8999e-01,  2.4509e-01,  ...,  1.0000e+00,\n",
       "          3.1099e-04,  1.0000e+00],\n",
       "        [-7.5680e-01, -6.5364e-01, -6.5717e-01,  ...,  1.0000e+00,\n",
       "          4.1465e-04,  1.0000e+00]])"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 24
  },
  {
   "cell_type": "code",
   "id": "220f121c2cc2e2ef",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:00:25.085541Z",
     "start_time": "2025-07-09T13:00:25.082288Z"
    }
   },
   "source": [
    "# 位置编码+词向量\n",
    "encoder_inputs = embeddings + pe\n",
    "encoder_inputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 25
  },
  {
   "cell_type": "code",
   "id": "b87ee8e199a33508",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:01:56.131996Z",
     "start_time": "2025-07-09T13:01:56.122763Z"
    }
   },
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "\n",
    "\n",
    "class MultiHeadAttention(nn.Module):\n",
    "\n",
    "    def __init__(self, embed_dim: int, attn_dim: int, output_dim: int, num_heads: int):\n",
    "        super().__init__()\n",
    "\n",
    "        self.embed_dim = embed_dim\n",
    "        self.attn_dim = attn_dim\n",
    "        self.output_dim = output_dim\n",
    "        self.num_heads = num_heads\n",
    "        self.head_dim = attn_dim // num_heads # //表示向下取整，attn_dim是head_dim的整数倍\n",
    "\n",
    "        # QKV投影层：从输入维度映射到内部维度\n",
    "        # projection\n",
    "        self.q_proj = nn.Linear(embed_dim, self.attn_dim)\n",
    "        self.k_proj = nn.Linear(embed_dim, self.attn_dim)\n",
    "        self.v_proj = nn.Linear(embed_dim, self.attn_dim)\n",
    "\n",
    "        # 输出投影层：从内部维度映射到输出维度\n",
    "        self.out_proj = nn.Linear(self.attn_dim, self.output_dim)\n",
    "\n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        输入: [batch_size, seq_len, embed_dim]\n",
    "        返回: [batch_size, seq_len, output_dim]\n",
    "        \"\"\"\n",
    "        batch_size, seq_len, embed_dim = x.shape\n",
    "\n",
    "        # 投影到QKV空间\n",
    "        q = self.q_proj(x)  # [batch_size, seq_len, attn_dim]\n",
    "        k = self.k_proj(x)  # [batch_size, seq_len, attn_dim]\n",
    "        v = self.v_proj(x)  # [batch_size, seq_len, attn_dim]\n",
    "\n",
    "        # [batch_size, seq_len, num_heads, head_dim]\n",
    "        # 分割多头 [batch_size, num_heads, seq_len, head_dim]\n",
    "        q = q.view(batch_size, seq_len, self.num_heads, self.head_dim).transpose(1, 2)\n",
    "        k = k.view(batch_size, seq_len, self.num_heads, self.head_dim).transpose(1, 2)\n",
    "        v = v.view(batch_size, seq_len, self.num_heads, self.head_dim).transpose(1, 2)\n",
    "\n",
    "        # 计算注意力得分\n",
    "        # q   [batch_size, num_heads, seq_len, head_dim]\n",
    "        # k.T [batch_size, num_heads, head_dim, seq_len]\n",
    "        # q @ k.T 形状: [batch_size, num_heads, seq_len, seq_len]\n",
    "        attn_scores = torch.matmul(q, k.transpose(-2, -1))\n",
    "\n",
    "        # 缩放因子：防止乘积过大\n",
    "        d_k = k.size(-1)\n",
    "        attn_scores = attn_scores / torch.sqrt(torch.tensor(d_k))\n",
    "\n",
    "        # 计算注意力权重\n",
    "        attn_weights = torch.softmax(attn_scores, dim=-1)\n",
    "\n",
    "        # 计算注意力输出\n",
    "        # attn_weights [batch_size, num_heads, seq_len, seq_len]\n",
    "        # v            [batch_size, num_heads, seq_len, head_dim]\n",
    "        # [batch_size, num_heads, seq_len, head_dim]\n",
    "        attn_out = torch.matmul(attn_weights, v)\n",
    "\n",
    "        # 合并多头 [batch_size, seq_len, attn_dim]\n",
    "\n",
    "        # [batch_size, seq_len, num_heads, head_dim]\n",
    "        attn_out = attn_out.transpose(1, 2).reshape(batch_size, seq_len, self.attn_dim)\n",
    "\n",
    "        # 投影到输出空间\n",
    "        return self.out_proj(attn_out)"
   ],
   "outputs": [],
   "execution_count": 26
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:03:19.157694Z",
     "start_time": "2025-07-09T13:03:19.149515Z"
    }
   },
   "cell_type": "code",
   "source": [
    "attn = MultiHeadAttention(embed_dim=d_model, attn_dim=d_model, output_dim=d_model, num_heads=2)\n",
    "encoder_attn_outputs = attn(encoder_inputs)\n",
    "\n",
    "print(encoder_inputs.shape)\n",
    "print(encoder_attn_outputs.shape)"
   ],
   "id": "eee8fb2ee0dd3cb2",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([2, 5, 512])\n",
      "torch.Size([2, 5, 512])\n"
     ]
    }
   ],
   "execution_count": 27
  },
  {
   "cell_type": "code",
   "id": "7cadc119cd37aaa3",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:05:41.813283Z",
     "start_time": "2025-07-09T13:05:41.806990Z"
    }
   },
   "source": [
    "## 残差连接\n",
    "encoder_attn_add_outputs = encoder_inputs + encoder_attn_outputs\n",
    "encoder_attn_add_outputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 28
  },
  {
   "cell_type": "code",
   "id": "fa842128f70b648e",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:07:12.481802Z",
     "start_time": "2025-07-09T13:07:12.474217Z"
    }
   },
   "source": [
    "## 层归一化\n",
    "layer_norm_1 = nn.LayerNorm(d_model)\n",
    "encoder_attn_add_norm_outputs = layer_norm_1(encoder_attn_add_outputs)\n",
    "encoder_attn_add_norm_outputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 29
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:09:16.113015Z",
     "start_time": "2025-07-09T13:09:16.099252Z"
    }
   },
   "cell_type": "code",
   "source": "encoder_attn_add_outputs[0][0]",
   "id": "e3280f1cd2f9b1ff",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([ 5.0309e-01,  3.0113e+00, -1.4073e+00,  9.0966e-01,  1.8818e-01,\n",
       "         1.4762e+00, -1.2888e+00,  2.0468e+00, -3.9786e-01,  9.4535e-01,\n",
       "        -3.8544e-01, -1.0160e+00,  5.3504e-02,  1.9697e+00,  3.0542e-01,\n",
       "         2.3454e+00,  1.7809e+00,  1.0295e+00, -1.3140e+00, -7.0677e-02,\n",
       "        -1.0392e+00,  1.0252e+00,  9.3748e-01,  1.0512e+00,  6.4578e-02,\n",
       "         3.2386e-01,  1.6453e-01,  3.3651e-01,  4.3271e-01,  3.6858e+00,\n",
       "         7.1499e-01,  1.5103e+00,  1.0904e+00,  6.8327e-01, -1.5255e+00,\n",
       "         2.0200e+00,  1.4908e+00,  8.5945e-01,  1.0925e+00,  7.6333e-01,\n",
       "         1.4020e+00,  1.4512e+00, -1.5314e-01,  1.8259e+00,  2.5188e-01,\n",
       "         2.6082e-01, -1.7249e+00,  3.5851e-01, -1.0283e+00,  2.6943e+00,\n",
       "         1.2197e+00,  9.4957e-01,  3.8882e-01, -8.2401e-01,  2.5809e-01,\n",
       "         1.4278e+00,  7.1896e-01,  1.5700e+00, -2.1259e+00,  2.5116e+00,\n",
       "         3.9522e-01,  1.5258e+00,  5.3219e-01,  1.3722e+00, -4.4132e-03,\n",
       "         3.8127e-01, -4.9950e-01,  9.5999e-01, -1.7467e+00,  8.7110e-01,\n",
       "        -5.4749e-01, -2.9765e-01, -2.8260e+00,  1.6268e+00,  4.8949e-01,\n",
       "         5.3084e-01, -7.4856e-01,  1.4232e-01, -5.2306e-01,  1.1046e+00,\n",
       "        -5.7760e-01,  1.4276e+00, -2.3090e-01,  6.1347e-01, -1.1583e+00,\n",
       "         1.6947e+00,  1.3823e+00,  1.9138e+00,  9.1169e-01,  7.5913e-01,\n",
       "        -9.3118e-01,  2.4789e+00,  1.6492e+00,  7.9428e-01,  5.4446e-01,\n",
       "         4.7869e-01, -1.3088e+00,  1.0628e+00, -1.0920e+00, -4.9031e-01,\n",
       "        -1.0836e+00, -1.1146e-01, -5.5512e-02,  2.2905e-01,  7.7815e-01,\n",
       "         1.3961e+00,  9.5135e-02,  1.5685e+00,  1.6276e+00,  1.7375e-01,\n",
       "         3.9978e-01,  1.2597e+00, -1.0678e+00,  2.3065e+00,  3.6905e-01,\n",
       "         1.6201e-01, -9.3766e-01,  1.9738e+00,  9.0286e-01,  3.3785e-01,\n",
       "        -1.1734e+00,  1.5739e+00, -1.6398e+00,  5.3454e-01,  6.5886e-01,\n",
       "         4.5211e-01,  1.3216e-01,  3.3099e+00, -1.0918e-01,  9.4354e-01,\n",
       "        -5.4190e-01,  1.7389e+00,  9.1983e-01,  1.0258e+00, -6.6321e-01,\n",
       "         1.6133e+00, -3.3181e-01,  2.1664e+00, -7.4648e-01,  2.5781e+00,\n",
       "         4.6309e-01,  2.8605e+00, -1.1714e+00,  1.8664e+00, -1.6140e-02,\n",
       "        -2.9544e-01,  4.3849e-01,  2.4128e+00,  5.6766e-02,  3.8123e-01,\n",
       "        -7.4722e-01,  8.1156e-01,  3.1595e-01, -5.5618e-01,  1.8115e-01,\n",
       "         7.9704e-01, -1.2489e+00,  7.1678e-01, -1.2936e+00, -5.5806e-01,\n",
       "         2.6481e-01,  2.3528e+00, -1.0316e-01,  8.6078e-01,  8.7030e-01,\n",
       "        -5.5944e-01,  4.0030e-01,  1.2894e+00,  1.4228e+00,  1.1804e+00,\n",
       "        -1.4508e-01, -1.7822e-01,  7.3168e-01,  1.7362e+00, -2.6391e-01,\n",
       "        -1.3850e+00, -1.1091e-02,  1.9919e+00, -1.0848e+00,  1.3879e+00,\n",
       "        -1.6282e+00,  5.1077e-01,  4.7941e-01,  2.6071e-01, -1.6406e+00,\n",
       "         1.4002e-02,  5.8913e-01,  7.8695e-01,  1.5157e+00, -6.9755e-02,\n",
       "        -1.1175e-01,  3.2084e-01, -1.1692e-01, -1.9233e-02,  6.3173e-02,\n",
       "         1.4583e+00,  7.2402e-01,  2.7326e-01, -3.9400e-01,  1.0009e+00,\n",
       "        -6.4477e-01,  6.9619e-01, -2.3103e+00,  9.7220e-01,  2.3174e+00,\n",
       "         7.1832e-03,  1.1981e+00,  1.2749e+00, -1.8925e-01,  8.6516e-01,\n",
       "        -2.0985e+00,  8.2029e-01,  1.5212e+00,  1.5358e+00, -3.5182e-01,\n",
       "         1.6836e+00, -1.3544e+00,  8.3529e-01, -9.0731e-01,  4.8003e+00,\n",
       "        -1.1955e+00,  2.6387e-02,  1.2386e+00,  6.9763e-01,  4.0965e-01,\n",
       "        -1.3957e+00, -1.2072e+00,  2.9659e+00, -6.1860e-01,  5.9781e-01,\n",
       "         1.8665e+00,  2.5789e-01, -7.5452e-01,  7.8815e-01,  1.3601e+00,\n",
       "         1.9465e+00,  8.6838e-01,  2.8568e-01, -3.8017e-01, -1.2553e+00,\n",
       "         4.6768e-02,  1.8557e+00, -1.3961e+00,  1.2241e+00,  1.2111e+00,\n",
       "         1.7423e+00, -9.1500e-01,  2.1659e+00, -4.9706e-01,  1.2262e+00,\n",
       "        -2.1551e-01, -1.0352e+00,  1.2164e+00,  2.2830e+00, -2.4403e+00,\n",
       "         2.4509e+00, -4.2191e-01, -3.3247e-01, -1.8362e+00,  7.4095e-02,\n",
       "        -2.3142e+00,  1.4213e+00,  7.7761e-01,  5.9026e-01,  2.7298e-01,\n",
       "         1.2515e+00,  4.1070e-01,  1.2318e+00,  5.0622e-01,  1.7177e+00,\n",
       "        -2.8465e-01,  5.7281e-01,  3.2765e-02,  3.9912e-01, -7.0554e-02,\n",
       "         1.6586e+00,  1.5368e+00,  1.4661e+00,  8.1235e-01,  6.5536e-01,\n",
       "         1.8011e-01, -2.8633e-01,  1.1794e+00,  1.5311e+00,  1.2628e+00,\n",
       "         8.5361e-01, -9.4193e-01,  1.8697e+00,  9.3611e-01,  1.5448e+00,\n",
       "         2.5147e-01,  6.5428e-02, -2.5229e-02,  1.8941e+00, -1.3850e-01,\n",
       "         2.6496e+00,  7.4141e-01,  4.7261e-01, -6.6254e-01,  9.4609e-01,\n",
       "        -5.0612e-01,  6.0012e-01,  3.0615e-01,  2.3187e+00, -5.7803e-01,\n",
       "         2.1095e-01,  9.9141e-01, -3.9118e-02,  5.3606e-01,  1.3319e+00,\n",
       "         7.1081e-01,  9.1592e-01, -8.4419e-01, -9.0717e-01, -3.3437e-01,\n",
       "         9.7207e-01, -1.5934e+00,  5.1330e-02, -2.9088e-01, -6.5874e-01,\n",
       "        -9.0462e-01, -2.2000e-02, -2.6461e-01,  1.2841e+00, -5.2650e-01,\n",
       "        -7.9023e-02, -1.3590e+00,  3.0225e+00, -1.9828e-02,  1.4039e-01,\n",
       "        -5.0040e-01,  2.1414e+00, -7.3698e-01,  3.9182e-01, -5.9251e-01,\n",
       "        -4.8175e-02,  1.1248e+00, -1.4057e-01,  3.5974e-01, -1.0243e+00,\n",
       "         7.7260e-02,  7.3733e-01, -9.4667e-01,  1.2351e+00, -1.1546e+00,\n",
       "         1.8135e+00,  1.1983e+00, -9.7375e-02, -7.6107e-01, -2.2253e-01,\n",
       "        -9.1805e-01,  3.0576e-01, -5.5742e-02,  5.2007e-01, -4.2086e-01,\n",
       "         3.4130e+00,  2.3944e-01, -1.2045e+00, -9.2789e-01,  3.2239e-01,\n",
       "         3.8202e-01,  2.2064e+00, -1.8190e+00,  2.2897e+00, -1.0273e+00,\n",
       "         3.5852e-01,  7.7021e-01,  5.1983e-01,  1.4701e+00,  2.8580e+00,\n",
       "         1.0123e-01,  3.1556e-01,  1.7599e-01, -3.4999e-01, -8.8646e-01,\n",
       "        -4.1553e-01,  5.9763e-01,  2.0432e+00,  6.1102e-01,  9.9803e-01,\n",
       "         6.0438e-01,  1.4100e+00,  1.2410e+00,  1.9609e+00,  9.0534e-01,\n",
       "         2.6057e-01,  4.6670e-01,  7.4820e-01, -1.2899e+00,  8.1731e-01,\n",
       "        -1.0210e-02,  6.2982e-02, -1.0908e+00, -7.1754e-01,  3.3970e-01,\n",
       "         3.1727e-01,  7.1451e-01,  2.2451e-01, -1.0341e+00,  1.7002e+00,\n",
       "         7.7746e-01,  2.4783e+00,  9.6520e-01, -5.5354e-02, -4.6957e-01,\n",
       "         4.0949e-01, -6.8685e-01,  8.8054e-01,  1.2553e-01,  2.0892e+00,\n",
       "        -2.2310e-04,  9.0910e-01,  1.1847e+00,  3.3912e-01,  1.4248e+00,\n",
       "         6.3996e-01, -1.7209e+00,  2.0554e+00,  1.0555e-01,  1.2852e+00,\n",
       "        -1.3408e+00,  7.5141e-01,  8.9343e-02,  2.6942e+00,  3.3342e-01,\n",
       "         6.3184e-01,  3.2076e-01,  1.1502e+00, -2.0330e+00,  6.2033e-02,\n",
       "        -1.3997e+00, -4.1879e-01, -5.0977e-01,  3.3433e+00, -6.0557e-01,\n",
       "         1.2192e+00, -1.4714e+00,  7.0322e-01,  3.0790e-01,  3.0242e+00,\n",
       "         6.5036e-01, -1.2176e+00, -9.9031e-01,  5.9948e-01,  2.3846e-01,\n",
       "         2.3915e+00,  4.9906e-01,  2.7303e+00,  1.3056e+00, -3.3239e-01,\n",
       "        -1.2462e+00,  9.9074e-01, -4.4968e-02,  4.2475e-01, -7.2399e-01,\n",
       "         2.0180e+00, -1.0145e+00,  2.3442e+00, -1.0158e-01,  2.3106e+00,\n",
       "         1.0221e+00,  7.9205e-01, -2.0158e+00,  5.6392e-01,  5.3930e-01,\n",
       "         1.9545e+00,  8.6837e-01,  2.1304e+00, -1.4330e-01,  2.1417e+00,\n",
       "        -1.0024e+00,  2.8841e+00,  3.2818e-01, -4.7304e-01,  1.6137e-01,\n",
       "        -3.5623e-01,  9.7921e-01,  2.1796e-01, -7.5007e-01,  1.5947e+00,\n",
       "         8.6899e-02,  1.2940e+00,  1.4296e+00,  1.4265e+00, -8.8190e-01,\n",
       "        -3.8092e-01,  2.8097e-01,  1.5005e+00, -3.4778e-01,  2.4577e+00,\n",
       "        -6.5889e-01,  8.2826e-01,  1.3170e+00,  2.0147e+00,  9.1138e-01,\n",
       "         1.9049e+00, -4.2619e-01,  1.0912e+00, -1.0535e+00,  9.5740e-01,\n",
       "         1.8320e+00,  4.9942e-01, -2.2812e+00,  1.6502e+00, -1.5415e+00,\n",
       "        -3.9376e-01,  9.9704e-01,  2.2941e+00, -1.6928e+00,  3.3883e-01,\n",
       "        -3.0625e-01,  2.2351e+00], grad_fn=<SelectBackward0>)"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 32
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:09:34.834294Z",
     "start_time": "2025-07-09T13:09:34.828164Z"
    }
   },
   "cell_type": "code",
   "source": [
    "# 做归一化\n",
    "import numpy as np\n",
    "y = encoder_attn_add_outputs[0][0].detach().numpy()\n",
    "y_hat = (y - np.mean(y)) / np.std(y)\n",
    "y_hat"
   ],
   "id": "7bf9f3938a226b62",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([ 4.57407162e-02,  2.22469163e+00, -1.61388862e+00,  3.98935795e-01,\n",
       "       -2.27838442e-01,  8.91130686e-01, -1.51098633e+00,  1.38679409e+00,\n",
       "       -7.36955881e-01,  4.29945320e-01, -7.26166725e-01, -1.27394545e+00,\n",
       "       -3.44835907e-01,  1.31985974e+00, -1.25987738e-01,  1.64621580e+00,\n",
       "        1.15580940e+00,  5.03022850e-01, -1.53285789e+00, -4.52716798e-01,\n",
       "       -1.29410648e+00,  4.99313384e-01,  4.23106372e-01,  5.21940053e-01,\n",
       "       -3.35215151e-01, -1.09967500e-01, -2.48387590e-01, -9.89793241e-02,\n",
       "       -1.54040512e-02,  2.81065893e+00,  2.29825661e-01,  9.20695484e-01,\n",
       "        5.55941105e-01,  2.02267066e-01, -1.71655083e+00,  1.36355138e+00,\n",
       "        9.03790057e-01,  3.55323911e-01,  5.57817459e-01,  2.71813720e-01,\n",
       "        8.26694608e-01,  8.69380534e-01, -5.24359167e-01,  1.19488990e+00,\n",
       "       -1.72494844e-01, -1.64736256e-01, -1.88979459e+00, -7.98681527e-02,\n",
       "       -1.28461230e+00,  1.94933975e+00,  6.68241262e-01,  4.33611244e-01,\n",
       "       -5.35366833e-02, -1.10716331e+00, -1.67101145e-01,  8.49092066e-01,\n",
       "        2.33268470e-01,  9.72578824e-01, -2.23815203e+00,  1.79060817e+00,\n",
       "       -4.79733497e-02,  9.34186876e-01,  7.10208490e-02,  8.00774872e-01,\n",
       "       -3.95150840e-01, -6.00923449e-02, -8.25251520e-01,  4.42663580e-01,\n",
       "       -1.90871954e+00,  3.65444273e-01, -8.66943717e-01, -6.49898350e-01,\n",
       "       -2.84635711e+00,  1.02190697e+00,  3.39217894e-02,  6.98424205e-02,\n",
       "       -1.04161584e+00, -2.67673701e-01, -8.45721960e-01,  5.68255961e-01,\n",
       "       -8.93101573e-01,  8.48914564e-01, -5.91906786e-01,  1.41623989e-01,\n",
       "       -1.39757121e+00,  1.08097029e+00,  8.09519053e-01,  1.27131057e+00,\n",
       "        4.00703400e-01,  2.68164814e-01, -1.20027232e+00,  1.76220870e+00,\n",
       "        1.04144549e+00,  2.98707396e-01,  8.16786960e-02,  2.45363545e-02,\n",
       "       -1.52836311e+00,  5.31990290e-01, -1.34000564e+00, -8.17265272e-01,\n",
       "       -1.33268917e+00, -4.88147378e-01, -4.39542025e-01, -1.92330241e-01,\n",
       "        2.84689128e-01,  8.21493804e-01, -3.08669090e-01,  9.71282542e-01,\n",
       "        1.02260447e+00, -2.40377247e-01, -4.40138243e-02,  7.02997983e-01,\n",
       "       -1.31893778e+00,  1.61239231e+00, -7.07127303e-02, -2.50574261e-01,\n",
       "       -1.20590007e+00,  1.32340074e+00,  3.93032730e-01, -9.78131369e-02,\n",
       "       -1.41071904e+00,  9.75968421e-01, -1.81584620e+00,  7.30543360e-02,\n",
       "        1.81063324e-01,  1.44887983e-03, -2.76501119e-01,  2.48416162e+00,\n",
       "       -4.86161619e-01,  4.28373933e-01, -8.62083316e-01,  1.11929846e+00,\n",
       "        4.07771379e-01,  4.99798149e-01, -9.67475593e-01,  1.01024377e+00,\n",
       "       -6.79573655e-01,  1.49067712e+00, -1.03981555e+00,  1.84835207e+00,\n",
       "        1.09834429e-02,  2.09367371e+00, -1.40897155e+00,  1.23008335e+00,\n",
       "       -4.05338198e-01, -6.47981167e-01, -1.03807654e-02,  1.70480990e+00,\n",
       "       -3.42002153e-01, -6.01242445e-02, -1.04045951e+00,  3.13712925e-01,\n",
       "       -1.16839781e-01, -8.74492884e-01, -2.33942300e-01,  3.01103026e-01,\n",
       "       -1.47629893e+00,  2.31375262e-01, -1.51513147e+00, -8.76123011e-01,\n",
       "       -1.61266193e-01,  1.65266514e+00, -4.80939329e-01,  3.56479377e-01,\n",
       "        3.64749253e-01, -8.77320409e-01, -4.35635895e-02,  7.28863895e-01,\n",
       "        8.44700754e-01,  6.34122670e-01, -5.17356098e-01, -5.46144366e-01,\n",
       "        2.44321615e-01,  1.11700845e+00, -6.20585859e-01, -1.59447849e+00,\n",
       "       -4.00951684e-01,  1.33913279e+00, -1.33368516e+00,  8.14385653e-01,\n",
       "       -1.80576384e+00,  5.24124689e-02,  2.51643006e-02, -1.64826825e-01,\n",
       "       -1.81653047e+00, -3.79153132e-01,  1.20487198e-01,  2.92333752e-01,\n",
       "        9.25427794e-01, -4.51915473e-01, -4.88399893e-01, -1.12592116e-01,\n",
       "       -4.92891431e-01, -4.08025712e-01, -3.36436272e-01,  8.75557899e-01,\n",
       "        2.37667769e-01, -1.53929263e-01, -7.33603179e-01,  4.78174180e-01,\n",
       "       -9.51453388e-01,  2.13494405e-01, -2.39837408e+00,  4.53272045e-01,\n",
       "        1.62192786e+00, -3.85076582e-01,  6.49488330e-01,  7.16220021e-01,\n",
       "       -5.55723190e-01,  3.60278606e-01, -2.21436930e+00,  3.21301132e-01,\n",
       "        9.30226028e-01,  9.42894816e-01, -6.96960568e-01,  1.07132220e+00,\n",
       "       -1.56796622e+00,  3.34334105e-01, -1.17953575e+00,  3.77889299e+00,\n",
       "       -1.42992306e+00, -3.68393064e-01,  6.84660852e-01,  2.14745119e-01,\n",
       "       -3.54399495e-02, -1.60378814e+00, -1.44005442e+00,  2.18529916e+00,\n",
       "       -9.28714633e-01,  1.28019810e-01,  1.23015380e+00, -1.67278886e-01,\n",
       "       -1.04680097e+00,  2.93375880e-01,  7.90235281e-01,  1.29965413e+00,\n",
       "        3.63080353e-01, -1.43139869e-01, -7.21587002e-01, -1.48188138e+00,\n",
       "       -3.50687742e-01,  1.22077727e+00, -1.60419285e+00,  6.72110736e-01,\n",
       "        6.60799325e-01,  1.12225795e+00, -1.18620992e+00,  1.49027133e+00,\n",
       "       -8.23136032e-01,  6.73904657e-01, -5.78538835e-01, -1.29065371e+00,\n",
       "        6.65394962e-01,  1.59200597e+00, -2.51126790e+00,  1.73785841e+00,\n",
       "       -7.57848203e-01, -6.80143058e-01, -1.98648477e+00, -3.26948017e-01,\n",
       "       -2.40174937e+00,  8.43388319e-01,  2.84220397e-01,  1.21465541e-01,\n",
       "       -1.54168695e-01,  6.95916176e-01, -3.45217213e-02,  6.78781033e-01,\n",
       "        4.84598316e-02,  1.10095406e+00, -6.38603926e-01,  1.06307007e-01,\n",
       "       -3.62852961e-01, -4.45894189e-02, -4.52610284e-01,  1.04957426e+00,\n",
       "        9.43754375e-01,  8.82332802e-01,  3.14403564e-01,  1.78016081e-01,\n",
       "       -2.34848246e-01, -6.40059233e-01,  6.33300483e-01,  9.38794851e-01,\n",
       "        7.05750287e-01,  3.50248247e-01, -1.20961010e+00,  1.23292828e+00,\n",
       "        4.21915144e-01,  9.50727642e-01, -1.72851890e-01, -3.34476620e-01,\n",
       "       -4.13234562e-01,  1.25418866e+00, -5.11635542e-01,  1.91048992e+00,\n",
       "        2.52777100e-01,  1.92566495e-02, -9.66893792e-01,  4.30587202e-01,\n",
       "       -8.31003189e-01,  1.30029052e-01, -1.25355139e-01,  1.62301540e+00,\n",
       "       -8.93475831e-01, -2.08053753e-01,  4.69958305e-01, -4.25300360e-01,\n",
       "        7.43808597e-02,  7.65749574e-01,  2.26193398e-01,  4.04377133e-01,\n",
       "       -1.12469947e+00, -1.17940962e+00, -6.81795537e-01,  4.53154474e-01,\n",
       "       -1.77556610e+00, -3.46724749e-01, -6.44012392e-01, -9.63591218e-01,\n",
       "       -1.17719817e+00, -4.10429329e-01, -6.21194959e-01,  7.24255800e-01,\n",
       "       -8.48707080e-01, -4.59967554e-01, -1.57196915e+00,  2.23443270e+00,\n",
       "       -4.08542246e-01, -2.69354969e-01, -8.26032758e-01,  1.46903825e+00,\n",
       "       -1.03156006e+00, -5.09320311e-02, -9.06056702e-01, -4.33168232e-01,\n",
       "        5.85797548e-01, -5.13434112e-01, -7.87979215e-02, -1.28117418e+00,\n",
       "       -3.24198514e-01,  2.49229401e-01, -1.21372509e+00,  6.81702912e-01,\n",
       "       -1.39434493e+00,  1.18415570e+00,  6.49728537e-01, -4.75910515e-01,\n",
       "       -1.05248344e+00, -5.84636807e-01, -1.18886590e+00, -1.25693560e-01,\n",
       "       -4.39742059e-01,  6.04892448e-02, -7.56932437e-01,  2.57367301e+00,\n",
       "       -1.83310017e-01, -1.43771398e+00, -1.19741094e+00, -1.11247964e-01,\n",
       "       -5.94441779e-02,  1.52548039e+00, -1.97159016e+00,  1.59785163e+00,\n",
       "       -1.28373504e+00, -7.98533931e-02,  2.77795434e-01,  6.02769963e-02,\n",
       "        8.85844171e-01,  2.09153032e+00, -3.03371459e-01, -1.17181063e-01,\n",
       "       -2.38430247e-01, -6.95365429e-01, -1.16141558e+00, -7.52302885e-01,\n",
       "        1.27865657e-01,  1.38368952e+00,  1.39499784e-01,  4.75713640e-01,\n",
       "        1.33730456e-01,  8.33580792e-01,  6.86780155e-01,  1.31219995e+00,\n",
       "        3.95191014e-01, -1.64947346e-01,  1.41209736e-02,  2.58673280e-01,\n",
       "       -1.51192832e+00,  3.18713039e-01, -4.00186926e-01, -3.36602211e-01,\n",
       "       -1.33892918e+00, -1.01466978e+00, -9.62029621e-02, -1.15695626e-01,\n",
       "        2.29403749e-01, -1.96278870e-01, -1.28967226e+00,  1.08568811e+00,\n",
       "        2.84089297e-01,  1.76167178e+00,  4.47189897e-01, -4.39404666e-01,\n",
       "       -7.99246788e-01, -3.55799906e-02, -9.88010347e-01,  3.73642564e-01,\n",
       "       -2.82260090e-01,  1.42364013e+00, -3.91510695e-01,  3.98455471e-01,\n",
       "        6.37909591e-01, -9.67097431e-02,  8.46503258e-01,  1.64645180e-01,\n",
       "       -1.88635445e+00,  1.39428890e+00, -2.99619853e-01,  7.25165606e-01,\n",
       "       -1.55612397e+00,  2.61464953e-01, -3.13701004e-01,  1.94923484e+00,\n",
       "       -1.01660222e-01,  1.57582894e-01, -1.12656429e-01,  6.07897937e-01,\n",
       "       -2.15745974e+00, -3.37426484e-01, -1.60732758e+00, -7.55137265e-01,\n",
       "       -8.34175646e-01,  2.51315689e+00, -9.17399704e-01,  6.67807937e-01,\n",
       "       -1.66957927e+00,  2.19598427e-01, -1.23831205e-01,  2.23593998e+00,\n",
       "        1.73674196e-01, -1.44912648e+00, -1.25163853e+00,  1.29474387e-01,\n",
       "       -1.84158728e-01,  1.68624604e+00,  4.22374047e-02,  1.98062110e+00,\n",
       "        7.42928684e-01, -6.80074513e-01, -1.47392881e+00,  4.69379336e-01,\n",
       "       -4.30382401e-01, -2.23194864e-02, -1.02027333e+00,  1.36179781e+00,\n",
       "       -1.27268231e+00,  1.64518404e+00, -4.79560763e-01,  1.61602318e+00,\n",
       "        4.96658593e-01,  2.96769708e-01, -2.14251995e+00,  9.85859707e-02,\n",
       "        7.71900713e-02,  1.30665541e+00,  3.63068879e-01,  1.45941174e+00,\n",
       "       -5.15804350e-01,  1.46922183e+00, -1.26216781e+00,  2.11420131e+00,\n",
       "       -1.06211163e-01, -8.02266657e-01, -2.51129806e-01, -7.00787842e-01,\n",
       "        4.59356278e-01, -2.01967508e-01, -1.04293537e+00,  9.94082689e-01,\n",
       "       -3.15824121e-01,  7.32837319e-01,  8.50659907e-01,  8.47906888e-01,\n",
       "       -1.15745711e+00, -7.22238660e-01, -1.47227168e-01,  9.12265718e-01,\n",
       "       -6.93450391e-01,  1.74378383e+00, -9.63716924e-01,  3.28226268e-01,\n",
       "        7.52827942e-01,  1.35893214e+00,  4.00432169e-01,  1.26354551e+00,\n",
       "       -7.61565447e-01,  5.56616783e-01, -1.30654526e+00,  4.40410405e-01,\n",
       "        1.20022726e+00,  4.25478294e-02, -2.37311196e+00,  1.04227209e+00,\n",
       "       -1.73049724e+00, -7.33389974e-01,  4.74847555e-01,  1.60162973e+00,\n",
       "       -1.86190712e+00, -9.69646573e-02, -6.57371461e-01,  1.55038822e+00],\n",
       "      dtype=float32)"
      ]
     },
     "execution_count": 33,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 33
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:09:53.051408Z",
     "start_time": "2025-07-09T13:09:53.046747Z"
    }
   },
   "cell_type": "code",
   "source": "encoder_attn_add_norm_outputs[0][0]",
   "id": "8d2717d2b4a8ef82",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([ 4.5741e-02,  2.2247e+00, -1.6139e+00,  3.9893e-01, -2.2784e-01,\n",
       "         8.9113e-01, -1.5110e+00,  1.3868e+00, -7.3695e-01,  4.2994e-01,\n",
       "        -7.2616e-01, -1.2739e+00, -3.4483e-01,  1.3199e+00, -1.2599e-01,\n",
       "         1.6462e+00,  1.1558e+00,  5.0302e-01, -1.5329e+00, -4.5272e-01,\n",
       "        -1.2941e+00,  4.9931e-01,  4.2310e-01,  5.2194e-01, -3.3521e-01,\n",
       "        -1.0997e-01, -2.4839e-01, -9.8979e-02, -1.5404e-02,  2.8106e+00,\n",
       "         2.2982e-01,  9.2069e-01,  5.5594e-01,  2.0227e-01, -1.7165e+00,\n",
       "         1.3635e+00,  9.0379e-01,  3.5532e-01,  5.5782e-01,  2.7181e-01,\n",
       "         8.2669e-01,  8.6938e-01, -5.2436e-01,  1.1949e+00, -1.7249e-01,\n",
       "        -1.6474e-01, -1.8898e+00, -7.9868e-02, -1.2846e+00,  1.9493e+00,\n",
       "         6.6824e-01,  4.3361e-01, -5.3536e-02, -1.1072e+00, -1.6710e-01,\n",
       "         8.4909e-01,  2.3327e-01,  9.7258e-01, -2.2381e+00,  1.7906e+00,\n",
       "        -4.7973e-02,  9.3418e-01,  7.1021e-02,  8.0077e-01, -3.9515e-01,\n",
       "        -6.0092e-02, -8.2525e-01,  4.4266e-01, -1.9087e+00,  3.6544e-01,\n",
       "        -8.6694e-01, -6.4990e-01, -2.8463e+00,  1.0219e+00,  3.3922e-02,\n",
       "         6.9842e-02, -1.0416e+00, -2.6767e-01, -8.4572e-01,  5.6825e-01,\n",
       "        -8.9310e-01,  8.4891e-01, -5.9190e-01,  1.4162e-01, -1.3976e+00,\n",
       "         1.0810e+00,  8.0952e-01,  1.2713e+00,  4.0070e-01,  2.6816e-01,\n",
       "        -1.2003e+00,  1.7622e+00,  1.0414e+00,  2.9871e-01,  8.1678e-02,\n",
       "         2.4536e-02, -1.5284e+00,  5.3199e-01, -1.3400e+00, -8.1726e-01,\n",
       "        -1.3327e+00, -4.8815e-01, -4.3954e-01, -1.9233e-01,  2.8469e-01,\n",
       "         8.2149e-01, -3.0867e-01,  9.7128e-01,  1.0226e+00, -2.4038e-01,\n",
       "        -4.4014e-02,  7.0300e-01, -1.3189e+00,  1.6124e+00, -7.0712e-02,\n",
       "        -2.5057e-01, -1.2059e+00,  1.3234e+00,  3.9303e-01, -9.7813e-02,\n",
       "        -1.4107e+00,  9.7596e-01, -1.8158e+00,  7.3054e-02,  1.8106e-01,\n",
       "         1.4489e-03, -2.7650e-01,  2.4842e+00, -4.8616e-01,  4.2837e-01,\n",
       "        -8.6208e-01,  1.1193e+00,  4.0777e-01,  4.9980e-01, -9.6747e-01,\n",
       "         1.0102e+00, -6.7957e-01,  1.4907e+00, -1.0398e+00,  1.8483e+00,\n",
       "         1.0983e-02,  2.0937e+00, -1.4090e+00,  1.2301e+00, -4.0534e-01,\n",
       "        -6.4798e-01, -1.0381e-02,  1.7048e+00, -3.4200e-01, -6.0124e-02,\n",
       "        -1.0405e+00,  3.1371e-01, -1.1684e-01, -8.7449e-01, -2.3394e-01,\n",
       "         3.0110e-01, -1.4763e+00,  2.3137e-01, -1.5151e+00, -8.7612e-01,\n",
       "        -1.6127e-01,  1.6527e+00, -4.8094e-01,  3.5648e-01,  3.6475e-01,\n",
       "        -8.7732e-01, -4.3563e-02,  7.2886e-01,  8.4470e-01,  6.3412e-01,\n",
       "        -5.1735e-01, -5.4614e-01,  2.4432e-01,  1.1170e+00, -6.2058e-01,\n",
       "        -1.5945e+00, -4.0095e-01,  1.3391e+00, -1.3337e+00,  8.1438e-01,\n",
       "        -1.8058e+00,  5.2412e-02,  2.5164e-02, -1.6483e-01, -1.8165e+00,\n",
       "        -3.7915e-01,  1.2049e-01,  2.9233e-01,  9.2542e-01, -4.5191e-01,\n",
       "        -4.8840e-01, -1.1259e-01, -4.9289e-01, -4.0802e-01, -3.3643e-01,\n",
       "         8.7555e-01,  2.3767e-01, -1.5393e-01, -7.3360e-01,  4.7817e-01,\n",
       "        -9.5145e-01,  2.1349e-01, -2.3984e+00,  4.5327e-01,  1.6219e+00,\n",
       "        -3.8508e-01,  6.4949e-01,  7.1622e-01, -5.5572e-01,  3.6028e-01,\n",
       "        -2.2144e+00,  3.2130e-01,  9.3022e-01,  9.4289e-01, -6.9696e-01,\n",
       "         1.0713e+00, -1.5680e+00,  3.3433e-01, -1.1795e+00,  3.7789e+00,\n",
       "        -1.4299e+00, -3.6839e-01,  6.8466e-01,  2.1474e-01, -3.5440e-02,\n",
       "        -1.6038e+00, -1.4400e+00,  2.1853e+00, -9.2871e-01,  1.2802e-01,\n",
       "         1.2301e+00, -1.6728e-01, -1.0468e+00,  2.9337e-01,  7.9023e-01,\n",
       "         1.2996e+00,  3.6308e-01, -1.4314e-01, -7.2158e-01, -1.4819e+00,\n",
       "        -3.5069e-01,  1.2208e+00, -1.6042e+00,  6.7211e-01,  6.6080e-01,\n",
       "         1.1223e+00, -1.1862e+00,  1.4903e+00, -8.2313e-01,  6.7390e-01,\n",
       "        -5.7854e-01, -1.2906e+00,  6.6539e-01,  1.5920e+00, -2.5113e+00,\n",
       "         1.7379e+00, -7.5785e-01, -6.8014e-01, -1.9865e+00, -3.2695e-01,\n",
       "        -2.4017e+00,  8.4339e-01,  2.8422e-01,  1.2147e-01, -1.5417e-01,\n",
       "         6.9591e-01, -3.4522e-02,  6.7878e-01,  4.8460e-02,  1.1009e+00,\n",
       "        -6.3860e-01,  1.0631e-01, -3.6285e-01, -4.4589e-02, -4.5261e-01,\n",
       "         1.0496e+00,  9.4375e-01,  8.8233e-01,  3.1440e-01,  1.7802e-01,\n",
       "        -2.3485e-01, -6.4006e-01,  6.3330e-01,  9.3879e-01,  7.0575e-01,\n",
       "         3.5025e-01, -1.2096e+00,  1.2329e+00,  4.2191e-01,  9.5072e-01,\n",
       "        -1.7285e-01, -3.3448e-01, -4.1323e-01,  1.2542e+00, -5.1163e-01,\n",
       "         1.9105e+00,  2.5278e-01,  1.9257e-02, -9.6689e-01,  4.3059e-01,\n",
       "        -8.3100e-01,  1.3003e-01, -1.2535e-01,  1.6230e+00, -8.9347e-01,\n",
       "        -2.0805e-01,  4.6996e-01, -4.2530e-01,  7.4381e-02,  7.6575e-01,\n",
       "         2.2619e-01,  4.0438e-01, -1.1247e+00, -1.1794e+00, -6.8179e-01,\n",
       "         4.5315e-01, -1.7756e+00, -3.4672e-01, -6.4401e-01, -9.6359e-01,\n",
       "        -1.1772e+00, -4.1043e-01, -6.2119e-01,  7.2425e-01, -8.4870e-01,\n",
       "        -4.5997e-01, -1.5720e+00,  2.2344e+00, -4.0854e-01, -2.6935e-01,\n",
       "        -8.2603e-01,  1.4690e+00, -1.0316e+00, -5.0932e-02, -9.0605e-01,\n",
       "        -4.3317e-01,  5.8580e-01, -5.1343e-01, -7.8798e-02, -1.2812e+00,\n",
       "        -3.2420e-01,  2.4923e-01, -1.2137e+00,  6.8170e-01, -1.3943e+00,\n",
       "         1.1842e+00,  6.4973e-01, -4.7591e-01, -1.0525e+00, -5.8463e-01,\n",
       "        -1.1889e+00, -1.2569e-01, -4.3974e-01,  6.0489e-02, -7.5693e-01,\n",
       "         2.5737e+00, -1.8331e-01, -1.4377e+00, -1.1974e+00, -1.1125e-01,\n",
       "        -5.9444e-02,  1.5255e+00, -1.9716e+00,  1.5978e+00, -1.2837e+00,\n",
       "        -7.9853e-02,  2.7779e-01,  6.0277e-02,  8.8584e-01,  2.0915e+00,\n",
       "        -3.0337e-01, -1.1718e-01, -2.3843e-01, -6.9536e-01, -1.1614e+00,\n",
       "        -7.5230e-01,  1.2787e-01,  1.3837e+00,  1.3950e-01,  4.7571e-01,\n",
       "         1.3373e-01,  8.3358e-01,  6.8678e-01,  1.3122e+00,  3.9519e-01,\n",
       "        -1.6495e-01,  1.4121e-02,  2.5867e-01, -1.5119e+00,  3.1871e-01,\n",
       "        -4.0019e-01, -3.3660e-01, -1.3389e+00, -1.0147e+00, -9.6203e-02,\n",
       "        -1.1570e-01,  2.2940e-01, -1.9628e-01, -1.2897e+00,  1.0857e+00,\n",
       "         2.8409e-01,  1.7617e+00,  4.4719e-01, -4.3940e-01, -7.9924e-01,\n",
       "        -3.5580e-02, -9.8801e-01,  3.7364e-01, -2.8226e-01,  1.4236e+00,\n",
       "        -3.9151e-01,  3.9845e-01,  6.3791e-01, -9.6709e-02,  8.4650e-01,\n",
       "         1.6464e-01, -1.8863e+00,  1.3943e+00, -2.9962e-01,  7.2516e-01,\n",
       "        -1.5561e+00,  2.6146e-01, -3.1370e-01,  1.9492e+00, -1.0166e-01,\n",
       "         1.5758e-01, -1.1266e-01,  6.0790e-01, -2.1575e+00, -3.3743e-01,\n",
       "        -1.6073e+00, -7.5513e-01, -8.3417e-01,  2.5131e+00, -9.1740e-01,\n",
       "         6.6781e-01, -1.6696e+00,  2.1960e-01, -1.2383e-01,  2.2359e+00,\n",
       "         1.7367e-01, -1.4491e+00, -1.2516e+00,  1.2947e-01, -1.8416e-01,\n",
       "         1.6862e+00,  4.2237e-02,  1.9806e+00,  7.4293e-01, -6.8007e-01,\n",
       "        -1.4739e+00,  4.6938e-01, -4.3038e-01, -2.2319e-02, -1.0203e+00,\n",
       "         1.3618e+00, -1.2727e+00,  1.6452e+00, -4.7956e-01,  1.6160e+00,\n",
       "         4.9666e-01,  2.9677e-01, -2.1425e+00,  9.8586e-02,  7.7190e-02,\n",
       "         1.3067e+00,  3.6307e-01,  1.4594e+00, -5.1580e-01,  1.4692e+00,\n",
       "        -1.2622e+00,  2.1142e+00, -1.0621e-01, -8.0226e-01, -2.5113e-01,\n",
       "        -7.0079e-01,  4.5935e-01, -2.0197e-01, -1.0429e+00,  9.9408e-01,\n",
       "        -3.1582e-01,  7.3283e-01,  8.5066e-01,  8.4790e-01, -1.1575e+00,\n",
       "        -7.2224e-01, -1.4723e-01,  9.1226e-01, -6.9345e-01,  1.7438e+00,\n",
       "        -9.6371e-01,  3.2823e-01,  7.5283e-01,  1.3589e+00,  4.0043e-01,\n",
       "         1.2635e+00, -7.6156e-01,  5.5661e-01, -1.3065e+00,  4.4041e-01,\n",
       "         1.2002e+00,  4.2548e-02, -2.3731e+00,  1.0423e+00, -1.7305e+00,\n",
       "        -7.3339e-01,  4.7485e-01,  1.6016e+00, -1.8619e+00, -9.6964e-02,\n",
       "        -6.5737e-01,  1.5504e+00], grad_fn=<SelectBackward0>)"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 34
  },
  {
   "cell_type": "code",
   "id": "a004b60ec103d66a",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:13:48.735032Z",
     "start_time": "2025-07-09T13:13:48.708020Z"
    }
   },
   "source": [
    "## Feed Forward: two linear layers; the output dimension matches the input dimension\n",
    "class FeedForward(nn.Module):\n",
    "    \"\"\"Position-wise feed-forward network: Linear -> ReLU -> Linear.\n",
    "\n",
    "    Expands from d_model to d_ff, then projects back to d_model,\n",
    "    so the output shape equals the input shape.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, d_model: int, d_ff: int):\n",
    "        super().__init__()\n",
    "        self.fc1 = nn.Linear(d_model, d_ff)  # expand: d_model -> d_ff\n",
    "        self.fc2 = nn.Linear(d_ff, d_model)  # project back: d_ff -> d_model\n",
    "\n",
    "    def forward(self, x: torch.Tensor) -> torch.Tensor:\n",
    "        # ReLU non-linearity between the two projections; overall shape is preserved\n",
    "        return self.fc2(torch.relu(self.fc1(x)))\n",
    "\n",
    "\n",
    "feed_forward = FeedForward(d_model=d_model, d_ff=2048)\n",
    "\n",
    "# Apply the FFN to the attention Add&Norm output; shape stays (batch_size, seq_len, d_model)\n",
    "encoder_attn_add_norm_feed_forward_outputs = feed_forward(encoder_attn_add_norm_outputs)\n",
    "encoder_attn_add_norm_feed_forward_outputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 35
  },
  {
   "cell_type": "code",
   "id": "1d01b7ff51008594",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:14:27.113230Z",
     "start_time": "2025-07-09T13:14:27.103108Z"
    }
   },
   "source": [
    "# One more residual connection + layer normalization to obtain the final Encoder output\n",
    "layer_norm_2 = nn.LayerNorm(d_model)\n",
    "encoder_outputs = layer_norm_2(encoder_attn_add_norm_feed_forward_outputs + encoder_attn_add_norm_outputs)\n",
    "encoder_outputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 36
  },
  {
   "cell_type": "code",
   "id": "16fba6d2199cee2b",
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-07-09T13:19:32.749407Z",
     "start_time": "2025-07-09T13:19:32.653832Z"
    }
   },
   "source": [
    "# Define the Encoder: a stack of identical EncoderLayer blocks\n",
    "class Encoder(nn.Module):\n",
    "    \"\"\"Transformer encoder: num_encoder_layers stacked EncoderLayer blocks.\"\"\"\n",
    "\n",
    "    def __init__(self, d_model: int, d_ff: int, num_heads: int, num_encoder_layers: int):\n",
    "        super().__init__()\n",
    "        # ModuleList (rather than a plain list) so sub-layer parameters are registered\n",
    "        self.layers = nn.ModuleList([EncoderLayer(d_model, d_ff, num_heads) for _ in range(num_encoder_layers)])\n",
    "\n",
    "    def forward(self, x: torch.Tensor) -> torch.Tensor:\n",
    "        # Feed each layer's output into the next; shape stays (batch_size, seq_len, d_model)\n",
    "        for layer in self.layers:\n",
    "            x = layer(x)\n",
    "        return x\n",
    "\n",
    "\n",
    "# Define a single EncoderLayer\n",
    "class EncoderLayer(nn.Module):\n",
    "    \"\"\"One encoder block: multi-head self-attention and a feed-forward network,\n",
    "    each followed by a residual connection and layer normalization.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, d_model: int, d_ff: int, num_heads: int):\n",
    "        super().__init__()\n",
    "        self.mha = MultiHeadAttention(embed_dim=d_model, attn_dim=d_model, output_dim=d_model, num_heads=num_heads)\n",
    "        self.ff = FeedForward(d_model, d_ff)\n",
    "        self.layer_norm1 = nn.LayerNorm(d_model)\n",
    "        self.layer_norm2 = nn.LayerNorm(d_model)\n",
    "\n",
    "    def forward(self, x: torch.Tensor) -> torch.Tensor:\n",
    "        # 1. Multi-head self-attention\n",
    "        # 2. Residual connection + layer normalization\n",
    "        x = self.layer_norm1(x + self.mha(x))\n",
    "\n",
    "        # 3. Feed-forward network\n",
    "        # 4. Residual connection + layer normalization\n",
    "        return self.layer_norm2(x + self.ff(x))\n",
    "\n",
    "\n",
    "# Use the notebook-level d_model constant instead of re-hardcoding 512,\n",
    "# keeping this cell consistent with the configuration defined at the top\n",
    "encoder = Encoder(d_model=d_model, d_ff=2048, num_heads=8, num_encoder_layers=6)\n",
    "\n",
    "encoder_outputs = encoder(encoder_inputs)\n",
    "encoder_outputs.shape"
   ],
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2, 5, 512])"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 37
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
