{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "3a800397",
   "metadata": {},
   "source": [
    "# Attention\n",
    "\n",
    "注意力机制\n",
    "\n",
    "tf=2.9.1\n",
    "\n",
    "\n",
    "**call 方法**\n",
    "```python\n",
    "\n",
    "# call 方法参数\n",
    "call(self,\n",
    "   inputs,\n",
    "   mask=None,\n",
    "   training=None,\n",
    "   return_attention_scores=False)\n",
    "\n",
    "```\n",
    "**call 参数**\n",
    "\n",
    "- inputs:\n",
    "    - query: [batch_size, Tq, dim]\n",
    "    - value: [batch_size, Tv, dim]\n",
    "    - key:   [batch_size, Tv, dim] 可选，若不存在，则直接使用value\n",
    "- mask:\n",
    "    - query_mask\n",
    "    - value_mask\n",
    "- return_attention_scores\n",
    "\n",
    "**返回结果**\n",
    "- Attention outputs of shape `[batch_size, Tq, dim]`.\n",
    "- [Optional] Attention scores after masking and softmax with shape\n",
    "    - [batch_size, Tq, Tv]\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "1b8d2095",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'2.9.1'"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "tf.__version__"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "9996b5a5",
   "metadata": {},
   "source": [
    "## Attention\n",
    "\n",
    "点积attention\n",
    "\n",
    "\n",
    "```python\n",
    "tf.keras.layers.Attention(use_scale=True, dropout=0)\n",
    "```\n",
    "\n",
    "\n",
    "**计算过程**\n",
    "\n",
    "- k, q, v\n",
    "    - k: [batch_size, Tv, dim]\n",
    "    - v: [batch_size, Tv, dim]\n",
    "    - q: [batch_size, Tq, dim]\n",
    "- 计算得分：scores = tf.matmul(query, key, transpose_b=True)\n",
    "    - [batch_size, Tq, Tv]\n",
    "- 得分用softmax进行处理：distribution = tf.nn.softmax(scores)\n",
    "    - [batch_size, Tq, Tv]\n",
    "- 计算att: tf.matmul(distribution, value)\n",
    "    - [batch_size, Tq, dim]\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "c1c380ce",
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\n",
    "q = np.random.random(size=(32, 1, 128))  # [batch_size, Tq, dim]\n",
    "v = np.random.random(size=(32, 64, 128))  # [batch_size, Tv, dim]\n",
    "k = np.random.random(size=(32, 64, 128))  #  [batch_size, Tv, dim]\n",
    "\n",
    "attention = tf.keras.layers.Attention(use_scale=False, dropout=0)\n",
    "\n",
    "# att: [batch_size, Tq, dim]\n",
    "att = attention([q, v, k])\n",
    "# att: [batch_size, Tq, dim]  score: [batch_size, Tq, Tv]\n",
    "att, score = attention([q, v, k], return_attention_scores=True)\n",
    "\n",
    "\n",
    "# add mask\n",
    "q_mask = tf.constant(np.random.randint(0, 2, (32, 1)), dtype=tf.bool)\n",
    "v_mask = tf.constant(np.random.randint(0, 2, (32, 64)), dtype=tf.bool)\n",
    "\n",
    "# att: [batch_size, Tq, dim]\n",
    "att = attention(inputs=[q, v, k], mask=[q_mask, v_mask]) "
   ]
  },
  {
   "cell_type": "markdown",
   "id": "9363615f",
   "metadata": {},
   "source": [
    "## AdditiveAttention\n",
    "\n",
    "加性注意力 \n",
    "\n",
    "```python\n",
    "\n",
    "tf.keras.layers.AdditiveAttention(*args, **kwargs)\n",
    "\n",
    "# 核心代码\n",
    "tf.reduce_sum(scale * tf.tanh(q_reshaped + k_reshaped), axis=-1)\n",
    "```\n",
    "\n",
    "**计算过程**\n",
    "\n",
    "- k, q, v\n",
    "    - k: [batch_size, Tv, dim]\n",
    "    - v: [batch_size, Tv, dim]\n",
    "    - q: [batch_size, Tq, dim]\n",
    "- 计算得分：scores = tf.reduce_sum(tf.tanh(query + key), axis=-1)\n",
    "    - [batch_size, Tq, Tv]\n",
    "- 得分用softmax进行处理：distribution = tf.nn.softmax(scores)\n",
    "    - [batch_size, Tq, Tv]\n",
    "- 计算att: tf.matmul(distribution, value)\n",
    "    - [batch_size, Tq, dim]\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "103cb2e3",
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\n",
    "q = np.random.random(size=(32, 1, 128))  # [batch_size, Tq, dim]\n",
    "v = np.random.random(size=(32, 64, 128))  # [batch_size, Tv, dim]\n",
    "k = np.random.random(size=(32, 64, 128))  #  [batch_size, Tv, dim]\n",
    "\n",
    "attention = tf.keras.layers.AdditiveAttention(use_scale=False, dropout=0)\n",
    "\n",
    "# att: [batch_size, Tq, dim]\n",
    "att = attention([q, v, k])\n",
    "# att: [batch_size, Tq, dim]  score: [batch_size, Tq, Tv]\n",
    "att, score = attention([q, v, k], return_attention_scores=True)\n",
    "\n",
    "# add mask\n",
    "q_mask = tf.constant(np.random.randint(0, 2, (32, 1)), dtype=tf.bool)\n",
    "v_mask = tf.constant(np.random.randint(0, 2, (32, 64)), dtype=tf.bool)\n",
    "\n",
    "# att: [batch_size, Tq, dim]\n",
    "att = attention(inputs=[q, v, k], mask=[q_mask, v_mask]) "
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b0575f5f",
   "metadata": {},
   "source": [
    "# MultiHeadAttention  存疑\n",
    "\n",
    "多头注意力机制\n",
    "\n",
    "\n",
    "```python\n",
    "tf.keras.layers.MultiHeadAttention(\n",
    "    num_heads,   # Number of attention heads\n",
    "    key_dim,   # Size of each attention head for query and key.\n",
    "    value_dim=None, # Size of each attention head for value.\n",
    "    dropout=0.0,\n",
    "    use_bias=True,\n",
    "    output_shape=None,\n",
    "    attention_axes=None,\n",
    "    kernel_initializer='glorot_uniform',\n",
    "    bias_initializer='zeros',\n",
    "    kernel_regularizer=None,\n",
    "    bias_regularizer=None,\n",
    "    activity_regularizer=None,\n",
    "    kernel_constraint=None,\n",
    "    bias_constraint=None,\n",
    "    **kwargs\n",
    ")\n",
    "```\n",
    "\n",
    "**Args**\n",
    "- num_heads  # 注意头的数量\n",
    "- key_dim,   # 查询和键的每个注意力头的大小\n",
    "- value_dim=None, # 每个注意力头的value大小\n",
    "- dropout=0.0,  # Dropout probability.\n",
    "- use_bias=True,  # Boolean, whether the dense layers use bias vectors/matrices.\n",
    "- output_shape=None, # 输出张量的预期形状（批次和序列维度除外）；如果未指定，则投影回 key 的特征维度\n",
    "- attention_axes=None, # 应用注意力的轴；None 表示对除批次、头和特征之外的所有轴应用注意力\n",
    "- kernel_initializer='glorot_uniform',\n",
    "- bias_initializer='zeros',\n",
    "- kernel_regularizer=None,\n",
    "- bias_regularizer=None,\n",
    "- activity_regularizer=None,\n",
    "- kernel_constraint=None,\n",
    "- bias_constraint=None,\n",
    "\n",
    "\n",
    "**call**\n",
    "```python \n",
    "call(self,\n",
    "    query,\n",
    "    value,\n",
    "    key=None,\n",
    "    attention_mask=None,\n",
    "    return_attention_scores=False,\n",
    "    training=None)\n",
    "``` \n",
    "- args\n",
    "    - query,  # query: Query `Tensor` of shape `[B, T, dim]`.\n",
    "    - value,  # value: Value `Tensor` of shape `[B, S, dim]`.\n",
    "    - key=None,  # key: Optional key `Tensor` of shape `[B, S, dim]`. If not given, will use\n",
    "          `value`\n",
    "    - attention_mask=None, # a boolean mask of shape `[B, T, S]`\n",
    "    - return_attention_scores=False,\n",
    "    - training=None\n",
    "- returns \n",
    "    - attention_output: The result of the computation, of shape [B, T, E],\n",
    "    - attention_scores: [Optional] multi-head attention coefficients over\n",
    "      attention axes\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "d0db2127",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "TensorShape([32, 4, 128])"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\n",
    "q = np.random.random(size=(32, 4, 128))  # [batch_size, Tq, dim]\n",
    "v = np.random.random(size=(32, 64, 128))  # [batch_size, Tv, dim]\n",
    "k = np.random.random(size=(32, 64, 128))  #  [batch_size, Tv, dim]\n",
    "\n",
    "attention = tf.keras.layers.MultiHeadAttention(num_heads=2, key_dim=123)\n",
    "\n",
    "# att: [batch_size, Tq, dim]\n",
    "att = attention(q, v, k)\n",
    "\n",
    "att.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7142bf77",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "116ac952",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fa384cbd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "3a5c2b39",
   "metadata": {},
   "outputs": [],
   "source": [
    "tf.keras.layers.MultiHeadAttention?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "74f8d177",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(None, 8, 16)\n",
      "(None, 2, 8, 4)\n"
     ]
    }
   ],
   "source": [
    "layer = tf.keras.layers.MultiHeadAttention(num_heads=2, key_dim=3)\n",
    "target = tf.keras.Input(shape=[8, 16])\n",
    "source = tf.keras.Input(shape=[4, 16])\n",
    "output_tensor, weights = layer(target, source,\n",
    "                               return_attention_scores=True)\n",
    "print(output_tensor.shape)\n",
    "\n",
    "print(weights.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "b35c4ad0",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(None, 5, 3, 4, 16, 4)\n"
     ]
    }
   ],
   "source": [
    "layer = tf.keras.layers.MultiHeadAttention(num_heads=2,\n",
    "                                           key_dim=2, attention_axes=(2, 3))\n",
    "input_tensor = tf.keras.Input(shape=[5, 3, 4, 16, 4])\n",
    "output_tensor = layer(input_tensor, input_tensor)\n",
    "print(output_tensor.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f0aa6f27",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fa643592",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
