{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# model_encoder forward"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import h5py\n",
    "import json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "params = {\n",
    "    'model_weight' : '../ner_model_weight/model_encoder_714.h5',\n",
    "    'embed_size' : 500,\n",
    "    'max_sent_len': 20,\n",
    "    'heads':16,\n",
    "    'head_size':4,\n",
    "    'batch_size': 64,\n",
    "    'lr' : 0.001,\n",
    "    'max_sent_len': 20,\n",
    "    'epochs': 500,\n",
    "    'drops' : [0.1]\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "heads=params['heads']\n",
    "head_size=params['head_size']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "with open('../char_6.17.json', mode='r', encoding='utf-8') as f:\n",
    "    dicts = json.load(f)\n",
    "char2id = dicts['char2id']\n",
    "id2char = dicts['id2char']\n",
    "intent2id = dicts['intent2id']\n",
    "id2intent = dicts['id2intent']\n",
    "slot2id = dicts['slot2id']\n",
    "id2slot = dicts['id2slot']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "def sigmoid(x):\n",
    "    y = 1 / (1 + np.exp(-x))\n",
    "    return y\n",
    "\n",
    "def softmax(x):\n",
    "    exp_x = np.exp(x)\n",
    "    sum_exp_x = np.sum(exp_x,axis=-1,keepdims=True)\n",
    "    y = exp_x / sum_exp_x\n",
    "    return y\n",
    "\n",
    "def embedding(x,embed_size,embed):\n",
    "    x_one= np.zeros((len(x),embed_size))\n",
    "    x_one[range(len(x)), x] = 1\n",
    "    x_embed = np.dot(x_one, embed)\n",
    "    return x_embed\n",
    "\n",
    "def GlobalAveragePooling1D(x,step_axis=0):\n",
    "    return np.mean(x,axis=step_axis)\n",
    "\n",
    "def LayerNormalization(x,gamma,beta,step_axis = -1,epsilon=1e-3):\n",
    "    mean = np.mean(x,axis = step_axis)\n",
    "    mean = np.expand_dims(mean,axis=1)\n",
    "    variance = np.var(x,axis = step_axis)\n",
    "    variance = np.expand_dims(variance,axis=1)\n",
    "    inv = 1.0 / np.sqrt(variance + epsilon)\n",
    "#     print(np.shape(inv))\n",
    "    gamma = np.expand_dims(gamma,axis=0)\n",
    "    beta = np.expand_dims(beta,axis=0)\n",
    "    inv = gamma *inv\n",
    "    return x * inv + (beta - mean * inv)\n",
    "\n",
    "def dense(x,gamma, bias):\n",
    "#     print(np.shape(x))\n",
    "    y = np.matmul(x,gamma)\n",
    "    y = np.add(y,bias)\n",
    "    return y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def MultiHeadAttention(x,heads,head_size,atten_q_bias,atten_q_kernel,atten_k_bias,atten_k_kernel,atten_v_bias,atten_v_kernel,atten_o_bias,atten_o_kernel):\n",
    "    q = dense(x,atten_q_kernel,atten_q_bias)\n",
    "#     print('q: ',q)\n",
    "    k = dense(x,atten_k_kernel,atten_k_bias)\n",
    "#     print('k: ',k)\n",
    "    v = dense(x,atten_v_kernel,atten_v_bias)\n",
    "#     print('v: ',v )model_encoder\n",
    "    \n",
    "    qw = np.reshape(q,(-1,heads,head_size))\n",
    "    kw = np.reshape(k,(-1,heads,head_size))\n",
    "    vw = np.reshape(v,(-1,heads,head_size))\n",
    "    print(np.shape(qw))\n",
    "    \n",
    "    a = np.einsum('jhd,khd->hjk', qw, kw)\n",
    "    a = a / head_size ** 0.5\n",
    "    A = softmax(a)\n",
    "    o = np.einsum('hjk,khd -> jhd', A, vw)\n",
    "    \n",
    "    print(np.shape(o))\n",
    "    o = np.reshape(o,(-1,heads*head_size))\n",
    "    print(np.shape(o))\n",
    "    o = dense(o,atten_o_kernel,atten_o_bias)\n",
    "    return o"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "file_path = '../ner_model_weight/model_encoder_714.h5'\n",
    "f = h5py.File(file_path, 'r')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_weight(file_path):\n",
    "    f = h5py.File(file_path,'r')\n",
    "    \n",
    "    embed = f['embedding']['embedding']['embeddings:0'][:]\n",
    "    \n",
    "    encoder_bias = f['encoder']['encoder']['feed']['bias:0'][:]\n",
    "    encoder_kernel = f['encoder']['encoder']['feed']['kernel:0'][:]\n",
    "    \n",
    "    encoder_normal_beta = f['encoder']['encoder']['normal']['beta:0'][:]\n",
    "    encode_normal_gamma = f['encoder']['encoder']['normal']['gamma:0'][:]\n",
    "    \n",
    "    encoder_normal_beta1 = f['encoder']['encoder']['normal1']['beta:0'][:]\n",
    "    encoder_normal_gamma1 = f['encoder']['encoder']['normal1']['gamma:0'][:]\n",
    "    \n",
    "    atten_q_bias = f['encoder']['encoder']['multi_head_attention']['q']['bias:0'][:]\n",
    "    atten_q_kernel = f['encoder']['encoder']['multi_head_attention']['q']['kernel:0'][:]\n",
    "    \n",
    "    atten_k_bias = f['encoder']['encoder']['multi_head_attention']['k']['bias:0'][:]\n",
    "    atten_k_kernel = f['encoder']['encoder']['multi_head_attention']['k']['kernel:0'][:]\n",
    "    \n",
    "    atten_v_bias = f['encoder']['encoder']['multi_head_attention']['v']['bias:0'][:]\n",
    "    atten_v_kernel = f['encoder']['encoder']['multi_head_attention']['v']['kernel:0'][:]\n",
    "    \n",
    "    atten_o_bias = f['encoder']['encoder']['multi_head_attention']['o']['bias:0'][:]\n",
    "    atten_o_kernel = f['encoder']['encoder']['multi_head_attention']['o']['kernel:0'][:]\n",
    "    \n",
    "    pre_intent_bias = f['pre_intent']['pre_intent']['bias:0'][:]\n",
    "    pre_intent_kernel = f['pre_intent']['pre_intent']['kernel:0'][:]\n",
    "    \n",
    "    pre_ner_bias = f['pre_ner']['pre_ner']['bias:0'][:]\n",
    "    pre_ner_kernel = f['pre_ner']['pre_ner']['kernel:0'][:]\n",
    "    \n",
    "    return embed,encoder_bias,encoder_kernel,encoder_normal_beta,encode_normal_gamma,encoder_normal_beta1,encoder_normal_gamma1,\\\n",
    "            atten_q_bias,atten_q_kernel,atten_k_bias,atten_k_kernel,atten_v_bias,atten_v_kernel,atten_o_bias,atten_o_kernel,\\\n",
    "            pre_intent_bias,pre_intent_kernel,pre_ner_bias,pre_ner_kernel"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def trans2labelid(vocab, labels, max_sent_len):\n",
    "    labels = [vocab[label] for label in labels]\n",
    "    if len(labels) < max_sent_len:\n",
    "        labels += [0] * (max_sent_len - len(labels))\n",
    "    else:\n",
    "        labels = labels[:max_sent_len]\n",
    "    return labels"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "def test(inputs,h5file_path):\n",
    "    embed,encoder_bias,encoder_kernel,encoder_normal_beta,encode_normal_gamma,encoder_normal_beta1,encoder_normal_gamma1,\\\n",
    "            atten_q_bias,atten_q_kernel,atten_k_bias,atten_k_kernel,atten_v_bias,atten_v_kernel,atten_o_bias,atten_o_kernel,\\\n",
    "            pre_intent_bias,pre_intent_kernel,pre_ner_bias,pre_ner_kernel = get_weight(file_path)\n",
    "    \n",
    "    x = trans2labelid(char2id,inputs,params['max_sent_len'])\n",
    "    embed = embedding(x,params['embed_size'], embed)\n",
    "    state = embed\n",
    "    for i in range(3):\n",
    "        att = MultiHeadAttention(state,heads,head_size,atten_q_bias,atten_q_kernel,atten_k_bias,\\\n",
    "                                 atten_k_kernel,atten_v_bias,atten_v_kernel,atten_o_bias,atten_o_kernel)\n",
    "        att_1 = np.add(att,state)\n",
    "        l = LayerNormalization(att_1,encode_normal_gamma,encoder_normal_beta)\n",
    "        feed1 = dense(l,encoder_kernel,encoder_bias)\n",
    "        l1 = LayerNormalization(feed1,encoder_normal_gamma1,encoder_normal_beta1)\n",
    "        state = l1\n",
    "    conv = GlobalAveragePooling1D(state)\n",
    "    \n",
    "    pre_intent = dense(conv,pre_intent_kernel,pre_intent_bias)\n",
    "    pre_intent = sigmoid(pre_intent)\n",
    "    \n",
    "    pre_slot = dense(state,pre_ner_kernel,pre_ner_bias)\n",
    "    pre_slot = sigmoid(pre_slot)\n",
    "    \n",
    "    return pre_intent, pre_slot "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(20, 16, 4)\n",
      "(20, 16, 4)\n",
      "(20, 64)\n",
      "(20, 16, 4)\n",
      "(20, 16, 4)\n",
      "(20, 64)\n",
      "(20, 16, 4)\n",
      "(20, 16, 4)\n",
      "(20, 64)\n"
     ]
    }
   ],
   "source": [
    "inputs = '打开空调'\n",
    "np_pre_intent, np_pre_slot  = test(inputs,file_path) "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([1.95904586e-09, 2.04386779e-05, 1.66595914e-04, 9.81184326e-06,\n",
       "       1.24571212e-04, 5.37920218e-06, 2.57746185e-04, 1.79327330e-04,\n",
       "       6.70664053e-06, 4.14397897e-06, 1.61926877e-05, 6.62351278e-09,\n",
       "       6.09047303e-05, 6.30200950e-08, 5.87803221e-05, 1.04552780e-05,\n",
       "       5.56724286e-05, 1.74767173e-06, 5.83089042e-06, 1.70645293e-05,\n",
       "       1.26748242e-06, 2.43458928e-06, 1.42205218e-04, 1.67237164e-05,\n",
       "       4.44439417e-06, 1.15679629e-04, 1.75295308e-06, 1.68209113e-05,\n",
       "       9.88302989e-05, 3.50426021e-06, 4.88769135e-05, 3.19596877e-05,\n",
       "       1.85471583e-06, 1.43897584e-09, 2.72267536e-08, 3.83518695e-05,\n",
       "       9.27672908e-06, 1.60597614e-06, 1.57129852e-05, 9.12048096e-06,\n",
       "       1.67779951e-07, 4.94079045e-05, 7.63037671e-07, 4.46608465e-05,\n",
       "       9.90124232e-08, 5.94714345e-06, 3.40175920e-01, 1.54744429e-04,\n",
       "       8.20822876e-06, 1.23600913e-04, 1.37792943e-06, 3.70436733e-06,\n",
       "       1.23600317e-07, 4.13216434e-06, 9.34163704e-06])"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np_pre_intent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[1.12049938e-04, 1.43518947e-06, 2.20492084e-05, 8.24728068e-07,\n",
       "        5.63667548e-07, 1.36262937e-05, 4.87082576e-07, 4.25770958e-07,\n",
       "        3.53080060e-06, 1.87324678e-06, 3.25442955e-06, 8.77282563e-07,\n",
       "        1.05962736e-05, 4.88877307e-08, 2.21502381e-05, 4.42867319e-07,\n",
       "        6.00204142e-07, 6.67324142e-06, 1.08494573e-04, 2.67930951e-06,\n",
       "        3.50265651e-06, 1.85574161e-02, 1.97405918e-06, 1.75415477e-08,\n",
       "        2.73151924e-06, 5.80408137e-08, 1.72910285e-01, 1.71942884e-05,\n",
       "        1.02581654e-04, 2.05186997e-04, 9.74425699e-07, 5.51951045e-06,\n",
       "        1.81612423e-06, 1.71990945e-04, 2.42315763e-03, 1.32583144e-03],\n",
       "       [2.73026992e-04, 8.18767088e-06, 1.43162994e-06, 4.41072472e-07,\n",
       "        1.75200975e-06, 7.03978053e-05, 2.41747974e-06, 4.74158778e-06,\n",
       "        9.20071206e-06, 1.03604179e-05, 2.03574631e-05, 3.57565832e-06,\n",
       "        3.78581066e-04, 1.81650827e-07, 9.71770970e-06, 8.16040008e-08,\n",
       "        9.71164197e-07, 8.76409435e-05, 9.10042021e-04, 1.88063988e-06,\n",
       "        9.62790537e-07, 4.16000845e-05, 1.11355928e-06, 1.36640455e-07,\n",
       "        5.48411337e-06, 9.81977487e-06, 4.60138399e-04, 1.37350216e-05,\n",
       "        5.58772728e-05, 5.94323451e-06, 4.65261244e-06, 2.28656279e-05,\n",
       "        1.83880083e-04, 4.42970728e-05, 1.78224046e-01, 3.05961771e-05],\n",
       "       [1.20372030e-04, 3.28412461e-05, 5.46828466e-07, 1.41486744e-03,\n",
       "        7.38148074e-07, 1.74036214e-04, 2.75164994e-06, 5.62133191e-07,\n",
       "        1.71605388e-06, 1.75285625e-05, 6.37129988e-04, 1.80181887e-04,\n",
       "        1.02405399e-06, 1.15789129e-04, 8.21098948e-01, 1.46476645e-04,\n",
       "        4.77246253e-05, 1.87298801e-05, 6.07385893e-05, 1.24485007e-04,\n",
       "        1.92673260e-06, 3.54148889e-05, 2.73276787e-05, 1.04837676e-04,\n",
       "        2.87603363e-07, 4.24316172e-05, 4.79978390e-07, 1.37741981e-04,\n",
       "        5.83647603e-06, 1.34985244e-06, 4.37239355e-06, 9.98256182e-07,\n",
       "        2.22132982e-03, 6.21139415e-05, 6.23942314e-07, 1.56297340e-05],\n",
       "       [3.02316491e-04, 1.67937352e-04, 4.19477608e-06, 2.02082966e-06,\n",
       "        6.18612805e-07, 6.91262390e-01, 5.72669966e-06, 6.75932437e-08,\n",
       "        2.45809736e-08, 2.66014697e-06, 1.16378551e-05, 6.25651272e-05,\n",
       "        8.47203019e-06, 7.19543761e-06, 4.15703026e-03, 1.18103468e-05,\n",
       "        7.27829748e-07, 3.41585903e-06, 4.37682131e-04, 2.54812038e-05,\n",
       "        4.18166662e-07, 1.86365952e-04, 1.21626441e-05, 5.14866385e-06,\n",
       "        5.68433946e-06, 2.89862771e-05, 4.09094005e-04, 5.43297362e-05,\n",
       "        6.73796533e-05, 2.50215942e-07, 1.05117792e-05, 1.20688888e-05,\n",
       "        3.16467556e-04, 3.43315851e-05, 8.77159083e-05, 3.31159655e-05],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06],\n",
       "       [1.18758246e-01, 3.16042376e-06, 1.06367381e-06, 5.23990825e-07,\n",
       "        3.07060098e-08, 1.46071430e-04, 2.46453729e-08, 7.64163726e-08,\n",
       "        2.68156973e-07, 9.96006324e-07, 4.83936035e-06, 1.00170040e-06,\n",
       "        4.73537744e-06, 2.06065209e-06, 4.89557923e-05, 1.10495200e-07,\n",
       "        3.55791453e-07, 2.37287834e-06, 2.37787601e-05, 7.59816829e-06,\n",
       "        5.63356078e-07, 1.96591887e-05, 1.31127962e-06, 2.81432116e-08,\n",
       "        1.48035943e-07, 3.19086459e-07, 2.32372473e-06, 1.12137302e-05,\n",
       "        3.50561217e-06, 2.88100434e-06, 3.37001883e-07, 6.12542208e-07,\n",
       "        1.68241015e-05, 3.08012189e-06, 3.31245878e-05, 2.26205084e-06]])"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np_pre_slot"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# encoder模型验证"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'os' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-18-854183be0d78>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtensorflow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkeras\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayers\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mconcatenate\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mDropout\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mLayerNormalization\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mDense\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0madd\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      2\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtensorflow\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mos\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0menviron\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"CUDA_VISIBLE_DEVICES\"\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'1'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      4\u001b[0m \u001b[0mgpus\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlist_physical_devices\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice_type\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'GPU'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'os' is not defined"
     ]
    }
   ],
   "source": [
    "from tensorflow.keras.layers import concatenate, Dropout,LayerNormalization, Dense, add\n",
    "import tensorflow as tf\n",
    "os.environ[\"CUDA_VISIBLE_DEVICES\"] = '1'\n",
    "gpus = tf.config.experimental.list_physical_devices(device_type='GPU')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.layers import Layer\n",
    "\n",
    "class MultiHeadAttention(Layer):\n",
    "    def __init__(\n",
    "            self,\n",
    "            heads,\n",
    "            head_size,\n",
    "            out_dim=None,\n",
    "            use_bias=True,\n",
    "#             max_value = 1,\n",
    "#             min_value = -1l1\n",
    "            **kwargs\n",
    "    ):\n",
    "        super(MultiHeadAttention, self).__init__(**kwargs)\n",
    "        self.heads = heads\n",
    "        self.head_size = head_size\n",
    "        self.out_dim = out_dim \n",
    "        self.use_bias = use_bias\n",
    "\n",
    "    def build(self, input_shape):\n",
    "        super(MultiHeadAttention, self).build(input_shape)\n",
    "        self.q_dense = tf.keras.layers.Dense(\n",
    "            units=self.head_size * self.heads,\n",
    "            use_bias=self.use_bias,\n",
    "            kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            name = 'q'\n",
    "            \n",
    "        )\n",
    "        self.k_dense = tf.keras.layers.Dense(\n",
    "            units=self.head_size * self.heads,\n",
    "            use_bias=self.use_bias,\n",
    "            kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            name = 'k'\n",
    "        )\n",
    "        self.v_dense = tf.keras.layers.Dense(\n",
    "            units=self.head_size * self.heads,\n",
    "            use_bias=self.use_bias,\n",
    "            kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            name = 'v'\n",
    "        )\n",
    "        self.o_dense = tf.keras.layers.Dense( \n",
    "            units=self.out_dim,\n",
    "            use_bias=self.use_bias,\n",
    "            kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            name = 'o'\n",
    "        )\n",
    "\n",
    "    def call(self, inputs):\n",
    "        q = inputs\n",
    "        k = inputs\n",
    "        v = inputs\n",
    "        # 线性变化\n",
    "        qw = self.q_dense(q)\n",
    "        kw = self.k_dense(k)\n",
    "        vw = self.v_dense(v)\n",
    "        # 形状变换\n",
    "        qw = tf.reshape(qw, (-1, tf.shape(q)[1], self.heads, self.head_size))\n",
    "        kw = tf.reshape(kw, (-1, tf.shape(q)[1], self.heads, self.head_size))\n",
    "        vw = tf.reshape(vw, (-1, tf.shape(q)[1], self.heads, self.head_size))\n",
    "        # attention\n",
    "        qkv_inputs = [qw, kw, vw]\n",
    "        o = self.pay_attention_to(qkv_inputs)\n",
    "        o = tf.reshape(o, (-1, tf.shape(o)[1], self.head_size * self.heads))\n",
    "        o = self.o_dense(o)\n",
    "        return o\n",
    "\n",
    "    def pay_attention_to(self, inputs):\n",
    "        (qw, kw, vw) = inputs[:3]\n",
    "        a = tf.einsum('bjhd,bkhd->bhjk', qw, kw)\n",
    "        a = a / self.head_size ** 0.5\n",
    "        A = tf.nn.softmax(a)\n",
    "        o = tf.einsum('bhjk,bkhd -> bjhd', A, vw)\n",
    "        return o"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Encoder(tf.keras.models.Model):\n",
    "    \"\"\"Stack of `layer_count` post-norm Transformer-style encoder blocks.\n",
    "\n",
    "    NOTE(review): `build` creates a single set of sub-layers that `call`\n",
    "    reuses on every loop iteration, so all `layer_count` blocks share\n",
    "    weights -- presumably intentional to match the saved checkpoint;\n",
    "    confirm before refactoring, since layer names/structure must match\n",
    "    the weights loaded later via `model.load_weights`.\n",
    "    \"\"\"\n",
    "    def __init__(\n",
    "        self,\n",
    "        layer_count,\n",
    "        **kwargs\n",
    "    ):\n",
    "        super(Encoder, self).__init__(**kwargs)\n",
    "        # Number of times the (shared) encoder block is applied.\n",
    "        self.layer_count = layer_count\n",
    "        \n",
    "    def build(self,input_shape):\n",
    "        # NOTE(review): heads/head_size/out_dim are hard-coded here instead of\n",
    "        # coming from `params`; keep them in sync with the checkpoint.\n",
    "        # The attribute name shadows the MultiHeadAttention class itself.\n",
    "        self.MultiHeadAttention =  MultiHeadAttention(heads=16,head_size=4,out_dim=32)\n",
    "        self.dropout_1 = Dropout(0.1)\n",
    "        self.l1 =  LayerNormalization(name='normal')\n",
    "        # Position-wise projection back to the model width (32 units).\n",
    "        self.feed1 = Dense(32,name='feed')\n",
    "        self.dropout1 = Dropout(0.1)\n",
    "        self.l_1 =  LayerNormalization(name='normal1')\n",
    "        \n",
    "    def call(self,inputs):\n",
    "        state = inputs\n",
    "        for _ in range(self.layer_count):\n",
    "            # Self-attention sub-layer with residual add, dropout, LayerNorm.\n",
    "            att1 = self.MultiHeadAttention(state)\n",
    "            att_1 = add([att1,state])\n",
    "            dropout1  = self.dropout_1(att_1)\n",
    "            l1 = self.l1(dropout1)\n",
    "            # Feed-forward sub-layer (no residual connection here, unlike the\n",
    "            # standard Transformer block).\n",
    "            feed1 =self.feed1(l1)\n",
    "            dropout_1  = self.dropout1(feed1)\n",
    "            l_1 = self.l_1(dropout_1)\n",
    "            state = l_1\n",
    "        return state"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Derive label-space sizes and id lookup tables needed by the model heads.\n",
    "params['intent_num'] = len(intent2id)\n",
    "params['slot_num'] = len(slot2id)\n",
    "params['id2intent'] = id2intent\n",
    "params['id2slot'] = id2slot"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "MultiHeadAttention() got an unexpected keyword argument 'out_dim'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-21-e88647023cd8>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0membed\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkeras\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mEmbedding\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m500\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtext_inputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0ml_1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mEncoder\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlayer_count\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membed\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      6\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      7\u001b[0m \u001b[0mconv\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkeras\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mGlobalAveragePooling1D\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ml_1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/envs/tf_2/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m    924\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0m_in_functional_construction_mode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    925\u001b[0m       return self._functional_construction_call(inputs, args, kwargs,\n\u001b[0;32m--> 926\u001b[0;31m                                                 input_list)\n\u001b[0m\u001b[1;32m    927\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    928\u001b[0m     \u001b[0;31m# Maintains info about the `Layer.call` stack.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/envs/tf_2/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py\u001b[0m in \u001b[0;36m_functional_construction_call\u001b[0;34m(self, inputs, args, kwargs, input_list)\u001b[0m\n\u001b[1;32m   1096\u001b[0m         \u001b[0;31m# Build layer if applicable (if the `build` method has been\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1097\u001b[0m         \u001b[0;31m# overridden).\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1098\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_maybe_build\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1099\u001b[0m         \u001b[0mcast_inputs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_maybe_cast_inputs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minput_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1100\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/envs/tf_2/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py\u001b[0m in \u001b[0;36m_maybe_build\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m   2641\u001b[0m         \u001b[0;31m# operations.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2642\u001b[0m         \u001b[0;32mwith\u001b[0m \u001b[0mtf_utils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmaybe_init_scope\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2643\u001b[0;31m           \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbuild\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_shapes\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# pylint:disable=not-callable\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   2644\u001b[0m       \u001b[0;31m# We must set also ensure that the layer is marked as built, and the build\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2645\u001b[0m       \u001b[0;31m# shape is stored since user defined build functions may not be calling\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-19-3112dd415e69>\u001b[0m in \u001b[0;36mbuild\u001b[0;34m(self, input_shape)\u001b[0m\n\u001b[1;32m      9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     10\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mbuild\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_shape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 11\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mMultiHeadAttention\u001b[0m \u001b[0;34m=\u001b[0m  \u001b[0mMultiHeadAttention\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mheads\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m16\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mhead_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m4\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mout_dim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     12\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout_1\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mDropout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0.1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     13\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0ml1\u001b[0m \u001b[0;34m=\u001b[0m  \u001b[0mLayerNormalization\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'normal'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mTypeError\u001b[0m: MultiHeadAttention() got an unexpected keyword argument 'out_dim'"
     ]
    }
   ],
   "source": [
    "# Build the joint intent + slot model: Embedding -> 3-layer Encoder, then an\n",
    "# intent head on pooled features and a per-token slot head on the sequence.\n",
    "# NOTE(review): the recorded output of this cell shows a TypeError\n",
    "# (\"unexpected keyword argument 'out_dim'\") from a stale MultiHeadAttention\n",
    "# definition -- re-run the class cell above before executing this one.\n",
    "tf.keras.backend.clear_session()\n",
    "text_inputs = tf.keras.layers.Input(shape=(20,),name='Input')\n",
    "embed = tf.keras.layers.Embedding(500,32)(text_inputs)\n",
    "\n",
    "l_1 = Encoder(layer_count=3)(embed)\n",
    "\n",
    "# Pool over the sequence axis for the sentence-level intent prediction.\n",
    "conv = tf.keras.layers.GlobalAveragePooling1D()(l_1)\n",
    "pre_intent = tf.keras.layers.Dense(params['intent_num'],activation='sigmoid',name = 'pre_intent',kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0))(conv)\n",
    "# Slot head is applied per token (input is the full encoder sequence).\n",
    "pre_slot = tf.keras.layers.Dense(params['slot_num'],activation='sigmoid',name = 'pre_ner',kernel_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0),\n",
    "            bias_constraint = tf.keras.constraints.MinMaxNorm(min_value=-1.0))(l_1)\n",
    "model = tf.keras.Model(text_inputs,[pre_intent,pre_slot])\n",
    "model.summary()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Restore the trained weights. Use the path already declared in `params`\n",
    "# ('model_weight') so the checkpoint location is defined in exactly one place\n",
    "# instead of being hard-coded twice in the notebook.\n",
    "model.load_weights(params['model_weight'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Convert the raw text to padded char-id sequences of length max_sent_len.\n",
    "# NOTE(review): `trans2labelid` and `inputs` are not defined in any visible\n",
    "# cell of this notebook -- confirm where they come from before re-running.\n",
    "x = trans2labelid(char2id,inputs,params['max_sent_len'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 88,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Forward pass: per-sentence intent probabilities and per-token slot\n",
    "# probabilities (see the stored outputs of the following cells).\n",
    "pre_intent,pre_slot = model.predict([x])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 89,
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[1.9590465e-09, 2.0438689e-05, 1.6659604e-04, 9.8118408e-06,\n",
       "        1.2457115e-04, 5.3791969e-06, 2.5774655e-04, 1.7932746e-04,\n",
       "        6.7066326e-06, 4.1439694e-06, 1.6192693e-05, 6.6235017e-09,\n",
       "        6.0904629e-05, 6.3020160e-08, 5.8780417e-05, 1.0455287e-05,\n",
       "        5.5672532e-05, 1.7476705e-06, 5.8308751e-06, 1.7064507e-05,\n",
       "        1.2674833e-06, 2.4345902e-06, 1.4220510e-04, 1.6723672e-05,\n",
       "        4.4443959e-06, 1.1567961e-04, 1.7529503e-06, 1.6820906e-05,\n",
       "        9.8830460e-05, 3.5042683e-06, 4.8876893e-05, 3.1959706e-05,\n",
       "        1.8547166e-06, 1.4389775e-09, 2.7226763e-08, 3.8351842e-05,\n",
       "        9.2767259e-06, 1.6059797e-06, 1.5712985e-05, 9.1204747e-06,\n",
       "        1.6777959e-07, 4.9407943e-05, 7.6303849e-07, 4.4660752e-05,\n",
       "        9.9012532e-08, 5.9471431e-06, 3.4017593e-01, 1.5474478e-04,\n",
       "        8.2082279e-06, 1.2360090e-04, 1.3779290e-06, 3.7043731e-06,\n",
       "        1.2360017e-07, 4.1321618e-06, 9.3416484e-06]], dtype=float32)"
      ]
     },
     "execution_count": 89,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pre_intent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 91,
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[1.12049856e-04, 1.43518957e-06, 2.20493130e-05, 8.24729000e-07,\n",
       "         5.63667527e-07, 1.36263034e-05, 4.87082389e-07, 4.25770082e-07,\n",
       "         3.53079258e-06, 1.87324451e-06, 3.25443239e-06, 8.77283298e-07,\n",
       "         1.05962690e-05, 4.88877845e-08, 2.21502014e-05, 4.42867645e-07,\n",
       "         6.00205851e-07, 6.67324593e-06, 1.08494336e-04, 2.67931819e-06,\n",
       "         3.50265464e-06, 1.85574424e-02, 1.97405666e-06, 1.75415060e-08,\n",
       "         2.73151932e-06, 5.80408397e-08, 1.72910050e-01, 1.71943102e-05,\n",
       "         1.02581580e-04, 2.05187214e-04, 9.74423187e-07, 5.51951598e-06,\n",
       "         1.81612199e-06, 1.71990541e-04, 2.42315349e-03, 1.32583210e-03],\n",
       "        [2.73026555e-04, 8.18765784e-06, 1.43163254e-06, 4.41073752e-07,\n",
       "         1.75201092e-06, 7.03979240e-05, 2.41747853e-06, 4.74157605e-06,\n",
       "         9.20070761e-06, 1.03604179e-05, 2.03575146e-05, 3.57565705e-06,\n",
       "         3.78580677e-04, 1.81650677e-07, 9.71773079e-06, 8.16037797e-08,\n",
       "         9.71164923e-07, 8.76408740e-05, 9.10043018e-04, 1.88064814e-06,\n",
       "         9.62789727e-07, 4.16002295e-05, 1.11356053e-06, 1.36640267e-07,\n",
       "         5.48410480e-06, 9.81976882e-06, 4.60137177e-04, 1.37350726e-05,\n",
       "         5.58770371e-05, 5.94323637e-06, 4.65260746e-06, 2.28655608e-05,\n",
       "         1.83879572e-04, 4.42971213e-05, 1.78223938e-01, 3.05961548e-05],\n",
       "        [1.20372126e-04, 3.28412170e-05, 5.46828858e-07, 1.41486770e-03,\n",
       "         7.38148799e-07, 1.74036482e-04, 2.75165144e-06, 5.62132755e-07,\n",
       "         1.71605109e-06, 1.75285331e-05, 6.37129939e-04, 1.80182047e-04,\n",
       "         1.02405284e-06, 1.15789204e-04, 8.21098864e-01, 1.46476683e-04,\n",
       "         4.77246795e-05, 1.87298901e-05, 6.07385737e-05, 1.24485174e-04,\n",
       "         1.92673224e-06, 3.54149342e-05, 2.73276692e-05, 1.04837876e-04,\n",
       "         2.87603513e-07, 4.24316058e-05, 4.79978382e-07, 1.37742216e-04,\n",
       "         5.83647170e-06, 1.34985191e-06, 4.37238896e-06, 9.98256951e-07,\n",
       "         2.22132821e-03, 6.21139043e-05, 6.23941560e-07, 1.56297392e-05],\n",
       "        [3.02316679e-04, 1.67937542e-04, 4.19478101e-06, 2.02083902e-06,\n",
       "         6.18613285e-07, 6.91262364e-01, 5.72670797e-06, 6.75933549e-08,\n",
       "         2.45809613e-08, 2.66015149e-06, 1.16378578e-05, 6.25651737e-05,\n",
       "         8.47202227e-06, 7.19544323e-06, 4.15702956e-03, 1.18103517e-05,\n",
       "         7.27830582e-07, 3.41585610e-06, 4.37682262e-04, 2.54811730e-05,\n",
       "         4.18166195e-07, 1.86365200e-04, 1.21626435e-05, 5.14867315e-06,\n",
       "         5.68433188e-06, 2.89864201e-05, 4.09092812e-04, 5.43297137e-05,\n",
       "         6.73795585e-05, 2.50215749e-07, 1.05117833e-05, 1.20689147e-05,\n",
       "         3.16467980e-04, 3.43315551e-05, 8.77159546e-05, 3.31158662e-05],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06],\n",
       "        [1.18758224e-01, 3.16042588e-06, 1.06367509e-06, 5.23991844e-07,\n",
       "         3.07059587e-08, 1.46071485e-04, 2.46454164e-08, 7.64164554e-08,\n",
       "         2.68157009e-07, 9.96007088e-07, 4.83936219e-06, 1.00169973e-06,\n",
       "         4.73537193e-06, 2.06065533e-06, 4.89557278e-05, 1.10495236e-07,\n",
       "         3.55791769e-07, 2.37288032e-06, 2.37787099e-05, 7.59816794e-06,\n",
       "         5.63355854e-07, 1.96591791e-05, 1.31127922e-06, 2.81432655e-08,\n",
       "         1.48035880e-07, 3.19086354e-07, 2.32372281e-06, 1.12137423e-05,\n",
       "         3.50560845e-06, 2.88100614e-06, 3.37001808e-07, 6.12543090e-07,\n",
       "         1.68240986e-05, 3.08011863e-06, 3.31245828e-05, 2.26205157e-06]]],\n",
       "      dtype=float32)"
      ]
     },
     "execution_count": 91,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pre_slot"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
