{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## semeval_2014\n",
    "## IAN 和ATAE"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 数据预处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "from collections import Counter\n",
    "import xml.etree.ElementTree as ET, getopt, logging, sys, random, re, copy\n",
    "from xml.sax.saxutils import escape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class Aspect:\n",
    "    '''Aspect objects contain the term (e.g., battery life) and polarity (i.e., pos, neg, neu, conflict) of an aspect.'''\n",
    "\n",
    "    def __init__(self, term, polarity, offsets):\n",
    "        self.term = term\n",
    "        self.polarity = polarity\n",
    "        self.offsets = offsets\n",
    "\n",
    "    def create(self, element):\n",
    "        self.term = element.attrib['term']\n",
    "        self.polarity = element.attrib['polarity']\n",
    "        self.offsets = {'from': str(element.attrib['from']), 'to': str(element.attrib['to'])}   \n",
    "        return self\n",
    "    \n",
    "    def get_elem(self):\n",
    "        return self.term,self.polarity,self.offsets\n",
    "\n",
    "    def update(self, term='', polarity=''):\n",
    "        self.term = term\n",
    "        self.polarity = polarity"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def get_data(filename):\n",
    "    df = pd.DataFrame(columns=['sentenceId','text','aspectTerm','polarity','offsets'])\n",
    "    elements = ET.parse('data/{}.xml'.format(filename)).getroot().findall('sentence')\n",
    "    i = 0\n",
    "    for e in elements:\n",
    "        sentence_id = e.get('id')\n",
    "        text = e.find('text').text\n",
    "        for eterms in e.findall('aspectTerms'):\n",
    "            if eterms is not None:\n",
    "                for a in eterms.findall('aspectTerm'):\n",
    "                    instance = Aspect('', '', []).create(a)\n",
    "                    aspectTerm,polarity,offsets = instance.get_elem()\n",
    "                    df.loc[i]=[sentence_id,text,aspectTerm,polarity,offsets]\n",
    "                    i = i+1\n",
    "    cnt = Counter(df.polarity.tolist())\n",
    "    print(filename+'-----')\n",
    "    print(cnt)\n",
    "    return df\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Laptop_Train_v2-----\n",
      "Counter({'positive': 987, 'negative': 866, 'neutral': 460, 'conflict': 45})\n",
      "Laptop_Test-----\n",
      "Counter({'positive': 341, 'neutral': 169, 'negative': 128, 'conflict': 16})\n",
      "Restaurants_Train_v2-----\n",
      "Counter({'positive': 2164, 'negative': 805, 'neutral': 633, 'conflict': 91})\n",
      "Restaurants_Test-----\n",
      "Counter({'positive': 728, 'neutral': 196, 'negative': 196, 'conflict': 14})\n"
     ]
    }
   ],
   "source": [
     "# Parse the four SemEval-2014 XML files and merge train/test per domain.\n",
     "lpt_tr = get_data('Laptop_Train_v2') #987,866,45,460\n",
     "lpt_te = get_data('Laptop_Test')\n",
     "lpt_df = pd.concat([lpt_tr,lpt_te],ignore_index=True)\n",
     "res_tr = get_data('Restaurants_Train_v2')#2164 805 91 633\n",
     "res_te = get_data('Restaurants_Test')\n",
     "res_df = pd.concat([res_tr,res_te],ignore_index=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "import pickle\n",
     "# Cache both parsed frames for the training section.\n",
     "# NOTE(review): this writes tmp/data.pkl, but the reload cell below opens\n",
     "# 'data.pkl' (no tmp/ prefix) -- confirm which path is intended.\n",
     "with open('tmp/data.pkl','wb')as f:\n",
     "    pickle.dump([lpt_df,res_df],f)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 数据读取"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true,
    "scrolled": true
   },
   "outputs": [],
   "source": [
     "import pickle\n",
     "# Reload the cached frames.\n",
     "# NOTE(review): path lacks the tmp/ prefix used by the dump cell above -- verify.\n",
     "with open('data.pkl','rb')as f:\n",
     "    lpt_df,res_df = pickle.load(f)\n",
     "# Train on the restaurant domain.\n",
     "df = res_df\n",
     "# res_tr 3602  res_te 1120; on the lpt dataset the ATAE loss becomes NaN -- reason unknown."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>sentenceId</th>\n",
       "      <th>text</th>\n",
       "      <th>aspectTerm</th>\n",
       "      <th>polarity</th>\n",
       "      <th>offsets</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>3121</td>\n",
       "      <td>But the staff was so horrible to us.</td>\n",
       "      <td>staff</td>\n",
       "      <td>negative</td>\n",
       "      <td>{'from': '8', 'to': '13'}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2777</td>\n",
       "      <td>To be completely fair, the only redeeming fact...</td>\n",
       "      <td>food</td>\n",
       "      <td>positive</td>\n",
       "      <td>{'from': '57', 'to': '61'}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1634</td>\n",
       "      <td>The food is uniformly exceptional, with a very...</td>\n",
       "      <td>food</td>\n",
       "      <td>positive</td>\n",
       "      <td>{'from': '4', 'to': '8'}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1634</td>\n",
       "      <td>The food is uniformly exceptional, with a very...</td>\n",
       "      <td>kitchen</td>\n",
       "      <td>positive</td>\n",
       "      <td>{'from': '55', 'to': '62'}</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1634</td>\n",
       "      <td>The food is uniformly exceptional, with a very...</td>\n",
       "      <td>menu</td>\n",
       "      <td>neutral</td>\n",
       "      <td>{'from': '141', 'to': '145'}</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "  sentenceId                                               text aspectTerm  \\\n",
       "0       3121               But the staff was so horrible to us.      staff   \n",
       "1       2777  To be completely fair, the only redeeming fact...       food   \n",
       "2       1634  The food is uniformly exceptional, with a very...       food   \n",
       "3       1634  The food is uniformly exceptional, with a very...    kitchen   \n",
       "4       1634  The food is uniformly exceptional, with a very...       menu   \n",
       "\n",
       "   polarity                       offsets  \n",
       "0  negative     {'from': '8', 'to': '13'}  \n",
       "1  positive    {'from': '57', 'to': '61'}  \n",
       "2  positive      {'from': '4', 'to': '8'}  \n",
       "3  positive    {'from': '55', 'to': '62'}  \n",
       "4   neutral  {'from': '141', 'to': '145'}  "
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Quick sanity-check of the parsed annotations.\n",
     "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from string import punctuation\n",
    "import re\n",
    "dict1 ={'positive':0,'negative':1,'neutral':2,'conflict':3}\n",
    "def pre_process(x):\n",
    "    x = x.lower()\n",
    "    x = re.sub('[^A-Za-z0-9]+', ' ', x)\n",
    "    x = x.split(' ')\n",
    "    return ' '.join(c for c in x if c not in punctuation)\n",
     "# Clean the text and aspect terms, map polarities to ints, drop 'conflict' (label 3).\n",
     "df['words'] = df['text'].apply(lambda x:pre_process(x))\n",
     "df['aspectTerm'] = df['aspectTerm'].apply(lambda x:pre_process(x))\n",
     "df['label'] = df['polarity'].apply(lambda x:dict1[x])\n",
     "df = df[df.label<3].reset_index()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 模型训练"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "from keras.models import Model\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.utils.np_utils import to_categorical\n",
    "from keras.layers import Dense, Input, LSTM, Embedding,GRU\n",
    "from keras.layers import Bidirectional,TimeDistributed\n",
    "from keras.layers import concatenate,multiply,dot,add\n",
    "from keras.layers import RepeatVector, Flatten,Permute,Reshape,Lambda,Activation,Dropout,GlobalAveragePooling1D\n",
    "from keras.callbacks import EarlyStopping,ModelCheckpoint\n",
    "from keras.optimizers import Adam,SGD,Adagrad\n",
    "from keras import backend as K\n",
    "from keras.engine.topology import Layer\n",
    "from keras import activations, regularizers, constraints,initializers\n",
    "import numpy as np\n",
    "from keras.layers import merge"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Fix RNG seeds (numpy and TensorFlow) so runs are repeatable.\n",
     "np.random.seed(147)\n",
     "tf.set_random_seed(147)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "ename": "NotImplementedError",
     "evalue": "unknown URI scheme 'e' in 'E:/code/gensim_examples/vector/GoogleNews-vectors-negative300.bin.gz'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNotImplementedError\u001b[0m                       Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-5-d2989671462a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mgensim\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmodels\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mKeyedVectors\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mw2v\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mKeyedVectors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_word2vec_format\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'E:/code/gensim_examples/vector/GoogleNews-vectors-negative300.bin.gz'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbinary\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/gensim/models/keyedvectors.py\u001b[0m in \u001b[0;36mload_word2vec_format\u001b[0;34m(cls, fname, fvocab, binary, encoding, unicode_errors, limit, datatype)\u001b[0m\n\u001b[1;32m    203\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    204\u001b[0m         \u001b[0mlogger\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"loading projection weights from %s\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 205\u001b[0;31m         \u001b[0;32mwith\u001b[0m \u001b[0mutils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msmart_open\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mfin\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    206\u001b[0m             \u001b[0mheader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mutils\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto_unicode\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfin\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreadline\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mencoding\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mencoding\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    207\u001b[0m             \u001b[0mvocab_size\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvector_size\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmap\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m  \u001b[0;31m# throws for invalid file format\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/smart_open/smart_open_lib.py\u001b[0m in \u001b[0;36msmart_open\u001b[0;34m(uri, mode, **kw)\u001b[0m\n\u001b[1;32m    133\u001b[0m         \u001b[0;31m# this method just routes the request to classes handling the specific storage\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    134\u001b[0m         \u001b[0;31m# schemes, depending on the URI protocol in `uri`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 135\u001b[0;31m         \u001b[0mparsed_uri\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mParseUri\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0muri\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    136\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    137\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mparsed_uri\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mscheme\u001b[0m \u001b[0;32min\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;34m\"file\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/smart_open/smart_open_lib.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, uri, default_scheme)\u001b[0m\n\u001b[1;32m    307\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0muri_path\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0muri\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    308\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 309\u001b[0;31m             \u001b[0;32mraise\u001b[0m \u001b[0mNotImplementedError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"unknown URI scheme %r in %r\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mscheme\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0muri\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    310\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    311\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNotImplementedError\u001b[0m: unknown URI scheme 'e' in 'E:/code/gensim_examples/vector/GoogleNews-vectors-negative300.bin.gz'"
     ]
    }
   ],
   "source": [
     "# Load Google News word2vec vectors.\n",
     "# NOTE(review): hardcoded Windows path ('E:/...') fails on this Linux host\n",
     "# (see traceback); the GloVe cell below redefines w2v instead.\n",
     "from gensim.models import KeyedVectors\n",
     "w2v = KeyedVectors.load_word2vec_format('E:/code/gensim_examples/vector/GoogleNews-vectors-negative300.bin.gz', binary=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Build a word -> 300-d vector map from GloVe 6B (text format: word v1 ... v300 per line).\n",
     "w2v = {}\n",
     "with open('vector/glove.6B/glove.6B.300d.txt',encoding='utf-8')as f:\n",
     "    for line in f:\n",
     "        values = line.split()\n",
     "        word = values[0]\n",
     "        coefs = np.asarray(values[1:], dtype='float32')\n",
     "        w2v[word] = coefs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Training hyper-parameters.\n",
     "batch_size = 16\n",
     "embedding_dims = 300  # must match the GloVe vector size loaded above\n",
     "nb_epoch = 50\n",
     "lstm_output_size = 300"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Found 4368 unique tokens.\n"
     ]
    }
   ],
   "source": [
     "# Fit the tokenizer on the cleaned sentences and build padded index sequences.\n",
     "texts = df.words.tolist()\n",
     "tokenizer = Tokenizer(num_words=20000)\n",
     "tokenizer.fit_on_texts(texts)\n",
     "sequences = tokenizer.texts_to_sequences(texts)\n",
     "word_index = tokenizer.word_index\n",
     "print('Found %s unique tokens.' % len(word_index))\n",
     "# tokenizer indices start at 1, so reserve one extra row (index 0 is padding)\n",
     "max_features = len(word_index)+1\n",
     "time_steps = max([len(i) for i in sequences])\n",
     "data = pad_sequences(sequences, maxlen=time_steps)\n",
     "labels = to_categorical(np.asarray(df.label.tolist()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "baclava\n",
      "fooood\n",
      "hadn\n",
      "presentaion\n",
      "sommlier\n",
      "experiance\n",
      "awsome\n",
      "shuizhu\n",
      "boths\n",
      "mozz\n",
      "vegtables\n",
      "bruscetta\n",
      "inludes\n",
      "bhelpuri\n",
      "sevpuri\n",
      "chaats\n",
      "fromager\n",
      "atomosphere\n",
      "pelligrino\n",
      "dotp\n",
      "tator\n",
      "massamman\n",
      "resturant\n",
      "prixe\n",
      "langostine\n",
      "somosas\n",
      "dhosas\n",
      "dissapointing\n",
      "dishs\n",
      "taramasalata\n",
      "cuccumber\n",
      "orrechiete\n",
      "unpretensious\n",
      "pizzaria\n",
      "unattentive\n",
      "pumkin\n",
      "tortelini\n",
      "virgnin\n",
      "rasamalai\n",
      "offerng\n",
      "carpaachio\n",
      "exellent\n",
      "apetizers\n",
      "varities\n",
      "thh\n",
      "freindly\n",
      "wintermelon\n",
      "mayonaisse\n",
      "excelent\n",
      "recomendations\n",
      "consisent\n",
      "rearely\n",
      "xcept\n",
      "intitally\n",
      "appitizers\n",
      "candlevery\n",
      "delcious\n",
      "edamames\n",
      "reccomend\n",
      "servces\n",
      "tabouleh\n",
      "kompot\n",
      "empenadas\n",
      "plaintains\n",
      "bday\n",
      "impecible\n",
      "magnificant\n",
      "unaccommodating\n",
      "hollondaise\n",
      "bruschettas\n",
      "paninis\n",
      "tramezzinis\n",
      "nosiy\n",
      "buttah\n",
      "fornini\n",
      "nothingon\n",
      "insde\n",
      "selecion\n",
      "welll\n",
      "panchetta\n",
      "raddichio\n",
      "excpetiona\n",
      "delictable\n",
      "atmoshere\n",
      "dinnerbroker\n",
      "masaman\n",
      "itsuperb\n",
      "secondi\n",
      "meatsauce\n",
      "mussaman\n",
      "ummmmm\n",
      "ingrediants\n",
      "horedevous\n",
      "acousitcs\n",
      "sandwhich\n",
      "caserole\n",
      "areally\n",
      "sopaipillas\n",
      "standed\n",
      "incrediby\n",
      "quesedilla\n",
      "erbazzone\n",
      "poori\n",
      "curtious\n",
      "kababs\n",
      "kruno\n",
      "refleshment\n",
      "outisde\n",
      "pickels\n",
      "bagles\n",
      "unbeliavably\n",
      "portioins\n",
      "minimun\n",
      "zabars\n",
      "mozzerella\n",
      "1oz\n",
      "offeres\n",
      "inovated\n",
      "cheff\n",
      "recomend\n",
      "lonk\n",
      "scatty\n",
      "parmesean\n",
      "overpack\n",
      "margheritta\n",
      "asthetically\n",
      "afortune\n",
      "impecable\n",
      "brushetta\n",
      "dahkin\n",
      "delicous\n",
      "prooudly\n",
      "oreganta\n",
      "bgel\n",
      "overated\n",
      "onglet\n",
      "waterbug\n",
      "blantently\n",
      "40times\n",
      "yellowfun\n",
      "skiline\n",
      "roofdeck\n",
      "pleasnt\n",
      "everythig\n",
      "scalina\n",
      "parathas\n",
      "wasabe\n",
      "realtively\n",
      "atmoshpere\n",
      "agreable\n",
      "unforgetable\n",
      "anyones\n",
      "manydifferent\n",
      "varietys\n",
      "tradional\n",
      "gnochi\n",
      "makhani\n",
      "specjal\n",
      "dimsum\n",
      "beatiful\n",
      "atlhough\n",
      "awkardly\n",
      "margharita\n",
      "manhatten\n",
      "decour\n",
      "recomends\n",
      "couteract\n",
      "convienent\n",
      "falafal\n",
      "gosht\n",
      "favs\n",
      "attentative\n",
      "heatlhy\n",
      "saketini\n",
      "quaility\n",
      "inconsistant\n",
      "mayonaise\n",
      "branzini\n",
      "focacchia\n",
      "pialla\n",
      "guac\n",
      "amanzing\n",
      "whem\n",
      "lasagnette\n",
      "hesititate\n",
      "deff\n",
      "panang\n",
      "ocasionally\n",
      "freshmess\n",
      "vegatables\n",
      "trendi\n",
      "atmorphere\n",
      "mcds\n",
      "40pp\n",
      "neighboors\n",
      "underdeserving\n",
      "corriander\n",
      "tasts\n",
      "metrazur\n",
      "narone\n",
      "liquers\n",
      "courtesey\n",
      "omelletes\n",
      "aunthentic\n",
      "lambchops\n",
      "dowtown\n",
      "manhathan\n",
      "perrrrrrrrrfect\n",
      "reccommend\n",
      "eastsider\n",
      "aanother\n",
      "moshphere\n",
      "disapointing\n",
      "indistinguished\n",
      "seved\n",
      "recommand\n",
      "asbolute\n",
      "cheescake\n",
      "energeic\n",
      "pongsri\n",
      "uncourteous\n",
      "advanatage\n",
      "fornino\n",
      "receipies\n",
      "15pm\n",
      "sastifying\n",
      "stauff\n",
      "sligtly\n",
      "thius\n",
      "apppetizers\n",
      "noticicing\n",
      "dissappointed\n",
      "boarderline\n",
      "ballato\n",
      "palets\n",
      "inobtrusive\n",
      "jsut\n",
      "barebecued\n",
      "playfull\n",
      "outragous\n",
      "sushimi\n",
      "sripraphai\n",
      "extrmely\n",
      "taxan\n",
      "aweful\n",
      "coem\n",
      "waterbugs\n",
      "botle\n",
      "daiquiries\n",
      "sandwhiches\n",
      "kennsington\n",
      "parmigana\n",
      "ottimo\n",
      "bijin\n",
      "saet\n",
      "restaraurant\n",
      "toninos\n",
      "eatable\n",
      "taglierini\n",
      "pleasently\n",
      "fettucino\n",
      "attitue\n",
      "satified\n",
      "delivary\n",
      "nidos\n",
      "glechik\n",
      "kalmata\n",
      "imposeing\n",
      "swiftys\n",
      "confitte\n",
      "upstrairs\n",
      "marscapone\n",
      "hanx\n"
     ]
    }
   ],
   "source": [
     "# Embedding lookup table: row i holds the vector for word index i.\n",
     "# NOTE(review): row 0 (the padding index) keeps its random [0,1) init -- confirm intended.\n",
     "embedding_matrix = np.random.random((max_features, embedding_dims))\n",
     "for word, i in word_index.items():\n",
     "    if word in w2v:\n",
     "        embedding_matrix[i] = w2v[word]\n",
     "    else:\n",
     "        # Out-of-vocabulary word: log it and use a small random vector instead.\n",
     "        print(word)\n",
     "        embedding_matrix[i] = np.random.uniform(low=-0.01,high=0.01,size=(300,))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## common_lstm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "input_1 (InputLayer)         (None, 70)                0         \n",
      "_________________________________________________________________\n",
      "embedding_1 (Embedding)      (None, 70, 300)           1310700   \n",
      "_________________________________________________________________\n",
      "dropout_1 (Dropout)          (None, 70, 300)           0         \n",
      "_________________________________________________________________\n",
      "lstm_1 (LSTM)                (None, 300)               721200    \n",
      "_________________________________________________________________\n",
      "dense_1 (Dense)              (None, 3)                 903       \n",
      "=================================================================\n",
      "Total params: 2,032,803\n",
      "Trainable params: 2,032,803\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      "Train on 2612 samples, validate on 2110 samples\n",
      "Epoch 1/50\n",
      "  48/2612 [..............................] - ETA: 76s - loss: 1.4925 - acc: 0.4375 "
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-9-13eaafac4c1f>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     25\u001b[0m                  \u001b[0mvalidation_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2612\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlabels\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2612\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     26\u001b[0m                  \u001b[0mcallbacks\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mearly_stopping\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 27\u001b[0;31m                 shuffle=True)\n\u001b[0m",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)\u001b[0m\n\u001b[1;32m   1505\u001b[0m                               \u001b[0mval_f\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mval_f\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mval_ins\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mval_ins\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshuffle\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1506\u001b[0m                               \u001b[0mcallback_metrics\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcallback_metrics\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1507\u001b[0;31m                               initial_epoch=initial_epoch)\n\u001b[0m\u001b[1;32m   1508\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1509\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mevaluate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m32\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msample_weight\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_fit_loop\u001b[0;34m(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch)\u001b[0m\n\u001b[1;32m   1154\u001b[0m                 \u001b[0mbatch_logs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'size'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_ids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1155\u001b[0m                 \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_index\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_logs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1156\u001b[0;31m                 \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1157\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1158\u001b[0m                     \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m   2267\u001b[0m         updated = session.run(self.outputs + [self.updates_op],\n\u001b[1;32m   2268\u001b[0m                               \u001b[0mfeed_dict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2269\u001b[0;31m                               **self.session_kwargs)\n\u001b[0m\u001b[1;32m   2270\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mupdated\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   2271\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    893\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    894\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 895\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    896\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    897\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1107\u001b[0m     \u001b[0;31m# Create a fetch handler to take care of the structure of fetches.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1108\u001b[0m     fetch_handler = _FetchHandler(\n\u001b[0;32m-> 1109\u001b[0;31m         self._graph, fetches, feed_dict_tensor, feed_handles=feed_handles)\n\u001b[0m\u001b[1;32m   1110\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1111\u001b[0m     \u001b[0;31m# Run request and get response.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, graph, fetches, feeds, feed_handles)\u001b[0m\n\u001b[1;32m    411\u001b[0m     \"\"\"\n\u001b[1;32m    412\u001b[0m     \u001b[0;32mwith\u001b[0m \u001b[0mgraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_default\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 413\u001b[0;31m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_mapper\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    414\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetches\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    415\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_targets\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    231\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    232\u001b[0m       \u001b[0;31m# NOTE(touts): This is also the code path for namedtuples.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 233\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0m_ListFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    234\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    235\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0m_DictFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    338\u001b[0m     \"\"\"\n\u001b[1;32m    339\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_fetch_type\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 340\u001b[0;31m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0m_FetchMapper\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfor_fetch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mfetch\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    341\u001b[0m     \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_unique_fetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_value_indices\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_uniquify_fetches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mappers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mfor_fetch\u001b[0;34m(fetch)\u001b[0m\n\u001b[1;32m    239\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtensor_type\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    240\u001b[0m           \u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfetch_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 241\u001b[0;31m           \u001b[0;32mreturn\u001b[0m \u001b[0m_ElementFetchMapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    242\u001b[0m     \u001b[0;31m# Did not find anything.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    243\u001b[0m     raise TypeError('Fetch argument %r has invalid type %r' %\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, fetches, contraction_fn)\u001b[0m\n\u001b[1;32m    248\u001b[0m   \u001b[0;34m\"\"\"Fetch mapper for singleton tensors and ops.\"\"\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    249\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 250\u001b[0;31m   \u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontraction_fn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    251\u001b[0m     \"\"\"Creates an _ElementFetchMapper.\n\u001b[1;32m    252\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "def create_model():\n",
    "    inputs = Input(shape=(time_steps,),dtype='int32')\n",
    "    x = Embedding(max_features,\n",
    "                  embedding_dims,\n",
    "                  trainable=True,\n",
    "                  mask_zero=True,\n",
    "                  weights=[embedding_matrix],\n",
    "                 )(inputs)\n",
    "    x = Dropout(0.5)(x)\n",
    "#     小数据GRU？\n",
    "    lstm_out = LSTM(lstm_output_size,dropout=0.2,recurrent_dropout=0.2)(x)\n",
    "    predictions = Dense(3,activation='softmax')(lstm_out)\n",
    "    model = Model(inputs=inputs, outputs=predictions)\n",
    "#     opt = Adam(lr=0.0005)\n",
    "    model.compile(optimizer='Adagrad',\n",
    "                  loss='categorical_crossentropy',\n",
    "                  metrics=['accuracy'])\n",
    "    return model\n",
    "early_stopping = EarlyStopping(monitor='val_loss',patience=10)\n",
    "model = create_model()\n",
    "model.summary()\n",
    "model.fit(data[:2612], labels[:2612],\n",
    "                 batch_size=batch_size,\n",
    "                 epochs=nb_epoch,verbose=1,\n",
    "                 validation_data=(data[2612:],labels[2612:]),\n",
    "                 callbacks=[early_stopping],\n",
    "                shuffle=True)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Attention-based LSTM for Aspect-level Sentiment Classification"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "aspects = df.aspectTerm.tolist()\n",
    "aspects_seq = tokenizer.texts_to_sequences(aspects)\n",
    "max_aspect_length = max([len(i) for i in aspects_seq])\n",
    "aspect_data = pad_sequences(aspects_seq, maxlen=max_aspect_length)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class MaskAverageLayer(Layer):\n",
    "    '''得到评价对象中所有词向量的平均值\n",
    "    '''\n",
    "    def __init__(self, keepdims=True,**kwargs):\n",
    "        self.support_mask=True\n",
    "        self.keepdims=keepdims\n",
    "        super(MaskAverageLayer, self).__init__(**kwargs)\n",
    "    \n",
    "    def call(self, x, mask=None):\n",
    "        aspect_x=x    #(None*4*300)\n",
    "        mask_1 = tf.to_float(K.expand_dims(mask, 2)) #None *time_steps*1\n",
    "        mask_sum = K.sum(tf.to_float(mask_1),axis=1)  #None *1*1\n",
    "#         mask(None*timesteps)\n",
    "        inter = aspect_x*mask_1\n",
    "        result = K.sum(inter,axis=1) * K.pow(mask_sum,-1) #None*time_steps\n",
    "        return K.repeat(result,1)\n",
    "#         return K.expand_dims(K.sum(inter,axis=1)/mask_sum,1)\n",
    "        \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        if self.keepdims:\n",
    "            return (input_shape[0],1,input_shape[2])\n",
    "        else:\n",
    "            return (input_shape[0],input_shape[2])\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        return None\n",
    "    \n",
    "class ConnectAspectLayer(Layer):\n",
    "    def __init__(self, **kwargs):\n",
    "        self.support_mask=True\n",
    "        super(ConnectAspectLayer, self).__init__(**kwargs)\n",
    "    \n",
    "    def call(self, x, mask=None):\n",
    "        left_x=x[0]\n",
    "        aspect_vector=x[1] #32x1*100\n",
    "        aspect_vector=K.repeat_elements(aspect_vector,K.int_shape(left_x)[1],axis=1) #32x24x100\n",
    "        aspect_vector=aspect_vector*tf.to_float(K.expand_dims(mask[0], 2))\n",
    "\n",
    "        return K.concatenate([left_x,aspect_vector])\n",
    "        \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        \n",
    "        return (input_shape[0][0],input_shape[0][1],input_shape[0][2]+input_shape[1][2])\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        if mask:\n",
    "            return mask[0]\n",
    "        else:\n",
    "            return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {
    "collapsed": true,
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# Attention-based LSTM for Aspect-level Sentiment Classification  维度举例time_steps:100 aspect_length:10\n",
    "def create_ataelstm():\n",
    "    words_input = Input(shape=(time_steps,),dtype='int32')  #None 100 \n",
    "    target_input = Input(shape=(max_aspect_length,),dtype='int32')   #None 10 \n",
    "    embedding_layer = Embedding(max_features,\n",
    "                                embedding_dims,\n",
    "                                trainable=True,\n",
    "                                weights=[embedding_matrix],\n",
    "                                mask_zero=True,\n",
    "                                name='word_embedding')\n",
    "    words_matrix = embedding_layer(words_input)     #None 100 *300       \n",
    "    target_vector = embedding_layer(target_input)       #None 10 100\n",
    "    target_vector = MaskAverageLayer()(target_vector)   #None  1 100\n",
    "    input_matrix = ConnectAspectLayer()([words_matrix,target_vector])  #None 100 *600  \n",
    "    h = LSTM(embedding_dims,dropout=0.3,recurrent_dropout=0.3,return_sequences=True,name='lstm')(input_matrix) #None*100*300\n",
    "    h2 = TimeDistributed(Dense(embedding_dims))(h)\n",
    "    target_matrix =Lambda(lambda x:K.repeat_elements(x,time_steps,axis=1))(target_vector)\n",
    "    target_matrix= TimeDistributed(Dense(embedding_dims))(target_matrix) #100*300\n",
    "    m = concatenate([h2,target_matrix],name='hiddenaspect') #100*600\n",
    "    m = Activation('tanh')(m)\n",
    "#     mask?\n",
    "    attention_probs = TimeDistributed(Dense(1,activation = 'softmax',name='attention'))(m) #100\n",
    "#     dot -.multiply\n",
    "    r = dot([attention_probs,h],axes=1,name='attention_mul')  #(None,1,300)\n",
    "    r = Flatten()(r)\n",
    "    r = Dropout(0.2)(r)\n",
    "    r = Dense(embedding_dims)(r)\n",
    "    h = Lambda(lambda x:tf.slice(x, [0,time_steps-1, 0], [-1, 1, -1]))(h)\n",
    "    h = Flatten()(h)\n",
    "    h = Dropout(0.2)(h)\n",
    "    h = Dense(embedding_dims)(h)\n",
    "    h = add([r,h])\n",
    "#     自己编写层\n",
    "    h_star = Activation('tanh')(h) #300维度句子向量\n",
    "    h_star = Dropout(0.5)(h_star)\n",
    "    predictions = Dense(3,activation='softmax')(h_star)\n",
    "    model = Model(inputs=[words_input,target_input],outputs=predictions)\n",
    "#     opt = Adagrad(lr=0.01, beta_1=0.9, beta_2=0.999, epsilon=1e-08)\n",
    "    model.compile(loss='categorical_crossentropy',optimizer='Adagrad',\n",
    "                 metrics=['accuracy'])\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 3602 samples, validate on 1120 samples\n",
      "Epoch 1/50\n",
      "3602/3602 [==============================] - 26s - loss: 1.3687 - acc: 0.4914 - val_loss: 0.9070 - val_acc: 0.6509\n",
      "Epoch 2/50\n",
      "3602/3602 [==============================] - 23s - loss: 1.1270 - acc: 0.5389 - val_loss: 0.9491 - val_acc: 0.6545\n",
      "Epoch 3/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.9476 - acc: 0.6074 - val_loss: 0.8580 - val_acc: 0.6768\n",
      "Epoch 4/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.8133 - acc: 0.6641 - val_loss: 0.7220 - val_acc: 0.7268\n",
      "Epoch 5/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.7261 - acc: 0.7010 - val_loss: 0.7269 - val_acc: 0.7366\n",
      "Epoch 6/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.6631 - acc: 0.7277 - val_loss: 0.7617 - val_acc: 0.7384\n",
      "Epoch 7/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.6139 - acc: 0.7457 - val_loss: 0.7123 - val_acc: 0.7393\n",
      "Epoch 8/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.5591 - acc: 0.7696 - val_loss: 0.7259 - val_acc: 0.7429\n",
      "Epoch 9/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.5095 - acc: 0.7946 - val_loss: 0.7107 - val_acc: 0.7339\n",
      "Epoch 10/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.5011 - acc: 0.8051 - val_loss: 0.8029 - val_acc: 0.7500\n",
      "Epoch 11/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.4584 - acc: 0.8173 - val_loss: 0.7124 - val_acc: 0.7580\n",
      "Epoch 12/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.4197 - acc: 0.8343 - val_loss: 0.7524 - val_acc: 0.7598\n",
      "Epoch 13/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.4110 - acc: 0.8373 - val_loss: 0.7270 - val_acc: 0.7643\n",
      "Epoch 14/50\n",
      "3602/3602 [==============================] - 23s - loss: 0.3775 - acc: 0.8493 - val_loss: 0.7733 - val_acc: 0.7625\n",
      "Epoch 15/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.3583 - acc: 0.8556 - val_loss: 0.7971 - val_acc: 0.7670\n",
      "Epoch 16/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.3317 - acc: 0.8676 - val_loss: 0.7947 - val_acc: 0.7607\n",
      "Epoch 17/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.3281 - acc: 0.8681 - val_loss: 0.8016 - val_acc: 0.7688\n",
      "Epoch 18/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.3057 - acc: 0.8745 - val_loss: 0.8423 - val_acc: 0.7446\n",
      "Epoch 19/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.2874 - acc: 0.8865 - val_loss: 0.8748 - val_acc: 0.7634\n",
      "Epoch 20/50\n",
      "3602/3602 [==============================] - 24s - loss: 0.2743 - acc: 0.8917 - val_loss: 0.8447 - val_acc: 0.7491\n"
     ]
    }
   ],
   "source": [
    "model = create_ataelstm()\n",
    "early_stopping = EarlyStopping(monitor='val_loss',patience=10)\n",
    "hist = model.fit([data[:3602],aspect_data[:3602]],labels[:3602],\n",
    "                 batch_size=16,\n",
    "                 epochs=nb_epoch,verbose=1,\n",
    "                 validation_data=([data[3602:],aspect_data[3602:]],labels[3602:]),\n",
    "                 callbacks=[early_stopping],\n",
    "                shuffle=True)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([ 0.50860633,  0.4611327 ,  0.39255969,  0.33592449,  0.29900056,\n",
       "        0.2723487 ,  0.25430316,  0.23042754,  0.20544142,  0.19489173,\n",
       "        0.18267629,  0.16574125,  0.1626874 ,  0.15074958,  0.14436424,\n",
       "        0.13242643,  0.13187118,  0.12548584,  0.11354803,  0.10827318])"
      ]
     },
     "execution_count": 72,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.array([1]) - np.array(hist.history['acc'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 75,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYwAAAEWCAYAAAB1xKBvAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd8VeX9wPHPNxuyGEmAJIQtewcQBQEn4EAQxFWrrUWc\ntdW2tP6qtpVqW7XWugdaW7eAG3GhCIos2XtKwgorAwhkfH9/nBO4xIyb5N7cm+T7fr3uK/eec57n\nfO9Jcr/3nOc5zyOqijHGGFOZkEAHYIwxpm6whGGMMcYrljCMMcZ4xRKGMcYYr1jCMMYY4xVLGMYY\nY7xiCcPUSyJytYh8Eug4qkNE7hOR/7nP00QkT0RCK9u2mvtaLSLDq1veNCyWMMwpRORLETkoIpEe\ny2a5H1p5IlIgIsc9Xj/tsV07ESkWkafKqFdF5LBHuTwR+W05MbwkIvfX5H2o6iuqen5N6ggGqvqD\nqsaoalFN6yrruKpqd1X9sqZ1m4YhLNABmOAhIm2BoUA2cAnwFoCqjvLY5iUgQ1X/r4wqrgUOAhNF\n5A5VPVZqfW9V3eSDOMNUtbCm9ZjgUNbvs6q/Y/ubqB12hmE8XQssAF4CflqVgiIibvn/AwqAi6sT\ngIhMAq4GfuuehbzvLt8mIr8TkRXAYREJE5EpIrJZRHJFZI2IjPWo5zoRmefxWkVksohsFJFDIvKE\nG3Pp/SeLyFERaeaxrK+I7BORcBHpKCJfiUi2u+yNct7HLBG5tdSy5SIyzn3+LxHZISI5IrJERIaW\nU09bN/Yw93U7d/+5IvIpkFBq+7dEZLcb31wR6e7FcT3XfR4pIo+KyE738WjJmaaIDBeRDBG5U0T2\nisguEbm+7N8iiEi8iLzgbpcpIveXXFZzfzfzReSfIrIfuK+cZSEi8n8ist3d58siEl/quPxcRH4A\nvigvFuM7ljCMp2uBV9zHBSLSogplhwCpwOvAm1Qx4ZRQ1Wfd/f/dvRTjmXiuBC4EmrjfJjfjnBHF\nA38C/icirSqo/iJgANALuBy4oIz97wS+BS7zWHwV8LaqFgB/AT4BmuK833+Xs6/X3HgBEJFuQBvg\nQ3fRIqAP0Ax4FXhLRKIqiL3Eq8ASnETxF358nGcBnYAkYCnOsazsuJa4Gzjdjas3MBDnC0CJljjH\nOgX4OfCEiDQtJ86XgEKgI9AXOB+4wWP9IGAL0AKYWs6y69zHCKA9EAM8Xmo/w4CulPG7NH6gqvaw\nBzgf+AVAgvt6HfCrMrZ7Cbi/jOXPA++4zwe7dSV5rFcgBzjk8bignFh+tA9gG/CzSt7DMmCM+/w6\nYF6p/Q/xeP0mMKWcem4AvnCfC7ADOMt9/TLwLJBaSSyxwGGgjft6KjCtgu0P4lyyA7gP+J/7vK0b\nexiQhvMhHO1R7tWSbcuos4lbNr6S43qu+3wzMNpj3QXANvf5cOAoEOaxfi9wehn7bQEcAxp5LLsS\nmOPxu/mhVJmyln0O3OzxurP7dxXmcVzaB/p/pyE97AzDlPgp8Imq7nNfv4qXZwki0giYwMlvs98C\nP+B8M/fUT1WbeDxmVzHGHaX2e62ILHMvMR0CelDqEk0puz2eH8H5xlqW6cBg92zlLKAY+Npd91uc\nJLJQnB5GPyurAlXNxTmbuMJddCXu8XFjv0tE1rqXjg7hfHOvKHaAZOCgqh72WLbdo85QEXnQvUyX\ng5MM8KJez/q3e7ze7i4rsV9PbSco7xi2AcKBXR6/m2dwznpK7CijXOllZcUThpOQKqrH+Ik1epuS\nD/zLgVARKflQjQSaiEhvVV1eSRVjgTjgSREpuUTTBCfhPFqNkMobQvnEchFpAzwHnAN8q6pFIrIM\n58O8RlT1oDhdcifiXO54Xd2vuKq6G/iFG8MQ4DMRmatlN+a/BtwrInOBKGCOW24oTuI5B1itqsUi\nctCL2HcBTUUk2iNppHHyuFwFjAHOxU
kW8ThnLiX1VjY09U6cD/vVHnXvrKRMWXbgnGEkaPkN0WXF\nUnpZSTwlSs6w9uBcDiyvHuMndoZhAC4FioBuONev++B8UH6N065RmZ8C04CeHuXPBHqLSM9qxLMH\n55p1RaJxPiyyANwG2B7V2Fd5XsV57+Pd57j7mSAiJR9WB90Yisup4yOcD7w/A2+oasl2sTgffFlA\nmIjcg5NwK6Sq24HFwJ9EJMJNWJ5tEbE4H9T7gcbAX0tVUdlxfQ34PxFJFJEE4B6gyvd4qOounHae\nh0Ukzm287iAiw6pY1WvAr9yG/hic9/NGBUnI+JklDAPOB/6L6vT5313ywGlgvLqkh05ZRCQF55vy\no55lVXUJ8DGnXtZaLqfeh1He2ccLQDf3csY7ZW2gqmuAh3EaqPfgJKv5VXzfFXkPp/F4d6kzrAHA\ndyKS527zS1XdUk6Mx4AZON/4X/VYNRvn2GzAucySj/eXVq7CaRw+ANyL06ZS4mW3vkxgDU6PN0+V\nHdf7cRLSCmAlTqN5de+HuRaIcOM4CLwNVNQhoSzTgP8Cc4GtOMfptmrGY3xA3DNtY4wxpkJ2hmGM\nMcYrljCMMcZ4xRKGMcYYr1jCMMYY45V6dR9GQkKCtm3bNtBhGGNMnbFkyZJ9qprozbb1KmG0bduW\nxYsXBzoMY4ypM0Rke+VbOeySlDHGGK9YwjDGGOMVSxjGGGO8Uq/aMIwx9UdBQQEZGRnk5+cHOpR6\nISoqitTUVMLDw6tdhyUMY0xQysjIIDY2lrZt2yI/nhzRVIGqsn//fjIyMmjXrl2167FLUsaYoJSf\nn0/z5s0tWfiAiNC8efMan61ZwjDGBC1LFr7ji2PZ4BNGUbHy1JebWbbjUKBDMcaYoNbgE8bh44W8\n/O027nxzGfkFRYEOxxgTJA4dOsSTTz5Z5XKjR4/m0KH6+QXUrwlDREaKyHoR2SQiU8pYP9yd03iZ\n+7jH27K+EhcVzj/G92Zz1mH+/vF6f+3GGFPHlJcwCgsrnvDvo48+okmTJv4KK6D81ktKREKBJ4Dz\ngAxgkYi8586U5ulrVb2ommV9YkinBK4d3IZp87dyXrcWDO7Q3B+7McbUIVOmTGHz5s306dOH8PBw\noqKiaNq0KevWrWPDhg1ceuml7Nixg/z8fH75y18yadIk4OQQRXl5eYwaNYohQ4bwzTffkJKSwrvv\nvkujRo0C/M6qz5/dagcCm0qmrxSR13EmqPfmQ78mZatlyqguzN2QxV1vLefjO4YSG1X9vsrGGN/6\n0/urWbMzx6d1dkuO496Lu5e7/sEHH2TVqlUsW7aML7/8kgsvvJBVq1ad6JY6bdo0mjVrxtGjRxkw\nYACXXXYZzZuf+mVz48aNvPbaazz33HNcfvnlTJ8+nWuuucan76M2+fOSVAqnzlOc4S4r7QwRWSEi\ns0Sk5LfnbVlEZJKILBaRxVlZWdUOtnFEGA9f3odd2Ue5/4O11a7HGFM/DRw48JR7GB577DF69+7N\n6aefzo4dO9i4ceOPyrRr144+ffoA0L9/f7Zt21Zb4fpFoG/cWwqkqWqeiIwG3gE6VaUCVX0WeBYg\nPT29RhOU92/TlBuHdeCpLzdzQY8WnN2lRU2qM8b4SEVnArUlOjr6xPMvv/ySzz77jG+//ZbGjRsz\nfPjwMu9xiIyMPPE8NDSUo0eP1kqs/uLPM4xMoLXH61R32QmqmqOqee7zj4BwEUnwpqy/3HFuJ7q0\njOV301dy8PDx2tilMSYIxcbGkpubW+a67OxsmjZtSuPGjVm3bh0LFiyo5egCw58JYxHQSUTaiUgE\ncAXwnucGItJS3LtJRGSgG89+b8r6S2RYKI9c3odDR47zx3dX1cYujTFBqHnz5px55pn06NGD3/zm\nN6esGzlyJIWFhXTt2pUpU6Zw+umnByjK2uW3S1KqWigitwKzgVBgmqquFpHJ7vqngfHATSJSCBwF\nrl
BVBcos669YS+uWHMcd557GP2av54LuO7m4d3Jt7doYE0ReffXVMpdHRkYya9asMteVtFMkJCSw\natXJL5133XWXz+OrbX5tw3AvM31UatnTHs8fBx73tmxtuvGs9ny2dg9/fHcVg9o1IykuKlChGGNM\nUGjwd3qXJyw0hIcn9Ca/oIjfTV+Bc+JjjDENlyWMCrRPjGHKyC7MWZ/FG4t2VF7AGGPqMUsYlbh2\ncFvO6NCcv3ywhh0HjgQ6HGOMCRhLGJUICRH+MaE3ISLc9dZyiovt0pQxpmGyhOGFlCaNuOfibny3\n9QDT5m8NdDjGGBMQljC8NL5/Kud2bcHfZ69n096yb+YxxjRcMTExAOzcuZPx48eXuc3w4cNZvHhx\nhfU8+uijHDly8vJ3MA2XbgnDSyLCA+N6EhMZxq/fXE5BUXGgQzLGBKHk5GTefvvtapcvnTCCabh0\nSxhVkBgbydRLe7AiI5sn52wOdDjGGD+aMmUKTzzxxInX9913H/fffz/nnHMO/fr1o2fPnrz77rs/\nKrdt2zZ69OgBwNGjR7niiivo2rUrY8eOPWUsqZtuuon09HS6d+/OvffeCzgDGu7cuZMRI0YwYsQI\nwBkufd++fQA88sgj9OjRgx49evDoo4+e2F/Xrl35xS9+Qffu3Tn//PP9NmZVoAcfrHNG9WzFpX2S\n+fcXGzm7SxI9U+MDHZIx9d+sKbB7pW/rbNkTRj1Y7uqJEydyxx13cMsttwDw5ptvMnv2bG6//Xbi\n4uLYt28fp59+Opdcckm582U/9dRTNG7cmLVr17JixQr69et3Yt3UqVNp1qwZRUVFnHPOOaxYsYLb\nb7+dRx55hDlz5pCQkHBKXUuWLOHFF1/ku+++Q1UZNGgQw4YNo2nTprU2jLqdYVTDny7pQfOYCH5t\n07oaU2/17duXvXv3snPnTpYvX07Tpk1p2bIlf/jDH+jVqxfnnnsumZmZ7Nmzp9w65s6de+KDu1ev\nXvTq1evEujfffJN+/frRt29fVq9ezZo1FU/3M2/ePMaOHUt0dDQxMTGMGzeOr7/+Gqi9YdTtDKMa\n4huH87fLenHdi4t45NMN/GF010CHZEz9VsGZgD9NmDCBt99+m927dzNx4kReeeUVsrKyWLJkCeHh\n4bRt27bMYc0rs3XrVh566CEWLVpE06ZNue6666pVT4naGkbdzjCqaXjnJK4alMZzX29h4dYDgQ7H\nGOMHEydO5PXXX+ftt99mwoQJZGdnk5SURHh4OHPmzGH79u0Vlj/rrLNODGC4atUqVqxYAUBOTg7R\n0dHEx8ezZ8+eUwYyLG9Y9aFDh/LOO+9w5MgRDh8+zMyZMxk6dKgP323lLGHUwN2ju9K6aWPuems5\nh49VPDG8Mabu6d69O7m5uaSkpNCqVSuuvvpqFi9eTM+ePXn55Zfp0qVLheVvuukm8vLy6Nq1K/fc\ncw/9+/cHoHfv3vTt25cuXbpw1VVXceaZZ54oM2nSJEaOHHmi0btEv379uO666xg4cCCDBg3ihhtu\noG/fvr5/0xWQ+jSoXnp6ulbWx9nXFm49wMRnv+WGIe24+8JutbpvY+qztWvX0rWrXe71pbKOqYgs\nUdV0b8rbGUYNDWzXjLF9Uvjfgh84YDP0GWPqMUsYPjB5eAeOFhTx0jfbAh2KMcb4jV8ThoiMFJH1\nIrJJRKZUsN0AESkUkfEey7aJyEoRWSYitXudqYpOaxHLed1a8J9vtpFnbRnG+Ex9umQeaL44ln5L\nGCISCjwBjAK6AVeKyI8u8rvb/Q34pIxqRqhqH2+vrwXSzcM7kH20gNe++yHQoRhTL0RFRbF//35L\nGj6gquzfv5+oqJrNHOrP+zAGAptUdQuAiLwOjAFK351yGzAdGODHWPyub1pTBrdvzvPztnDtGW2I\nDAsNdEjG1GmpqalkZGSQlZUV6FDqhaioKFJTU2tUhz8TRgrgOU1d
BjDIcwMRSQHGAiP4ccJQ4DMR\nKQKeUdVny9qJiEwCJgGkpaX5JvJqunlEB37ywkJmLM3kyoGBjcWYui48PJx27doFOgzjIdCN3o8C\nv1PVsoZ+HaKqfXAuad0iImeVVYGqPquq6aqanpiY6M9YKzWkYwI9U+J55qvNFNlES8aYesafCSMT\naO3xOtVd5ikdeF1EtgHjgSdF5FIAVc10f+4FZuJc4gpqIsLNwzuwbf8RPlq5K9DhGGOMT/kzYSwC\nOolIOxGJAK4A3vPcQFXbqWpbVW0LvA3crKrviEi0iMQCiEg0cD6wyo+x+swF3VvSITGaJ7/cbI11\nxph6xW8JQ1ULgVuB2cBa4E1VXS0ik0VkciXFWwDzRGQ5sBD4UFU/9lesvhQSIkwe1oG1u3L4coM1\n1hlj6g8bGsQPjhcWM/wfc0hp2oi3Jp8R6HCMMaZcNjRIgEWEhfCLs9qzaNtBFm2zkWyNMfWDJQw/\nuWJAGs2iI3hyzqZAh2KMMT5hCcNPGkWEcv0ZbZmzPos1O3MCHY4xxtSYJQw/unZwW6IjQnnqq82B\nDsUYY2rMEoYfxTcO55rT2/Dhip1s23c40OEYY0yNWMLws58PaUdYaAjPzN0S6FCMMaZGLGH4WVJc\nFBP6pzJ9SQZ7cqo/ybsxxgSaJYxacONZHSgsLuaFeVsDHYoxxlSbJYxakNa8MRf3TuaVBds5dMSm\ncTXG1E2WMGrJTcM7cPh4Ef/5ZnugQzHGmGqxhFFLurSM45wuSbz0zVaOHLdpXI0xdY8ljFp084gO\nHDxSwGsLd1S+sTHGBBlLGLWof5tmDGzXjOe/3sLxwrLmjDLGmOBlCaOW3TKiI7uy83nn+9JzSRlj\nTHCzhFHLzuqUQPfkOJ62aVyNMXWMJYxa5kzj2pEt+w4ze/XuQIdjjDFe82vCEJGRIrJeRDaJyJQK\nthsgIoUiMr6qZeuikT1a0j4hmie/3GTTuBpj6gy/JQwRCQWeAEYB3YArRaRbOdv9DfikqmXrqtAQ\n4cZh7VmVmcPXG/cFOhxjjPGKP88wBgKbVHWLqh4HXgfGlLHdbcB0YG81ytZZY/um0jIuiie/tAmW\njDF1gz8TRgrgecNBhrvsBBFJAcYCT1W1rEcdk0RksYgszsrKqnHQtSUiLIQbhrZjwZYDLNl+MNDh\nGGNMpQLd6P0o8DtVrfZNCar6rKqmq2p6YmKiD0PzvysHptGkcThP2VmGMaYOCPNj3ZlAa4/Xqe4y\nT+nA6yICkACMFpFCL8vWedGRYVx/Rjv++dkG1u/OpXPL2ECHZIwx5fLnGcYioJOItBORCOAK4D3P\nDVS1naq2VdW2wNvAzar6jjdl64ufntGG6IhQ/vnphkCHYowxFfJbwlDVQuBWYDawFnhTVVeLyGQR\nmVydsv6KNZCaNI5g8rAOfLx6Nwu27A90OMYYUy6pT/cBpKen6+LFiwMdRpXlFxRxzsNfEd8onPdv\nG0JoiAQ6JGNMAyEiS1Q13ZttA93obYCo8FCmjOrCml05vL3ERrI1xgQnSxhB4qJerUhv05R/zF5P\nbn5BoMMxxpgfsYQRJESEey7uxr684zw+x7rZGmOCjyWMINIrtQnj+6fy4rxtbN9/ONDhGGPMKSxh\nBJnfXNCZsFDhrx+tDXQoxhhzCksYQaZFXBS3jOjI7NV7+GazDUxojAkeljCC0M+HtCOlSSP+/P4a\nm2TJGBM0LGEEoajwUP4wuivrdufyxiLrZmuMCQ6WMILU6J4tGdi2GQ9/sp4c62ZrjAkCljCCVEk3\n2wNHjvP4F9bN1hgTeJYwgliPlHgm9E/lxflb2brPutkaYwLLEkaQu+uCzkSEhjD1Q+tma4wJLEsY\nQS4pNopbzu7IZ2v3MM/m/zbGBJAljDrgZ2e2o3WzRvzlgzUUFlV7ckJjjKkRSxh1QFR4KH8Y1ZX1\ne3J5zbrZGmMCxK8JQ0RGish6
EdkkIlPKWD9GRFaIyDIRWSwiQzzWbRORlSXr/BlnXTCyR0sGtWvG\nI5+sJ/uodbM1xtQ+vyUMEQkFngBGAd2AK0WkW6nNPgd6q2of4GfA86XWj1DVPt5O7lGflXSzPXS0\ngMc+3xjocIwxDZA/zzAGAptUdYuqHgdeB8Z4bqCqeXpyyr9owMbBqED35HgmprfmP99sY3NWXqDD\nMcY0MGF+rDsF8LzgngEMKr2RiIwFHgCSgAs9VinwmYgUAc+o6rN+i/S/YyG8McSlQHyK8zMu2XnE\ntoKwSL/tuqruPL8zH6zYxV8/XMsL1w0IdDjGmAbEnwnDK6o6E5gpImcBfwHOdVcNUdVMEUkCPhWR\ndao6t3R5EZkETAJIS0uregDFxYDA/k2wdS4cy/nxNtFJbgLxSCTxqR5JJRnCo6q+72pIjI3k1rM7\n8uCsdczdkMVZpyXWyn6NMUZOXhHyccUig4H7VPUC9/XvAVT1gQrKbAEGquq+UsvvA/JU9aGK9pme\nnq6LF9ewfTw/B3J3QXYG5Ox0H5kezzMgP/vH5bqNgQsfgeiEmu3fC8cKizj/n3OJCA1h1i+HEhZq\nnd2MMdUjIku8bSf25xnGIqCTiLQDMoErgKs8NxCRjsBmVVUR6QdEAvtFJBoIUdVc9/n5wJ/9GOtJ\nUXHOI7Fz+dscy3OSSkki2bsGvnsGtn8DFz8GXUb7NcTIMGc02xv/u4RXF/7AtYPb+nV/xhgDfkwY\nqlooIrcCs4FQYJqqrhaRye76p4HLgGtFpAA4Ckx0k0cLnMtUJTG+qqof+yvWKouMgchOkNDp5LLe\nV8KMG+H1K6HvNXDBA07i8ZPzu7VgcPvmPPLpBi7pnUyTxhF+25cxxoAfL0kFgk8uSdVE4TH48kGY\n/6jTxnHpU9B2SOXlqmntrhwufOxrrh3clvsu6e63/Rhj6q+qXJKyi9++FBYJ594L138MEgovXQSz\n74aCfL/srmurOCYOSOO/C7azaW+uX/ZhjDElLGH4Q9ogmDwP0n8G3z4Ozw6Dncv8sqs7zz+NxhGh\n3PHGMg4fK/TLPowxBixh+E9kDFz0CFwz3elV9fw58NU/oMi3H+oJMZH864o+rNmZw62vLrXBCY0x\nfmMJw986ngs3fQPdLoU598O0C2Cfb2fQO7tLC+6/tCdz1mfxx3dXUZ/apYwxwcMSRm1o3AzGvwDj\npzk3CD49BBY+59406BtXDUrj5uEdeG3hDp78crPP6jXGmBKVJgwRCRWRX9VGMPVej8vg5gVOz6mP\n7oL/jYPsTJ9V/5sLOnNpn2T+MXs9M7/P8Fm9xhgDXiQMVS0CrqyFWBqGuFZw9Vtw0T9hx0J4cjCs\neBN8cBlJRPj7+N4Mbt+c3769gm822Qx9xhjf8faS1HwReVxEhopIv5KHXyOrz0ScHlQ3zYOkrjDj\nFzBnqk+qjggL4emf9KddQjQ3/ncJ63aXMTaWMcZUg1c37onInDIWq6qe7fuQqi/gN+5VR3ERvHsL\nrHgDbvgcUnyTh3ceOsrYJ+cTIsKMm8+gVXwjn9RrjKlffH7jnqqOKOMRVMmizgoJhVF/g5gW8O6t\nUHjcJ9UmN2nEi9cNJDe/kOtfXERuvs3SZ4ypGa8ShojEi8gj7jSqi0XkYRGJ93dwDUZUvNOmsXe1\nM6yIj3RLjuOpa/qxaW8eN/1vKccL7R4NY0z1eduGMQ3IBS53HznAi/4KqkHqPMrpRfXV32HvOp9V\nO7RTIg+M68m8TfuYMmOF3aNhjKk2bxNGB1W9151udYuq/glo78/AGqSRf4PIWHjvVqdtw0cmpLfm\nV+eexoylmfzz0w0+q9cY07B4mzCOisiJYVdF5Eyc4ciNL8UkOu0ZGYtgoW9npL39nI5cnp7KY19s\n4vWFP/i0bmNMw+DtfBiTgZc92i0OAj/1T0gNXM8JsPIt+PzPzmWqpm19Uq2IMHVsT3bnHOPud1
bR\nIj6KEZ2TfFK3MaZh8OZO7xCgs6r2BnoBvVS1r6qu8Ht0DZGI0wAuofD+L31yQ1+J8NAQnry6H51b\nxHLLK0tZlVnGVLPGGFMOb+70LgZ+6z7PUVWv7wQTkZEisl5ENonIlDLWjxGRFSKyzO19NcTbsvVa\nfCqc9yfY8iUse8WnVcdEhvHi9QNo2jiC619axI4DR3xavzGm/vK2DeMzEblLRFqLSLOSR0UFRCQU\neAIYBXQDrhSRbqU2+xzorap9gJ8Bz1ehbP3W/3pocybM/gPk7vZp1S3ionjx+gHkFxRx/UuLyD5i\n92gYYyrnbcKYCNwCzAWWuI/KbqkeCGxye1UdB14HxnhuoKp5erKfZzSg3pat90JC4OLHnGlfP7rL\n59Wf1iKWZ3+Szg/7j/CL/y7mWKHvemUZY+onb9swrlHVdqUelXWrTQF2eLzOcJeVrn+siKwDPsQ5\ny/C6rFt+UskNhVlZWZW9nboloSMM/z2sfR9Wv+Pz6gd3aM4/JvRi4dYDTHp5CTl2N7gxpgLetmE8\n7q8AVHWmqnYBLgX+Uo3yz6pquqqmJyYm+j7AQBt8K7Tq7ZxlHDng8+rH9EnhgXE9mb9pH2OfmM/W\nfYd9vg9jTP3g7SWpz0XkMhGRKtSdCbT2eJ3qLiuTqs4F2otIQlXL1muhYTDmCTh6EGbf7ZddXDkw\njZd/PpADh49z6RPzmbfRhkU3xvyYtwnjRuBN4JiI5IhIrohU1ltqEdBJRNqJSARwBfCe5wYi0rEk\nCbnDpUcC+70p26C07Aln3gHLX4VNn/llF2d0SODdW4bQMi6Kn764kBfnb7VhRIwxp/A2YcQD1wH3\nq2oc0B04r6ICqloI3ArMBtYCb6rqahGZLCKT3c0uA1aJyDKcXlET1VFm2aq9tXrmrN9Awmnw/h1w\nLNcvu0hr3pjpN5/B2V2S+NP7a5gyfaUNWGiMOcHb+TCeAoqBs1W1q4g0BT5R1QH+DrAq6uR8GFXx\nwwKYNhIGToLRf/fbboqLlUc+3cDjczYxoG1TnrqmPwkxkX7bnzEmcHw+HwYwSFVvAfIBVPUgEFHN\n+Ex1pZ3uJIuFzzrJw09CQoS7LujMv6/sy4qMbMY8Pp/VO+2ucGMaOm8TRoF7M50CiEgizhmHqW3n\n3OPcCf7ebVCQ79ddXdw7mbcnn0FRsTL+qW+ZtXKXX/dnjAlu3iaMx4CZQJKITAXmAX/1W1SmfJEx\ncPG/YN8GmPsPv++uZ2o87912Jl1axXLTK0v556cbKC62xnBjGiJvp2h9BWc8qQeAXcClqvqWPwMz\nFeh4DvRCWPxiAAAec0lEQVS52pmdb5f/x4BMio3i9Umnc1m/VP71+UZueXUpR44X+n2/xpjg4lWj\nd11R7xu9PR05AE8MgrhWcMMXzv0afqaqvDBvK3/9aC2dW8bx3LX9SW3a2O/7Ncb4jz8avU2wadwM\nLnwIdi2Hb/9dK7sUEW4Y2p5p1w0g4+ARxjw+n0XbfH/3uTEmOFnCqMu6jYGuF8OcB2rl0lSJ4Z2T\neOeWM4lvFM5Vzy2wGfyMaSAsYdR1ox+C8EbwzFB4agh8cT9kLoFi/3Zi65AYw8ybz2RwhwSmzFjJ\nr99Yxu5s//baMsYElrVh1AeHdsDqmbDhY/jhW9BiiGkBp410pnltNwwi/NPWUFhUzKOfbeTZuVsI\nCYFJQ9tz47AOREf6v03FGFNzVWnDsIRR3xw5ABs/hfUfwabP4XguhDWC9sOd5HHaSIht4fPd7jhw\nhL/PXs/7y3eSEBPJneefxuXprQkNqcp4lcaY2mYJwzgKj8P2ebD+Y1g/C7LdtoaU/nDaKCeBtOju\nzCPuI9//cJCpH65l8faDdG4Ry+9Hd2HYaYlUbaDjWpKdCZu/gK1fOWdkg291ep0Z04BYwjA/pgp7\n1zhnHus/hkz3OMW3Ppk4ImMhMs796fGIiK1St11V5eNVu3
nw43Vs33+EoZ0S+P2ornRLjvPTm/PS\n8SOw/RvY/LmTKLLWOcujk+DIfggJg/4/dUYGji9zvi5j6h1LGKZyuXtg42znzGPzHCg8WvH24dE/\nTiQlCSYqDsKiICwSQiPcn5EUSjjzt+Xy4Zr9HCoQBnZMZtyA9jSLi4WwCAiNPFmmUVPnLnZfUoU9\nq08miO3fQtExZ79tzoAOZzs3QSZ1g4Pb4OuHYflrICHQ71oY8itnGBZj6jFLGKZqCo8537CP5bqP\nHOdnfs6Pl5X5yHbGtSo6VrM4ouIhLgXikt1Hisdr92dUJWcpeVmwZY6TIDZ/AXl7nOWJXZ3k0GEE\npJ1RfieAg9th3iPw/SvO677XwNBfQ5O0mr03Y4KUJQwTGKpQdNxJQCd+HnPaUoqOsftADq99u5Gl\nW/aQ2Agu653I4DaxhBS5CStnp/vIdNoXDu/98T4iYp3EEe+RSGJbwaHtTiP/bvd+lEbNnOTQ4Wzn\nEZdctfdy6AeY909Y+l/ndZ+rnMTRtG2NDpExwcYShglqS7YfZOqHa1j6wyG6tIzl96O7Muy0MuZj\nLzwOubtOJpGcTI/nO52kkrcHUKf9ofWgkwmiVW8ICa15sNkZbuJ42emu3PsKGHoXNGtX87pN/bBr\nudO1vcuFPu1AUluCJmGIyEjgX0Ao8LyqPlhq/dXA7wABcoGbVHW5u26bu6wIKPTmDVnCqDtUlVmr\ndvPgrHX8cOAII7u35MHLetKkcRWnWSkqgNzd0KiJ06biL9mZMP9fsOQlKC50E8ed0LyD//Zpgpsq\nfPsEfHav8zeROhBGPej0QqxDgiJhuPNnbMCZyjUDZ57uK1V1jcc2ZwBrVfWgiIwC7lPVQe66bUC6\nqu7zdp+WMOqeY4VFTJu3jUc+XU9SbBT/uqIP6W2bBTqs8uXschPHi06y6nW5c8aR0DHQkZnadOQA\nvHOTc7Nsl4ug47kw56/OZdQ+Vzvz1sS2rJ1YDu+HPaug/bBqFQ+WhDEYJwFc4L7+PYCqPlDO9k2B\nVaqa4r7ehiWMBmNFxiFue+17Mg4e5dfnncbkYR2C+6a/3N3wzb9h0QtOO02Hs6HtUOfRqnetjB5s\nAuSHBfD2z+BwFpx/vzMLpojTSeTrh2HBk07Pv7PugtNvdnoC+sPedfDdU7D8daeX4p3rITyqytUE\nS8IYD4xU1Rvc1z/Bmer11nK2vwvo4rH9ViAb55LUM6r6bDnlJgGTANLS0vpv377d5+/F1I7c/AL+\nMHMV7y/fyZkdm/PPiX1Iiq36P0CtytvrXJZY/5EzqRVARIwznW6bM6HtEEjuC6HhgY3T1FxxMcz/\nJ3wx1ek1N+FF53db2v7N8MkfYf2H0LQdXDAVOo/2TfuGqtNN/NsnnZ9hUdBrIpx+EyR1rVaVdS5h\niMgI4ElgiKrud5elqGqmiCQBnwK3qercivZpZxh1n6ry5uId3PveamIiw3j48j5lN4gHo9w9sH2+\n89g27+SNgeHR0HqgkzzaDoHkfs59KKbuyMuCmZOcrtrdxzmzXlbWxXvzF/Dx752/g/bD4YIHoEW3\n6u2/4CiseAMWPOXUF9MSBt4A/a+H6ITq1ekKloTh1SUpEemFM/3rKFXdUE5d9wF5qvpQRfu0hFF/\nbNyTy62vfs/6PbncOKw9d53fmfDQOja4cl7WqQlkr9t8F9bITSBDoe2ZTiOpvy5bmJrbOhem3wD5\n2TDyQeh/nfdnC0WFsHgazJnq3LM04Ocw/PfOfDbeyN0NC59z6jh6AFr2gsG3OEnLR186giVhhOE0\nep8DZOI0el+lqqs9tkkDvgCuVdVvPJZHAyGqmus+/xT4s6p+XNE+LWHUL/kFRfz5gzW8+t0P9Gnd\nhH9f2ZfWzerwDH+H93skkPlOQyXqXFZo1uHHd9FHxZU9VEvpZeHREFLHkmldUFwEX/0dvvobNO8I\nE16Clj2qV9eRA06j+O
IXnBtUR9ztnB2U19a1c5nTFrJqhtMDq8uFTntImzN83nU3KBKGG8ho4FGc\nbrXTVHWqiEwGUNWnReR54DKgpOGhUFXTRaQ9zlkHQBjwqqpOrWx/ljDqpw9X7GLK9BUg8LfLejG6\nZz0ZIPDIAWc4+m3znRsPy7qjvuCIFxUJRCc694Y0a+9cN2/Wzv3Z3vk2WwfvD6hUfo6TdHetcO6R\naXMGtOzpm/tvcnbBjF/Atq+h95XOvDO+GLpmz2r4eIpz1pLYFUY+4NxgCk6CWj/LSRTb5zttYX2v\ngUE3Or9HPwmahFHbLGHUXzsOHOHW175n+Y5DXD0ojT9e1I2ocB98MAS7okJniPofDcfikVjyc5wb\nHA9ugwNbISfj1Doi45w71MtKKHEpZZ+dFBeVMyRMWUPE5Di9gk5JVO0gItp3xyEvC3Yvd5LD7hXO\nzXIHtvx4u8h4aDPY7XBwJrSsRo+1TZ/BjBudZH3hw85d/r6kCus+hNl/cL4odL7QiXnR887vMD7N\nSRL9fuKcjfiZJQxTLxUUFfPQJ+t55qstdG4Ry+NX9aVTCz/erFdXFeQ7H0QHtjofqge3Os8PbnXG\nyiouOLltaISTTCJiSp3ZHPZiR3LysljBETh68NTVMS1OJo/Siaq8sx5VyN5xamLYtQJyd57cpkma\n03W5ZW9o1ct5rupe6vvaOWPbv9HZNiLW6bFW0uGgVe/ye6wVFcKc+507+5O6OZegEjt7cRyqqSDf\nOZuY+5BzvFuf7vR26nJRrXbLtoRh6rUv1+/lzjeXc/h4IX++pAcT0lODc76NYFRc5Ax3UpJEShJK\nwdFSbSNltJ1ExVfcdnL04MnE5PnzwNZTP/DBqd/zbKS48GSSKEk8EgIJpzkNvSWJoWVPZ2TjyuTu\nPtlWtG0e7FvvLI+IcYaQaXum0+mgpMtzdga8/XPYsQD6/RRG/c2Z+rg25GU5Y6kldamd/ZViCcPU\ne3ty8vnVG8v4ZvN+LumdzNSxPYiNsnsdglbBUefspvQZz4EtzkCPEuJ8qz+RGHo7c7T4amrhvL2n\nJpCstc7y8MZOj7Vdy5079y/+F/Qc75t91hGWMEyDUFSsPPXlJh75dANpzRrz1DX96doqwJM0maor\nKnR+1ubd8Yf3nUwg2+dDVBO45LEGOTaYJQzToCzceoBbX11KTn4BUy/tyWX9bdIjY7xVlYRhnbdN\nnTewXTM+uH0IfVo34c63lvP7GSvJLygKdFjG1DuWMEy9kBQbxf9+PojJwzrw2sIfmPD0t+w44M09\nDMYYb1nCMPVGWGgIU0Z14blr09m2/zAX/XseX6zbE+iwjKk3LGGYeue8bi344LYhpDRpxM9eWsxD\ns9dTVFx/2uqMCRRLGKZeatM8mhk3n8HE9NY8PmcT1077jn15xwIdljF1miUMU29FhYfyt/G9+Pv4\nXizedpCLHpvHku0HAh2WMXWWJQxT712e3poZN59BRFgIE59ZwLR5W6lP3cmNqS2WMEyD0D05nvdv\nG8KILkn8+YM13Pra9+QdKwx0WMbUKZYwTIMR3yicZ3/SnymjujBr5S4ueXweG/bkBjosY+oMSxim\nQRERJg/rwKu/OJ2co4WMeXw+73yfGeiwjKkT/JowRGSkiKwXkU0iMqWM9VeLyAoRWSki34hIb2/L\nGlMTp7dvzke3D6FnSjx3vLGMG/6zmHeXZZKbX1B5YWMaKH9O0RqKM0XreUAGzhStV6rqGo9tzgDW\nqupBERmFMwf4IG/KlsXGkjJVVVhUzL+/2MSrC38gK/cYEaEhDO2UwKierTivawviG9sIuKZ+q8pY\nUv4cHnIgsElVt7hBvQ6MAU586HvO4w0sAFK9LWuML4SFhvCr807jl+d0YskPB5m1cjcfr9rF5+v2\nEhYiDO7QnFE9WnF+9xYkxEQGOlxjAsqfCSMF2OHxOgMYVMH2PwdmVbOsMTUSEiIMaNuM
AW2b8ceL\nurIiI5tZq3Yza9Uu/jBzJf/3zkoGtmvGqB6tuKB7S1rGRwU6ZGNqXS0OQF8+ERmBkzCGVKPsJGAS\nQFpamo8jMw2RiNC7dRN6t27C70Z2Zu2uXD5etYuPVu3m3vdWc+97q+mX1oRRPVoxskdLWjfz0SQ/\nxgQ5fyaMTKC1x+tUd9kpRKQX8DwwSlX3V6UsgKo+CzwLThtGzcM25iQRoVtyHN2S4/j1+Z3ZtDeX\nWSt3M2vVbqZ+tJapH62lZ0o8F/Vqxdi+KSTF2ZmHqb/82egdhtNwfQ7Oh/0i4CpVXe2xTRrwBXCt\nZ3uGN2XLYo3epjZt33/YuWy1chfLM7IJDRGGnZbI+P6pnNM1iciw0ECHaEylgmbGPREZDTwKhALT\nVHWqiEwGUNWnReR54DJgu1uksCTwsspWtj9LGCZQNmflMX1JBjOWZrI7J58mjcMZ0zuZ8f1b0yMl\nDhEJdIjGlCloEkZts4RhAq2oWJm3aR9vLd7BJ2v2cLywmC4tYxnfP5VL+6ZYTysTdCxhGBMEso8U\n8P6Knby1JIPlOw4RFiIM75zEhPRURnROIiLMBlowgWcJw5ggs3FPLm8vdS5ZZeUeo1l0BGP6JDOh\nf2u6JccFOjzTgFnCMCZIFRYV8/XGfby1ZAefrdnL8aJiurWK45rT2zAhPZXwUDvrMLXLEoYxdcDB\nw8d5f8VO3li0g9U7c0hr1phfndeJS3qnEBpijeSmdljCMKYOUVW+XJ/FP2avZ82uHE5rEcOd53fm\n/G4trHeV8buqJAw7/zUmwESEEV2S+OC2ITx+VV8Ki5Qb/7uES5+Yz9cbs2x2QBM0LGEYEyRCQoSL\neiXzya/O4u/je7Ev7zg/eWEhVz63wOYiN0HBLkkZE6SOFRbx2nc/8PicTezLO87ZXZK48/zT6J4c\nH+jQTD1ibRjG1CNHjhfy0jfbePrLzeTkF3JRr1b86rzT6JAYE+jQTD1gCcOYeij7aAHPzd3CtPlb\nyS8oYnz/VG4/pxOpTW20XFN9ljCMqcf25R3jyTmb+d8CZwi2qwalMXFAa1rGRdGkcbj1rDJVYgnD\nmAZg56Gj/PuLjby5OIOiYuf/ODxUSIiJJCk2ksSSR4zH89hIEmOiSIyNpFGEjaZrLGEEOgxjatWO\nA0dYnnGIrNxjZOUeY6/7Myv3GFl5x9ifd4ziMv7NYyLDTiSUrq1iGdsvld6p8XaG0sAEy5zexpha\n0LpZ4wpn/SsqVg4cPn4igezNyScr79gpCeb1RTv4z7fb6ZAYzbh+qYztm0Jyk0a1+C5MXWBnGMYY\ncvIL+GjFLmYszWThtgOIwOD2zRnXL5VRPVoSHWnfLesruyRljKm2H/YfYeb3mcz4PoPt+4/QKDyU\nUT1aMq5fKoM7NLdxruqZoEkYIjIS+BfOrHnPq+qDpdZ3AV4E+gF3q+pDHuu2AblAER4z8VXEEoYx\nvqOqLNl+kOlLM/lgxU5y8wtpGRfFpX1TuKxfCp1axAY6ROMDQZEwRCQUZ17u84AMnHm5r1TVNR7b\nJAFtgEuBg2UkjHRV3eftPi1hGOMf+QVFfLZ2DzOWZvLVhiyKipVeqfGM65vCxb2TaW4zCdZZwdLo\nPRDYpKpb3KBeB8YAJxKGqu4F9orIhX6MwxhTQ1HhoVzUK5mLeiWTlXuMd5dlMmNpJve9v4b7P1zL\ngLbN6JUaT48U59GmWWNC7NJVvePPhJEC7PB4nQEMqkJ5BT4TkSLgGVV9tqyNRGQSMAkgLS2tmqEa\nY7yVGBvJDUPbc8PQ9qzdlcPM7zP5dvN+Xpy/jeNFxQDERobRLTmOHinx9EyJp0dKHO0SYqz9o44L\n5q4PQ1Q1071s9amIrFPVuaU3chPJs+BckqrtII1pyLq2iqNrK2eK2eOFxWzYk8vqndmszMxmVWYO\n/1uwnWOFThJpHBFKt1ZxJ85CeqTE0TExhjCbZbDO
8GfCyARae7xOdZd5RVUz3Z97RWQmziWuHyUM\nY0xwiAgLOZEMJg5wlhUWFbMpK49VmTmsysxmVWY2by7ewUvfbAMgMiyErq3iGNSuGWP7pdClpc1v\nHsz8mTAWAZ1EpB1OorgCuMqbgiISDYSoaq77/Hzgz36L1BjjF2GhIXRpGUeXlnGM758KODcSbt13\nMomsyMzmhXlbeWbuFrq1imNcvxTG9EkhMdYa0oONv7vVjgYexelWO01Vp4rIZABVfVpEWgKLgTig\nGMgDugEJwEy3mjDgVVWdWtn+rJeUMXXT/rxjfLBiF9OXZrAiI5vQEGHYaYmM65fCuV1bEBVu4175\nS1B0qw0ESxjG1H0b9+Qy4/tMZi7NZHdOPrFRYVzUqxWX9Uulf5umNtaVj1nCMMbUeUXFyreb9zNj\naQazVu3maEERac0aM65fCuP6ppLW3OYB8QVLGMaYeuXwsUI+XrWb6Usz+HbLflRhQNumXNYvldG9\nWhEXFR7oEOssSxjGmHor89BR3vk+k+lLM9iSdZjIsBB6psQTERZCaIgQHlryUwgNCSEsRJxHqBAW\n4qxzXoecWN60cQQ9Upwuwo0jgvluA9+zhGGMqfdUlRUZ2cxYmsH6PbkUFSuFxUphUcnPYo9lxRQW\nK0XFSoG7vMB9XeQxWUiIQPvEGHqmxNM9OY6eKfF0S44jth6fwQTL0CDGGOM3IkLv1k3o3bpJjepR\nVXbn5LM6M4eVmdms3pnNN5v3MfP7k7eNtUuIdu4xcZNI9+R44hvX3yRSHksYxpgGTURoFd+IVvGN\nOLdbixPL9+bms3pnDqsyslm1M5ul2w/y/vKdJ9a3btboRPIY2K4Z/dOa1vvxsyxhGGNMGZJio0jq\nHMWIzkknlh04fPzE0CerM3NYtTObj1buBiC1aSPG9U1hbL9U2iVEBypsv7I2DGOMqYHsIwXMWb+X\n6UszmLdpH6rQv01TxvVL4aKeyUF/6coavY0xJgB2Z+fzzrJMpi/JYOPePCJCQzi3WxLj+qYyrHMi\n4UE40KIlDGOMCSBVZfXOHKYvzeC9ZTvZf/g4zaMjuKRPMpf1S6V7clzQ3LFuCcMYY4JEQVExX63P\nYsb3GXy2Zi/Hi4o5rUUM4/qlMrZvCi3iogIanyUMY4wJQtlHCvhg5U5mLM1kyfaDhAic2TGBkT1a\n0rlFLJ2SYmu9zcMShjHGBLmt+w4zc2kGM77PJOPg0RPLE2Mj6ZgYQ6cWMXRMch6dkmJJiInwy2Us\nSxjGGFNHqCo7DhxlU1Yum/bmsXFPHhv35rF5bx65xwpPbBffKJxOSU4i6ZAYQ6cWsXRKiqFVfFSN\nEond6W2MMXWEiJDWvDFpzRtzdpeTNw6qKntyjjlJZG8uG/fmsWlvHh+v2s3BIwUntouOCKVbchxv\n3jjY7w3pljCMMSYIiQgt46NoGR/FkE4Jp6zbn1eSSJwkkl9QVCu9rvyaMERkJPAvnBn3nlfVB0ut\n7wK8CPQD7lbVh7wta4wxDVXzmEiax0QyqH3zWt2v3+4iEZFQ4AlgFM60q1eKSLdSmx0AbgceqkZZ\nY4wxtciftx0OBDap6hZVPQ68Dozx3EBV96rqIqCgqmWNMcbULn8mjBRgh8frDHeZT8uKyCQRWSwi\ni7OysqoVqDHGmMoF38AmVaSqz6pquqqmJyYmBjocY4ypt/yZMDKB1h6vU91l/i5rjDHGD/yZMBYB\nnUSknYhEAFcA79VCWWOMMX7gt261qlooIrcCs3G6xk5T1dUiMtld/7SItAQWA3FAsYjcAXRT1Zyy\nyvorVmOMMZWzoUGMMaYBa7BjSYlIFrC9msUTgH0+DMfXLL6asfhqxuKrmWCOr42qetVjqF4ljJoQ\nkcXeZtlAsPhqxuKrGYuvZoI9Pm/V+W61xhhjaoclDGOMMV6xhHHSs4EOoBIWX81YfDVj8dVMsMfn\nFWvDMMYY4xU7
wzDGGOMVSxjGGGO80qAShoiMFJH1IrJJRKaUsV5E5DF3/QoR6VfL8bUWkTkiskZE\nVovIL8vYZriIZIvIMvdxTy3HuE1EVrr7/tFdkoE8hiLS2eO4LBORHHf0AM9tavX4icg0EdkrIqs8\nljUTkU9FZKP7s2k5ZSv8e/VjfP8QkXXu72+miDQpp2yFfwt+jO8+Ecn0+B2OLqdsoI7fGx6xbROR\nZeWU9fvx8zlVbRAPnCFGNgPtgQhgOc4wJJ7bjAZmAQKcDnxXyzG2Avq5z2OBDWXEOBz4IIDHcRuQ\nUMH6gB7DUr/v3Tg3JQXs+AFn4cwoucpj2d+BKe7zKcDfyom/wr9XP8Z3PhDmPv9bWfF587fgx/ju\nA+7y4vcfkONXav3DwD2BOn6+fjSkMwxvJmUaA7ysjgVAExFpVVsBquouVV3qPs8F1uL9HCLBIqDH\n0MM5wGZVre6d/z6hqnNxZpb0NAb4j/v8P8ClZRStlUnEyopPVT9R1UL35QKc0aIDopzj542AHb8S\n4kyyfTnwmq/3GygNKWF4MylTTSZ98ikRaQv0Bb4rY/UZ7uWCWSLSvVYDAwU+E5ElIjKpjPXBcgyv\noPx/1EAeP4AWqrrLfb4baFHGNsFyHH+Gc8ZYlsr+FvzpNvd3OK2cS3rBcPyGAntUdWM56wN5/Kql\nISWMOkNEYoDpwB2qmlNq9VIgTVV7Af8G3qnl8Iaoah+c+dZvEZGzann/lRJnSPxLgLfKWB3o43cK\nda5NBGXfdhG5GygEXilnk0D9LTyFc6mpD7AL57JPMLqSis8ugv5/qbSGlDC8mZQp4BM3iUg4TrJ4\nRVVnlF6vqjmqmuc+/wgIF5GE2opPVTPdn3uBmTin/p4Cfgxx/gGXquqe0isCffxce0ou07k/95ax\nTUCPo4hcB1wEXO0mtR/x4m/BL1R1j6oWqWox8Fw5+w308QsDxgFvlLdNoI5fTTSkhOHNpEzvAde6\nPX1OB7I9Lh34nXvN8wVgrao+Us42Ld3tEJGBOL/D/bUUX7SIxJY8x2kcXVVqs4AeQ1e53+wCefw8\nvAf81H3+U+DdMrYJ2CRiIjIS+C1wiaoeKWcbb/4W/BWfZ5vY2HL2G+hJ2M4F1qlqRlkrA3n8aiTQ\nre61+cDpwbMBp/fE3e6yycBk97kAT7jrVwLptRzfEJzLEyuAZe5jdKkYbwVW4/T6WACcUYvxtXf3\nu9yNIRiPYTROAoj3WBaw44eTuHYBBTjX0X8ONAc+BzYCnwHN3G2TgY8q+nutpfg24Vz/L/kbfLp0\nfOX9LdRSfP91/7ZW4CSBVsF0/NzlL5X8zXlsW+vHz9cPGxrEGGOMVxrSJSljjDE1YAnDGGOMVyxh\nGGOM8YolDGOMMV6xhGGMMcYrljCMCQLuKLofBDoOYypiCcMYY4xXLGEYUwUico2ILHTnMHhGREJF\nJE9E/inOHCafi0iiu20fEVngMa9EU3d5RxH5TESWi8hSEengVh8jIm+7c1G8UnJHujHBwhKGMV4S\nka7AROBMdQaNKwKuxrm7fLGqdge+Au51i7wM/E6dgQ5Xeix/BXhCVXsDZ+DcKQzO6MR3AN1w7gQ+\n0+9vypgqCAt0AMbUIecA/YFF7pf/RjgDBxZzcpC5/wEzRCQeaKKqX7nL/wO85Y4flKKqMwFUNR/A\nrW+humMPubO0tQXm+f9tGeMdSxjGeE+A/6jq709ZKPLHUttVd7ydYx7Pi7D/TxNk7JKUMd77HBgv\nIklwYm7uNjj/R+Pdba4C5qlqNnBQRIa6y38CfKXOTIoZInKpW0ekiDSu1XdhTDXZNxhjvKSqa0Tk\n/4BPRCQEZ4TSW4DDwEB33V6cdg5whi5/2k0IW4Dr3eU/AZ4RkT+7dUyoxbdhTLXZaLXG1JCI5Klq\nTKDjMMbf7JKUMcYYr9gZhjHGGK/YGYYxxhivWMIwxhjjFUsYxhhjvGIJwxhjjF
csYRhjjPHK/wM1\nDGTXMyfeVQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7fcef5cc1da0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot per-epoch error (1 - accuracy) for the ATAE model.\n",
    "from matplotlib import pyplot\n",
    "train_error = 1 - np.array(hist.history['acc'])\n",
    "val_error = 1 - np.array(hist.history['val_acc'])\n",
    "pyplot.plot(train_error)\n",
    "pyplot.plot(val_error)\n",
    "pyplot.title('ATAE train vs validation error')\n",
    "pyplot.ylabel('error')\n",
    "pyplot.xlabel('epoch')\n",
    "pyplot.legend(['train', 'validation'], loc='upper right')\n",
    "pyplot.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## Interactive Attention Networks for Aspect-Level Sentiment Classification (IAN)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class AttentionLayer(Layer):\n",
    "    '''Attention scores over LSTM hidden states given a pooled target vector.\n",
    "\n",
    "    Inputs: [h, t] where h is (batch, time_steps, dim) hidden states and t is\n",
    "    a (batch, dim) summary vector. Output: (batch, time_steps) unnormalised\n",
    "    scores tanh(t . (h W)^T); softmax is applied by the caller.\n",
    "    '''\n",
    "    def __init__(self,output_dim,**kwargs):\n",
    "        self.output_dim=output_dim\n",
    "        super(AttentionLayer, self).__init__(**kwargs)\n",
    "        \n",
    "    def build(self,input_shape):\n",
    "        assert len(input_shape) ==2\n",
    "        # Register the kernel via add_weight so Keras tracks/trains it itself\n",
    "        # (consistent with the mask-aware AttentionLayer defined later).\n",
    "        self.W = self.add_weight(name='{}_W'.format(self.name),\n",
    "                                 shape=(input_shape[0][2],input_shape[0][2]),\n",
    "                                 initializer='uniform',\n",
    "                                 trainable=True)\n",
    "        super(AttentionLayer,self).build(input_shape)\n",
    "    \n",
    "    def call(self, x):\n",
    "        h = x[0]  # (batch, time_steps, dim)\n",
    "        h_ = K.dot(h,self.W)  # project hidden states with the learned kernel\n",
    "        t = x[1]  # (batch, dim)\n",
    "        # Bug fix: score against the projected h_. The original used h, which\n",
    "        # left W unused, so its gradient was None and training failed with\n",
    "        # 'ValueError: None values not supported' (see the traceback below).\n",
    "        return K.tanh(K.batch_dot(t,h_,axes=[1,2]))\n",
    "    \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        return (input_shape[0][0],input_shape[0][1])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Shape comments below use symbolic sizes; e.g. time_steps=100,\n",
    "# max_aspect_length=10, embedding_dims=300. None is the batch dimension.\n",
    "def create_ian():\n",
    "    '''Build and compile the IAN model: shared embedding -> two LSTMs ->\n",
    "    cross attention between each branch and the other branch's averaged\n",
    "    states -> concat -> dense(tanh) -> 3-way softmax. Returns the compiled\n",
    "    Keras Model taking [sentence ids, aspect ids] as inputs.\n",
    "    '''\n",
    "    words_input = Input(shape=(time_steps,),dtype='int32')    # (None, time_steps)\n",
    "    target_input = Input(shape=(max_aspect_length,),dtype='int32')   # (None, max_aspect_length)\n",
    "    embedding_layer = Embedding(max_features,\n",
    "                                embedding_dims,\n",
    "                                trainable=True,\n",
    "#                                 weights=[embedding_matrix],\n",
    "#                                 mask_zero=True,\n",
    "                                name='word_embedding')\n",
    "    words_matrix = embedding_layer(words_input)     # (None, time_steps, embedding_dims)\n",
    "    target_matrix = embedding_layer(target_input)       # (None, max_aspect_length, embedding_dims)\n",
    "    hc = LSTM(embedding_dims,dropout=0.3,recurrent_dropout=0.3,return_sequences=True)(words_matrix) # (None, time_steps, embedding_dims)\n",
    "    ht = LSTM(embedding_dims,dropout=0.3,recurrent_dropout=0.3,return_sequences=True)(target_matrix) # (None, max_aspect_length, embedding_dims)\n",
    "    # TODO: add masking -- replace GlobalAveragePooling1D and AttentionLayer with mask-aware versions\n",
    "    cavg = GlobalAveragePooling1D()(hc)         # (None, embedding_dims)\n",
    "    tavg = GlobalAveragePooling1D()(ht)         # (None, embedding_dims)\n",
    "    att_c = AttentionLayer(3)([hc,tavg])          \n",
    "    att_t = AttentionLayer(3)([ht,cavg])\n",
    "    # attention probabilities over context / target positions\n",
    "    alpha = Activation('softmax')(att_c)    # (None, time_steps)\n",
    "    beta= Activation('softmax')(att_t)       # (None, max_aspect_length)\n",
    "    cr = dot([alpha,hc],axes=1,name='attention_mul') # (None, embedding_dims)\n",
    "    tr =dot([beta,ht],axes=1,name='attention_mul2')  # (None, embedding_dims)\n",
    "    d = concatenate([cr,tr])     \n",
    "    x = Dense(embedding_dims,activation='tanh')(d)\n",
    "    predictions = Dense(3,activation='softmax')(x)\n",
    "    model = Model(inputs=[words_input,target_input],outputs=predictions)\n",
    "    opt = Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)\n",
    "    model.compile(loss='categorical_crossentropy',optimizer=opt,\n",
    "                 metrics=['accuracy'])\n",
    "    return model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "____________________________________________________________________________________________________\n",
      "Layer (type)                     Output Shape          Param #     Connected to                     \n",
      "====================================================================================================\n",
      "input_21 (InputLayer)            (None, 79)            0                                            \n",
      "____________________________________________________________________________________________________\n",
      "input_22 (InputLayer)            (None, 7)             0                                            \n",
      "____________________________________________________________________________________________________\n",
      "word_embedding (Embedding)       multiple              1028100     input_21[0][0]                   \n",
      "                                                                   input_22[0][0]                   \n",
      "____________________________________________________________________________________________________\n",
      "lstm_4 (LSTM)                    (None, 79, 300)       721200      word_embedding[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "lstm_5 (LSTM)                    (None, 7, 300)        721200      word_embedding[1][0]             \n",
      "____________________________________________________________________________________________________\n",
      "global_average_pooling1d_4 (Glob (None, 300)           0           lstm_5[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "global_average_pooling1d_3 (Glob (None, 300)           0           lstm_4[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "attention_layer_3 (AttentionLaye (None, 79)            90000       lstm_4[0][0]                     \n",
      "                                                                   global_average_pooling1d_4[0][0] \n",
      "____________________________________________________________________________________________________\n",
      "attention_layer_4 (AttentionLaye (None, 7)             90000       lstm_5[0][0]                     \n",
      "                                                                   global_average_pooling1d_3[0][0] \n",
      "____________________________________________________________________________________________________\n",
      "activation_19 (Activation)       (None, 79)            0           attention_layer_3[0][0]          \n",
      "____________________________________________________________________________________________________\n",
      "activation_20 (Activation)       (None, 7)             0           attention_layer_4[0][0]          \n",
      "____________________________________________________________________________________________________\n",
      "attention_mul (Dot)              (None, 300)           0           activation_19[0][0]              \n",
      "                                                                   lstm_4[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "attention_mul2 (Dot)             (None, 300)           0           activation_20[0][0]              \n",
      "                                                                   lstm_5[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_2 (Concatenate)      (None, 600)           0           attention_mul[0][0]              \n",
      "                                                                   attention_mul2[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "dense_44 (Dense)                 (None, 300)           180300      concatenate_2[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "dense_45 (Dense)                 (None, 3)             903         dense_44[0][0]                   \n",
      "====================================================================================================\n",
      "Total params: 2,831,703\n",
      "Trainable params: 2,831,703\n",
      "Non-trainable params: 0\n",
      "____________________________________________________________________________________________________\n"
     ]
    },
    {
     "ename": "ValueError",
     "evalue": "None values not supported.",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-33-4c081f7d5369>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      5\u001b[0m                  \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnb_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mverbose\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      6\u001b[0m                  \u001b[0mvalidation_split\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.2\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m                  callbacks=[early_stopping])\n\u001b[0m",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)\u001b[0m\n\u001b[1;32m   1488\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1489\u001b[0m             \u001b[0mins\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mx\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0my\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0msample_weights\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1490\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_train_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1491\u001b[0m         \u001b[0mf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1492\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_make_train_function\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1012\u001b[0m                 \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_collected_trainable_weights\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1013\u001b[0m                 \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconstraints\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1014\u001b[0;31m                 self.total_loss)\n\u001b[0m\u001b[1;32m   1015\u001b[0m             \u001b[0mupdates\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdates\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mtraining_updates\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1016\u001b[0m             \u001b[0;31m# Gets loss and metrics. Updates weights at each call.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/keras/optimizers.py\u001b[0m in \u001b[0;36mget_updates\u001b[0;34m(self, params, constraints, loss)\u001b[0m\n\u001b[1;32m    420\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    421\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mp\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mg\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mm\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparams\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgrads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m             \u001b[0mm_t\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbeta_1\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mm\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;36m1.\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbeta_1\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mg\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    423\u001b[0m             \u001b[0mv_t\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbeta_2\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mv\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;36m1.\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbeta_2\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mK\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msquare\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mg\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    424\u001b[0m             \u001b[0mp_t\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mp\u001b[0m 
\u001b[0;34m-\u001b[0m \u001b[0mlr_t\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mm_t\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mK\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msqrt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv_t\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mepsilon\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/math_ops.py\u001b[0m in \u001b[0;36mbinary_op_wrapper\u001b[0;34m(x, y)\u001b[0m\n\u001b[1;32m    854\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msparse_tensor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSparseTensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    855\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 856\u001b[0;31m           \u001b[0my\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mops\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconvert_to_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbase_dtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"y\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    857\u001b[0m         \u001b[0;32mexcept\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    858\u001b[0m           \u001b[0;31m# If the RHS is not a tensor, it might be a tensor aware object\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36mconvert_to_tensor\u001b[0;34m(value, dtype, name, preferred_dtype)\u001b[0m\n\u001b[1;32m    609\u001b[0m       \u001b[0mname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    610\u001b[0m       \u001b[0mpreferred_dtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mpreferred_dtype\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 611\u001b[0;31m       as_ref=False)\n\u001b[0m\u001b[1;32m    612\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    613\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/ops.py\u001b[0m in \u001b[0;36minternal_convert_to_tensor\u001b[0;34m(value, dtype, name, as_ref, preferred_dtype)\u001b[0m\n\u001b[1;32m    674\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    675\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mret\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 676\u001b[0;31m           \u001b[0mret\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mconversion_func\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mas_ref\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mas_ref\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    677\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    678\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mret\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0mNotImplemented\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/constant_op.py\u001b[0m in \u001b[0;36m_constant_tensor_conversion_function\u001b[0;34m(v, dtype, name, as_ref)\u001b[0m\n\u001b[1;32m    119\u001b[0m                                          as_ref=False):\n\u001b[1;32m    120\u001b[0m   \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mas_ref\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 121\u001b[0;31m   \u001b[0;32mreturn\u001b[0m \u001b[0mconstant\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    122\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/constant_op.py\u001b[0m in \u001b[0;36mconstant\u001b[0;34m(value, dtype, shape, name, verify_shape)\u001b[0m\n\u001b[1;32m    100\u001b[0m   \u001b[0mtensor_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_value_pb2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mAttrValue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    101\u001b[0m   tensor_value.tensor.CopyFrom(\n\u001b[0;32m--> 102\u001b[0;31m       tensor_util.make_tensor_proto(value, dtype=dtype, shape=shape, verify_shape=verify_shape))\n\u001b[0m\u001b[1;32m    103\u001b[0m   \u001b[0mdtype_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_value_pb2\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mAttrValue\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtensor_value\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtensor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    104\u001b[0m   const_tensor = g.create_op(\n",
      "\u001b[0;32m/home/dutir923/yuetianchi/anaconda3/lib/python3.6/site-packages/tensorflow/python/framework/tensor_util.py\u001b[0m in \u001b[0;36mmake_tensor_proto\u001b[0;34m(values, dtype, shape, verify_shape)\u001b[0m\n\u001b[1;32m    362\u001b[0m   \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    363\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mvalues\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 364\u001b[0;31m       \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"None values not supported.\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    365\u001b[0m     \u001b[0;31m# if dtype is provided, forces numpy array to be the type\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    366\u001b[0m     \u001b[0;31m# provided if possible.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mValueError\u001b[0m: None values not supported."
     ]
    }
   ],
   "source": [
    "model = create_ian()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 465,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Toy 3-token / 3-dim model for checking how Embedding masking behaves.\n",
    "weights=[np.array([[0,0,0]]),np.array([[1,1,1]]),np.array([[2,2,2]])]\n",
    "words_input = Input(shape=(4,),dtype='int32')\n",
    "target_input = Input(shape=(2,),dtype='int32')\n",
    "# TODO: add masking -- make the global-average and attention layers mask-aware\n",
    "embedding_layer = Embedding(3,\n",
    "                            3,\n",
    "                            trainable=True,\n",
    "                                weights=[embedding_matrix],  # NOTE(review): local `weights` list above is unused -- confirm embedding_matrix (defined elsewhere) is intended here\n",
    "#                                 mask_zero=True,\n",
    "                            name='word_embedding')\n",
    "words_matrix = embedding_layer(words_input)    \n",
    "x = LSTM(3,return_sequences=True)(words_matrix)\n",
    "model =Model(inputs=[words_input,target_input],outputs=x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 463,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "input_a = np.array([[0,1,2,0]])\n",
    "input_b = np.array([[2,2]])\n",
    "# Observe the difference masking makes: compare the outputs of the two models\n",
    "# NOTE(review): model2 is presumably the mask_zero=True variant built elsewhere -- confirm\n",
    "out = model.predict([input_a,input_b])\n",
    "out2 = model2.predict([input_a,input_b])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": true
   },
   "source": [
     "## IAN with masking"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def softmask(x, mask, axis=-1):\n",
    "    '''Softmax along `axis` with masked-out positions forced to (near) zero.\n",
    "\n",
    "    mask may be None (plain softmax) or a boolean tensor broadcastable to x.\n",
    "    '''\n",
    "    y = K.exp(x)\n",
    "    if mask is not None:\n",
    "        y = y * tf.to_float(mask)\n",
    "    sumx = K.sum(y, axis=axis, keepdims=True) + 1e-6  # epsilon guards fully-masked rows\n",
    "    x = y / sumx\n",
    "    return K.relu(x)  # values are already >= 0; relu kept as a safety clamp\n",
    "class MaskAverageLayer(Layer):\n",
    "    '''Masked mean over the time axis: average of the word vectors that are\n",
    "    not padding (mask == True), e.g. the mean aspect-term embedding.\n",
    "    '''\n",
    "    def __init__(self, keepdims=True, **kwargs):\n",
    "        # Bug fix: Keras reads `supports_masking`; the original `support_mask`\n",
    "        # attribute is ignored, so masked inputs would be rejected.\n",
    "        self.supports_masking = True\n",
    "        self.keepdims = keepdims\n",
    "        super(MaskAverageLayer, self).__init__(**kwargs)\n",
    "    \n",
    "    def call(self, x, mask=None):\n",
    "        # mask: (batch, time_steps) booleans from the Embedding's mask_zero\n",
    "        mask_float = tf.to_float(K.expand_dims(mask, 2))  # (batch, time_steps, 1)\n",
    "        mask_sum = K.sum(mask_float, axis=1)  # (batch, 1): count of real tokens\n",
    "        # padded positions contribute 0 to the sum, so this is the masked mean\n",
    "        result = K.sum(x * mask_float, axis=1) / mask_sum  # (batch, dim)\n",
    "        if self.keepdims:\n",
    "            # Bug fix: honour the (batch, 1, dim) contract that\n",
    "            # compute_output_shape declares for keepdims=True.\n",
    "            return K.expand_dims(result, 1)\n",
    "        return result\n",
    "\n",
    "    def compute_output_shape(self, input_shape):\n",
    "        if self.keepdims:\n",
    "            return (input_shape[0], 1, input_shape[2])\n",
    "        else:\n",
    "            return (input_shape[0], input_shape[2])\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        # consume the mask: downstream layers receive an unmasked tensor\n",
    "        return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class AttentionLayer(Layer):\n",
    "    '''Mask-aware attention. Inputs: [h, t] with h (batch, time_steps, dim)\n",
    "    and t (batch, dim); output: (batch, time_steps) attention weights\n",
    "    softmask(tanh(t . (h W)^T)) normalised over the unmasked positions of h.\n",
    "    '''\n",
    "    def __init__(self, **kwargs):\n",
    "        # Bug fix: the Keras attribute is `supports_masking`; the original\n",
    "        # `support_mask` typo meant masked inputs would not be accepted.\n",
    "        self.supports_masking = True\n",
    "        super(AttentionLayer, self).__init__(**kwargs)\n",
    "        \n",
    "    def build(self, input_shape):\n",
    "        # square kernel over the hidden dimension, U(-0.1, 0.1) initialised\n",
    "        self.W = self.add_weight(name='kernel',\n",
    "                                 shape=(input_shape[0][2],input_shape[0][2]),\n",
    "                                 initializer=initializers.RandomUniform(minval=-0.1,maxval=0.1),\n",
    "                                 trainable=True)\n",
    "        super(AttentionLayer, self).build(input_shape)\n",
    "    \n",
    "    def call(self, x, mask=None):\n",
    "        h = x[0]  # (batch, time_steps, dim)\n",
    "        h_ = K.dot(h, self.W)  # project hidden states\n",
    "        t = x[1]  # (batch, dim)\n",
    "        tanh_result = K.tanh(K.batch_dot(t, h_, axes=[1,2]))  # (batch, time_steps)\n",
    "        # normalise over the real (unmasked) positions only; mask[0] is h's mask\n",
    "        atten = softmask(tanh_result, mask[0])\n",
    "        return atten\n",
    "    \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        return (input_shape[0][0], input_shape[0][1])\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        # pass the sentence mask through so downstream layers can ignore padding\n",
    "        if mask:\n",
    "            return mask[0]\n",
    "        else:\n",
    "            return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# IAN model graph: context and aspect LSTMs attend over each other's average,\n",
    "# and the two attended representations are concatenated and classified.\n",
    "words_input = Input(shape=(time_steps,),dtype='int32')    #None *100\n",
    "target_input = Input(shape=(max_aspect_length,),dtype='int32')   #None *10\n",
    "# Shared embedding for context and aspect words; mask_zero=True makes the\n",
    "# padding mask propagate through the LSTMs into the custom mask-aware layers.\n",
    "embedding_layer = Embedding(max_features,\n",
    "                            embedding_dims,\n",
    "                            trainable=True,\n",
    "#                                 weights=[embedding_matrix],\n",
    "                                mask_zero=True,\n",
    "                            name='word_embedding')\n",
    "words_matrix = embedding_layer(words_input)     #None 100 *300       \n",
    "target_matrix = embedding_layer(target_input)       #None 10 *100\n",
    "# words_matrix = Dropout(0.5)(words_matrix)\n",
    "# target_matrix = Dropout(0.5)(target_matrix)\n",
    "hc = LSTM(embedding_dims,dropout=0.3,recurrent_dropout=0.3,return_sequences=True)(words_matrix) #None*100*300\n",
    "ht = LSTM(embedding_dims,dropout=0.3,recurrent_dropout=0.3,return_sequences=True)(target_matrix) #None*10*300\n",
    "# hc = Dropout(0.5)(hc)\n",
    "# ht = Dropout(0.5)(ht)\n",
    "# TODO: fold mask handling into the global average and the attention layer\n",
    "# Mask-aware mean over time: one summary vector per sequence.\n",
    "cavg = MaskAverageLayer(keepdims=False)(hc)         #None *300\n",
    "tavg = MaskAverageLayer(keepdims=False)(ht)         #None *300 \n",
    "# Interactive attention: context attends to the aspect average and vice versa.\n",
    "att_c = AttentionLayer()([hc,tavg])  # earlier raised: tf None not supported\n",
    "att_t = AttentionLayer()([ht,cavg])\n",
    "# Attention-weighted sums of the hidden states (dot over the time axis).\n",
    "cr = dot([att_c,hc],axes=1,name='attention_mul') #None*300\n",
    "tr =dot([att_t,ht],axes=1,name='attention_mul2')  #None*300\n",
    "d = concatenate([cr,tr]) \n",
    "d = Dropout(0.5)(d)\n",
    "x = Dense(embedding_dims,\n",
    "          activation='tanh',\n",
    "#           activity_regularizer=regularizers.l2(0.01)\n",
    "         )(d)\n",
    "x = Dropout(0.5)(x)\n",
    "predictions = Dense(3,activation='softmax')(x)\n",
    "model = Model(inputs=[words_input,target_input],outputs=predictions)\n",
    "# adam / adagrad quickly reach ~0.73 and then overfit\n",
    "# opt = SGD(lr=0.01,momentum=0.9,decay=1e-6)#best0.001\n",
    "opt = Adam(lr=0.001)\n",
    "model.compile(loss='categorical_crossentropy',optimizer=opt,\n",
    "             metrics=['accuracy'])\n",
    "\n",
    "early_stopping = EarlyStopping(monitor='val_loss',patience=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "____________________________________________________________________________________________________\n",
      "Layer (type)                     Output Shape          Param #     Connected to                     \n",
      "====================================================================================================\n",
      "input_2 (InputLayer)             (None, 70)            0                                            \n",
      "____________________________________________________________________________________________________\n",
      "input_3 (InputLayer)             (None, 20)            0                                            \n",
      "____________________________________________________________________________________________________\n",
      "word_embedding (Embedding)       multiple              1310700     input_2[0][0]                    \n",
      "                                                                   input_3[0][0]                    \n",
      "____________________________________________________________________________________________________\n",
      "lstm_1 (LSTM)                    (None, 70, 300)       721200      word_embedding[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "lstm_2 (LSTM)                    (None, 20, 300)       721200      word_embedding[1][0]             \n",
      "____________________________________________________________________________________________________\n",
      "mask_average_layer_2 (MaskAverag (None, 300)           0           lstm_2[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "mask_average_layer_1 (MaskAverag (None, 300)           0           lstm_1[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "attention_layer_1 (AttentionLaye (None, 70)            90000       lstm_1[0][0]                     \n",
      "                                                                   mask_average_layer_2[0][0]       \n",
      "____________________________________________________________________________________________________\n",
      "attention_layer_2 (AttentionLaye (None, 20)            90000       lstm_2[0][0]                     \n",
      "                                                                   mask_average_layer_1[0][0]       \n",
      "____________________________________________________________________________________________________\n",
      "attention_mul (Dot)              (None, 300)           0           attention_layer_1[0][0]          \n",
      "                                                                   lstm_1[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "attention_mul2 (Dot)             (None, 300)           0           attention_layer_2[0][0]          \n",
      "                                                                   lstm_2[0][0]                     \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)      (None, 600)           0           attention_mul[0][0]              \n",
      "                                                                   attention_mul2[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)              (None, 600)           0           concatenate_1[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "dense_1 (Dense)                  (None, 300)           180300      dropout_1[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "dropout_2 (Dropout)              (None, 300)           0           dense_1[0][0]                    \n",
      "____________________________________________________________________________________________________\n",
      "dense_2 (Dense)                  (None, 3)             903         dropout_2[0][0]                  \n",
      "====================================================================================================\n",
      "Total params: 3,114,303\n",
      "Trainable params: 3,114,303\n",
      "Non-trainable params: 0\n",
      "____________________________________________________________________________________________________\n",
      "Train on 3602 samples, validate on 1120 samples\n",
      "Epoch 1/50\n",
      "3602/3602 [==============================] - 20s - loss: 0.8245 - acc: 0.6480 - val_loss: 0.7031 - val_acc: 0.7045\n",
      "Epoch 2/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.5482 - acc: 0.7790 - val_loss: 0.6954 - val_acc: 0.7357\n",
      "Epoch 3/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.3857 - acc: 0.8479 - val_loss: 0.8026 - val_acc: 0.7348\n",
      "Epoch 4/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.2971 - acc: 0.8898 - val_loss: 0.8244 - val_acc: 0.7366\n",
      "Epoch 5/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.2408 - acc: 0.9117 - val_loss: 0.9055 - val_acc: 0.7071\n",
      "Epoch 6/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1958 - acc: 0.9259 - val_loss: 0.9794 - val_acc: 0.7098\n",
      "Epoch 7/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1766 - acc: 0.9370 - val_loss: 1.1386 - val_acc: 0.7152\n",
      "Epoch 8/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1573 - acc: 0.9411 - val_loss: 1.1397 - val_acc: 0.7107\n",
      "Epoch 9/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1328 - acc: 0.9489 - val_loss: 1.2674 - val_acc: 0.7134\n",
      "Epoch 10/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1173 - acc: 0.9547 - val_loss: 1.2518 - val_acc: 0.7107\n",
      "Epoch 11/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1132 - acc: 0.9606 - val_loss: 1.3646 - val_acc: 0.6955\n",
      "Epoch 12/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.1029 - acc: 0.9572 - val_loss: 1.3899 - val_acc: 0.6929\n",
      "Epoch 13/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0943 - acc: 0.9653 - val_loss: 1.4467 - val_acc: 0.7116\n",
      "Epoch 14/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0800 - acc: 0.9684 - val_loss: 1.5214 - val_acc: 0.6991\n",
      "Epoch 15/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0787 - acc: 0.9720 - val_loss: 1.5422 - val_acc: 0.6982\n",
      "Epoch 16/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0731 - acc: 0.9758 - val_loss: 1.5990 - val_acc: 0.6804\n",
      "Epoch 17/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0667 - acc: 0.9770 - val_loss: 1.5964 - val_acc: 0.6875\n",
      "Epoch 18/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0717 - acc: 0.9753 - val_loss: 1.6061 - val_acc: 0.7054\n",
      "Epoch 19/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0595 - acc: 0.9803 - val_loss: 1.6539 - val_acc: 0.7036\n",
      "Epoch 20/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0542 - acc: 0.9808 - val_loss: 1.6382 - val_acc: 0.7107\n",
      "Epoch 21/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0487 - acc: 0.9831 - val_loss: 1.7741 - val_acc: 0.7161\n",
      "Epoch 22/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0386 - acc: 0.9853 - val_loss: 1.8478 - val_acc: 0.7027\n",
      "Epoch 23/50\n",
      "3602/3602 [==============================] - 18s - loss: 0.0531 - acc: 0.9831 - val_loss: 1.9108 - val_acc: 0.6964\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# Train/validation split at a fixed index: first 3602 samples train, the rest\n",
    "# validate. NOTE(review): magic number — presumably the SemEval train/test\n",
    "# boundary; confirm against the preprocessing cells.\n",
    "early_stopping = EarlyStopping(monitor='val_loss',patience=20)\n",
    "model.summary()\n",
    "hist = model.fit([data[:3602],aspect_data[:3602]],labels[:3602],\n",
    "                 batch_size=8,    # batch sizes previously tried: 4, 736\n",
    "                 epochs=nb_epoch,verbose=1,\n",
    "                 validation_data=([data[3602:],aspect_data[3602:]],labels[3602:]),\n",
    "                 callbacks=[early_stopping],  # stop after 20 epochs without val_loss improvement\n",
    "                shuffle=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYwAAAEWCAYAAAB1xKBvAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xl4lPXV8PHvyb4nZCMhCSYImLAvEbSKS7EWUETrvrRq\nW6l9tD5tH9/WLk93W/vW2uXVitRqbeu+tWpBqhZFW0HACiI7yBJIIGxZyJ6c94/fHRhCIJNlMpPk\nfK5rrpm5tzlzZzJn7t8qqooxxhjTkbBgB2CMMaZvsIRhjDHGL5YwjDHG+MUShjHGGL9YwjDGGOMX\nSxjGGGP8YgnDdEhEPhKR84IdR3tEZJqIbAh2HF0hIjeJyDs+z6tFZJg/23bhtRaKyI1d3d8YgIhg\nB2BCn6qODsRxReQHwHBVvaGrx1DVt4HTeiyoIFLVhJ44TnvnVVVn9sSxzcBmVxgmZIljn9F+QkSO\n+4Ha3rLOHsP0HvtnNB0SkW0icoH3eIqIvCsih0SkVETuF5Eon21VRG4VkU3eNg+IiLRzzBnAt4Gr\nvaKYVd7yN0XkbhH5F1ADDBORm0VknYhUichWEfmSz3HOE5GSNrHeKSKrRaRCRJ4WkZh2Xj/ai2+M\nz7IMEakVkUwRSReRV7xtDojI2+0lLxF5UETubbPsbyLyde/xXSKyxYt9rYhcdpLzrCIy3HucJiIv\niUiliLwHnNpm29+IyE5v/UoRmebHef2i9zhMRL4rIttFZK+I/ElEkr11+V4cN4rIDhHZJyLfOUnM\n0SJyr7ftHhGZJyKxvn8bEfmmiJQBj7a3zNv2FhHZ7J3rl0RkSJvzcpuIbAI2nSgWE3iWMExnNQNf\nA9KBM4HpwH+12eZi4HRgHHAV8Om2B1HVV4GfAk+raoKqjvdZ/VlgLpAIbAf2esdMAm4GfiUik04S\n41XADKDAi+Gmdl6/HngBuLbNfm+p6l7gf4ASIAMYjPsSbm8cnSdxX84CICKDgAuBp7z1W4BpQDLw\nQ+AvIpJ9kthbPQDUAdnA572br+XABCAVeAJ4VkRiOjivrW7ybucDw4AE4P4225yNK+qbDnxPRIpO\nEOc9wEgvluFADvA9n/VZXoyn4P6mxy0TkU8CP8Od/2zc3/wpjnUpMBUYdYI4TC+whGE6RVVXqupS\nVW1S1W3AQ8C5bTa7R1UPqeoOYDHuy6Qz/qiqH3mv0aiqf1fVLeq8BfwD9yV8Ir9V1d2qegB4+SSv\n/wRwjc/z67xlAI24L69TvBje1vYHXnsbl0ha47kCeFdVdwOo6rNeLC2q+jTuF/KUk715EQkHLge+\np6qHVXUN8JjvNqr6F1Xd752jXwLR+F+Xcz1wn6puVdVq4FvANW2Ke36oqrWqugpYBRyXeLwkORf4\nmqoeUNUqXLLyPactwPdVtV5Va0+w7HrgEVV930vk3wLOFJF8n+P8zHuNWkzQWMIwnSIiI72imjIR\nqcR9QaS32azM53EN7hdsZ+xs85ozRWSpV1xxCJjVzmt25fUXA3EiMtX7cpoAvOit+wWwGfiHVwx2\nV3sH8JLIUxy9UrkOeNwn9s+JyAde0dYhYEwHsYO7qong2POw3XcDr9htnVfsdgh3BdPRcVsNaXO8\n7d7rDfZZ5s85zADigJU+7+9Vb3mrclWta7Nf22XHxOMlsf24q5VWx3wmTHBYwjCd9SCwHhihqkm4\noprj6ij8dKKhko8sF5Fo4HngXmCwqqYAC7rxmkdfRLUZeAb3ZX8t8Ir3KxlVrVLV/1HVYcAlwNdF\nZPoJDvUkcIWInIIrNnnei/0U4PfA7UCaF/saP2IvB5qAPJ9lQ1sfePUV38AV4Qzyjlvhc9yOhqDe\njSsO8j12E7Cng/3a2gfUAqNVNcW7Jbdp7dVeLG2XHROPiMQDacCuDo5jepklDNNZiUAlUC0ihcCX\nu3GsPUB+e5XJPqJwxS3lQJOIzMTVEf
SUJ4CrccUircVRiMjFIjLcK3apwNXdtLR3AFX9D+7L82Fg\nkaoe8lbF477oyr1j3oy7wjgpL5G9APxAROJEZBTg24ciEfcFXw5EiMj3cPU7rTo6r08CXxORAhFJ\n4GidR1NHsbWJswWXEH8lIpnee8wRkePqrDrwJHCziEzwfiD8FFjmFXmaEGIJw3TWnbhilyrcl8XT\n3TjWs979fhF5v70NvF/8d+CuBA56r/1SN16z7fGXAYdxxSILfVaNAF4HqoF3gd+p6uKTHOoJ4AJ8\nko6qrgV+6e2/BxgL/MvP0G7HFQOVAX/Ea03kWYQr+tmIK8qp49gim47O6yPAn4ElwMfe/l/xM662\nvokrulvqFVG+Tif7xajq68D/4q7MSnEtwq456U4mKMQmUDLGGOMPu8IwxhjjF0sYxhhj/GIJwxhj\njF8sYRhjjPFLvxrIKz09XfPz84MdhjHG9BkrV67cp6oZHW/ZzxJGfn4+K1asCHYYxhjTZ4jI9o63\ncqxIyhhjjF8sYRhjjPGLJQxjjDF+6Vd1GMaY/qOxsZGSkhLq6toOdmu6IiYmhtzcXCIjI7t8DEsY\nxpiQVFJSQmJiIvn5+cjxkzaaTlBV9u/fT0lJCQUFBV0+jhVJGWNCUl1dHWlpaZYseoCIkJaW1u2r\ntYAmDBGZISIbvLl6j5uARkTmiJt7+QMRWSEiZ/us2yYiH7auC2ScxpjQZMmi5/TEuQxYkZQ3zeQD\nwKdwcyMvF5GXvCGfW70BvKSqKiLjcENYF/qsP19V9wUqRoDG5hZ+//ZWxgxJ5pyRfvVdMcaYASmQ\nVxhTgM3evMENuGks5/huoKrVPvMkt04206siwoT5S7aycE1pb7+0MSaEHTp0iN/97ned3m/WrFkc\nOnSo4w37oEAmjByOndSlhGPn6AVARC4TkfXA34HP+6xS4HURWSkic0/0IiIy1yvOWlFeXt7pIEWE\nwqxE1pVWdXpfY0z/daKE0dR08okJFyxYQEpKSqDCCqqgV3qr6ouqWghcCvzYZ9XZqjoBmAncJiLn\nnGD/+aparKrFGRldK1Iqyk5iQ1kVLS02mZQxxrnrrrvYsmULEyZM4PTTT2fatGlccskljBo1CoBL\nL72UyZMnM3r0aObPn39kv/z8fPbt28e2bdsoKirilltuYfTo0Vx44YXU1tYG6+30iEA2q93FsZPY\n53LspO7HUNUlIjJMRNJVdZ+q7vKW7xWRF3FFXEsCEWhRVhK1jc3sOFBDfnp8IF7CGNMNP3z5I9bu\nruzRY44aksT3Z48+4fp77rmHNWvW8MEHH/Dmm29y0UUXsWbNmiPNUh955BFSU1Opra3l9NNP5/LL\nLyctLe2YY2zatIknn3yS3//+91x11VU8//zz3HDDDT36PnpTIK8wlgMjvInmo3Bz9B4zF7OIDBev\n6l5EJgHRuHmI40Uk0VseD1wIrAlUoIXZiQCsL+vZD6Qxpv+YMmXKMX0Yfvvb3zJ+/HjOOOMMdu7c\nyaZNm47bp6CggAkTJgAwefJktm3b1lvhBkTArjBUtUlEbsdNWB8OPKKqH4nIrd76ecDlwOdEpBGo\nBa72WkwNBl70ckkE8ISqvhqoWEdkJhImsK60ihljsgP1MsaYLjrZlUBviY8/Wvrw5ptv8vrrr/Pu\nu+8SFxfHeeed124fh+jo6COPw8PDrUjqZFR1AbCgzbJ5Po9/Dvy8nf22AuMDGZuv2Khw8tPj7QrD\nGHNEYmIiVVXtN4apqKhg0KBBxMXFsX79epYuXdrL0QWHDQ3iKcpKYs3uimCHYYwJEWlpaZx11lmM\nGTOG2NhYBg8efGTdjBkzmDdvHkVFRZx22mmcccYZQYy091jC8BRmJfL3D0uprm8iIdpOizEGnnji\niXaXR0dHs3DhwnbXtdZTpKens2bN0arXO++8s8fj621Bb1YbKgqzkwDYUGb9MYwxpj2WMDyFWdZS\nyh
hjTsYShid3UCyJ0RGstx7fxhjTLksYHhGhMDvRrjCMMeYELGH4KMxKYn1pFUfHQzTGGNPKEoaP\nwuxEquqb2HWob3euMcaYQLCE4aMwy7WUsnoMY0xnJSQkALB7926uuOKKdrc577zzWLHi5PPB/frX\nv6ampubI81AaLt0Sho/TrKWUMaabhgwZwnPPPdfl/dsmjFAaLt0Sho+E6AiGpsbZ3BjGGO666y4e\neOCBI89/8IMf8JOf/ITp06czadIkxo4dy9/+9rfj9tu2bRtjxowBoLa2lmuuuYaioiIuu+yyY8aS\n+vKXv0xxcTGjR4/m+9//PuAGNNy9ezfnn38+559/PnB0uHSA++67jzFjxjBmzBh+/etfH3m93hpG\n3bo0t1GYlcg6u8Iwxj/7NsOmRTD0DMieCGEB+g268C4o+7Bnj5k1Fmbec8LVV199NV/96le57bbb\nAHjmmWdYtGgRd9xxB0lJSezbt48zzjiDSy655ITzZT/44IPExcWxbt06Vq9ezaRJk46su/vuu0lN\nTaW5uZnp06ezevVq7rjjDu677z4WL15Menr6McdauXIljz76KMuWLUNVmTp1Kueeey6DBg3qtWHU\nLWG0UZSdxOvr9lDb0ExsVHiwwzEmdK1+Fl7+b2g87J4nZMHIT8Nps2DYuRAZG9z4umnixIns3buX\n3bt3U15ezqBBg8jKyuJrX/saS5YsISwsjF27drFnzx6ysrLaPcaSJUu44447ABg3bhzjxo07su6Z\nZ55h/vz5NDU1UVpaytq1a49Z39Y777zDZZdddmTU3M985jO8/fbbXHLJJb02jLoljDaKshNpUdi0\nt4pxuaFRbmhMSGmsg1fvgpWPwtAz4aJful//GxbAmhfg/ccgIhaGnQenzYSRMyBxcEdHPbmTXAkE\n0pVXXslzzz1HWVkZV199NY8//jjl5eWsXLmSyMhI8vPz2x3WvCMff/wx9957L8uXL2fQoEHcdNNN\nXTpOq94aRt3qMNqwllLGnMT+LfCHC1yyOOurcOMrMHg0jL8GrvoTfGML3PACTPos7PkIXr4DfjkS\nfv9JeOsXULYG+lA/p6uvvpqnnnqK5557jiuvvJKKigoyMzOJjIxk8eLFbN++/aT7n3POOUcGMFyz\nZg2rV68GoLKykvj4eJKTk9mzZ88xAxmeaFj1adOm8de//pWamhoOHz7Miy++yLRp03rw3XbMrjDa\nGJoaR2xkuNVjGNPW2r/B324HCYNrn4bTZhy/TUQ0DJ/ubjP/r0saGxfChoWw+CfulpznrjrGXQ15\np/f+++iE0aNHU1VVRU5ODtnZ2Vx//fXMnj2bsWPHUlxcTGFh4Un3//KXv8zNN99MUVERRUVFTJ48\nGYDx48czceJECgsLycvL46yzzjqyz9y5c5kxYwZDhgxh8eLFR5ZPmjSJm266iSlTpgDwxS9+kYkT\nJ/bqLH7Sn3o1FxcXa0dtnP1x6QP/IjYynCfnDowx7o05qaYGeO1/Ydk8yCmGKx+FlKGdP05VGWxc\nBBtfhS2LoakOzrkTzvsWhB1fX7hu3TqKiop64A2YVu2dUxFZqarF/uxvVxjtKMpO5NU1ZajqCVs/\nGDMgHNwOz94Eu9+HM/4LLvghRER17ViJWTD5Rnerr3b1IEt+ATuWwuUPu/UmpFkdRjsKs5I4WNPI\nnsr6YIdi+rr6aijf6O77mvUL4KFprt7i6r/AjJ91PVm0FZ0Ac+6HSx+EkhUwbxpsfatnjm0Cxq4w\n2tE6N8a6skqykmOCHI0JWU31ULkLKnZ59yVHn1eUQGUJ1HnT/iZkwdV/hrwpwY3ZH82N8MYP4d//\nD7LHw5WPQWpBYF5rwnWQPQGevRH+fKkrnpp255H+HHaV33N6ovrBEkY7WmffW19axfmnZQY5GhMS\nVGH1M7DupaOJ4XD58dvFpkJyjivjP+VMSMqB+HR4+z54dBbM/DkU
fx5C9UuwYhc8dzPsXAanfxEu\nvBsiA/yjafAouGUxvPI1WHw37HgXPvN7YmJi2L9/P2lpaZY0TkZboLkBIk78d1JV9u/fT0xM9/6W\nAU0YIjID+A0QDjysqve0WT8H+DHQAjQBX1XVd/zZN5CSYyPJSYm1MaWMc3Cb+zLb8k8YlA9pw90v\n7+RclxCScyApF5KGQFRc+8comg3P3wJ//7qrD5j1y8B/EftL1V0tfbwEXvyS+/K54hEYc3nvxRCd\nAJ+ZD/lnwYJvwLyzyf3MI5RUQXl5O4l5oGtudI0Gmurc307C3OfvJGJiYsjNze3WywYsYYhIOPAA\n8CmgBFguIi+p6lqfzd4AXlJVFZFxwDNAoZ/7BlRhVqL1xRjoWppdy6B//sT9Q866F4q/0LXhL2IH\nwXVPw5s/cxW9e9a6Iqrk7v0DH6e6HFb+EQ7vhcYaaKx1He1aHzfVestqvWXeOrziiszRrj9F+vCe\njcsfIjD5JhgyCZ69kcg/XUzB9P+FT/x34IYc6SsqSlwdz9Y34eO3oHqPW556qusgOexcOG1ku63N\nelIgrzCmAJtVdSuAiDwFzAGOfOmrqm9NYDxHPrUd7xtohdmJvLWxnPqmZqIjbIiQAWfPR/DSV2DX\nShjxabj4vu5/uYeFwye/68rsX7wVHjoXrnoM8s/ufrx1FfDv++HdB1wCiE1xva0jYyEyzruPcctb\nl0XEHLs+LtX1jQj2kB7Z42DuW+78v/4D2P4uXDbPxTdQ1B6Ebe+4BLH1Tdi/2S2Pz3AJouBclyS6\n0ry5GwKZMHKAnT7PS4CpbTcSkcuAnwGZwEWd2dfbfy4wF2Do0J47eYVZSTS1KFv2HmbUkKQeO64J\ncU317grgnV9BTApc/gdXNNOTZehFF0P6P+Gp6+CxS+DTd8PUW7v2Go21sPxhV0dSewBGXeqSUvqI\nnos3GGKS4Mo/uvf26rfgoXPgikdDvqNftxzcBisfcwmi9ANXNxEZ74rpij/vEkXmqKDWfwW90ltV\nXwReFJFzcPUZF3Ry//nAfHAd97oUxAdPQu7px1yGF2UfnRvDEsYAsf1dN5TFvo0w/lr49E8D96s2\nYyTc8k/465ddf4Rd78Ps35y4DqSt5ib44HF48x6o2g2nfhKmfw+GTAxMvMEgAlNugZxJri/IozPg\nUz+GM77svkzrKqDmANTsd8my3ccH3ePag5B2qje21czgFLmdSF0lvP1LWPo7Vwyaezqc8w13BZFT\n3HNNmXtAIBPGLiDP53mut6xdqrpERIaJSHpn9+2WmgOw6NuuwuiLrx+5HM9PiycqIoz1ZVaP0e/V\nVbpmpMsfhuShcMPzMLxTv1u6JiYJrvqz+7JYfDeUr3P9HQbln3iflhZY9zdXr7J/s/tC+cxDUHBO\n4OMNlpzJ8KUl8NfbYNG3XD1QfRVHS7DbCIt0iT42FeLSXHKISYbdq+Af33W3tBFuaJORMyFvKoQH\n4bdzSzP858/ub3m4HMZd45J+ck7vx+KnQJ6l5cAIESnAfdlfA1znu4GIDAe2eJXek4BoYD9wqKN9\ne0xcqmud8fgVsPAbcMn/AyAiPIyRgxNYV9rHW0q993tY9aTrB5Cc49Oyx7tPzA7OP0uo2PCqa7lU\nudv1ZD7/O67FTm8JC4Nz/w8MmQDPfwHmn+eKwYZPP3Y7VddK640fueKKjEK4+nEovCh0m+j2pNhB\ncM3jrkK/7EOXCHyTQtwgdx+bCtGJJz4nh3a44Uk2LICl81xfk9hBMOJCd/Vx6nSXyANt65uw6Duw\nZw3kneEaRORMDvzrdlPAvilUtUlEbgcW4ZrGPqKqH4nIrd76ecDlwOdEpBGoBa5W17uk3X0DFSsj\nPgVnfx3euQ9OOcuNvAkUZSWxeEMfbtK3fgEsuBMyiuDAVtj2NtS3SYASdjSZ+CaS5Bz3pZQ2on+2\nUKkuh1e/CWued+XCV/0Jcv0a
TicwRnzK9UV4+rPux8v077nRYEVg53J3BbTtbXcFdOmDrnI6wC1i\nQo4IFN/cvWOkDHXFXFNucVeWW/7pxrbauAhWP+2uTvLPcnN6jJwBg07pmdhb7dvsrnA2LnR/yyse\nhdGX9Zmkb4MPtmpugj9dArv/4/5xMwv5wzsf8+NX1rL8OxeQkRjd8TFCSflGN6R0+nC4+dWjbf7r\nKtrvmVxZcnR5k8+4/DEprndy3hR36Z4zGaLiux9fSzPs2+R+LZathvIN7pdha58G3wQWl9a1f6iW\nFqjZ1/773LoYGg7DOf/HfTGHSjlxw2E3IuxHL7i+G6qw/hWIS3exFt/sRoQ1PaulGXa+5648Nr7q\n6rHA/ZgoOOfo57+rLeVqD8Jb/xfem+9ar037uruiDYG+OJ0ZfNAShq/KUph3tuuZe8s/+feOWq57\neBl/+cJUzh6R3vH+oaKuwiWLugqY+2bnPuSqrl6nYqe7XN65zP0jla936yUcssa4y2jff6KTfaE3\n1MDetVC66miC2LPW9QkACI9yVzKNh13RUHPDsftHxLg6prZXQEm5EJ/mrhZ8E15rYmjvWOHR7liZ\no9yv+MyTD08dFKquqOT177tWMmfd4Sp6oxODHdnAsX+LG5J946uuaXVjjVuelHP0c583BbLGQXjk\niY/T3AgrHnH1LrWHYNLnXCu2hNAZQcISRnds+Sf8+TMw/hr2X/BrJt/9Bt+9qIgvThvWM0EGWksL\nPHUtbH4dPveSu7zuCbUH3SBxO5e5W8nKo1NzJg45+k+Uezo0VLnEULra3e/f5Fq1gKt8zBrn3ca6\nNvfpI4/+053sqqD1eVUpaPPxMUq4T2Jpp4gtKdf9GOgjl/+Ub3TxDqT+B6GoudH78fTe0R9QFV6r\n/4hY14orb+rRJBKX6pL+ptfgH99xVysF57hWd1ljg/te2mEJo7sW/xTe+jlccj9TFmYzbUQGv7xq\nfPeP2xtaY5/5C5g6N3Cv09wEez9y/zw7lnr/RDuO3SY572hiaE0OyXnd/8JubnI9XSt3weF9rjNT\ncg4kDB545fomOCp2HU0eO5e5q+aWJrcubYSrSC95z/XEvvAnrkI9RH+oWMLorpZmN3Lmzvf4dvpv\nWdUwhL/f0btTIXbJulfg6ethwvUw54He/4BW7naX79FJLkHYL2MzUDTUuPrP1iRyYKub9+P0W0Kn\nfuwELGH0hOq9MO9s9jfFcEHVD1j+o0uJCA/h1kLlG7xK7pFw88KQqEwzxoS+ziSMEP4GDLKETLj8\nD6TW7eQHYb9na3kIT4BTV+GGmYiMdR2/LFkYYwLAEsbJFExj3+n/w5zwf1Pz7sPBjqZ9LS3wwlw3\nDs1VfwrpXqLGmL7NEkYHki+8iyUt4xiz+meuWWioefNnrunfjHvglE8EOxpjTD9mCaMDUZERPJDy\nDSrDkuCZG49OuRkK1r4ES/4vTLzBzY5mjDEBZAnDD0Ny8vhW2NfcODQvfcW1sQ62vevdSKc5k93s\nbSHaZM8Y039YwvBDYVYii6oKqD3nu7D2b657fzDVHvIqueOsktsY02ssYfihMNuNXrlq6GfdgGSL\nvuP6GwRDSzO8cAsc2u4quTuYx9cYY3qKJQw/FGW1TqZU7UYKTcxyE7rUHuz9YBb/FDb9A2b+HE45\ns/df3xgzYA3giRD8l5EYTWp8lJtMKa7ATR35yAz463+51klNdW5wssbaY29NrY9roNFnm5ZG1xs6\nLvXoGP6+4/vHDmp/joq1L8Hb97oBzIq/0OvnwRgzsFnC8IOIUJSdeHQypdxiuPDHbmrNDQv8P1BE\nrKtvCIt0ra2a60+8bUzy8Ylk3ctucL9Z91oltzGm11nC8FNhVhKPL9tOc4sSHiYw9VY3GUvtQdfD\nOjLODcMdGec9jz12eUTMsRMRqbq5D9qbf7jtvMTVe1yrqPThbkpPmw/BGBMEljD8VJiVSF1jC9
v3\nH2ZYRoL7hV94UdcPKOKmAo1OcInHGGNCnFV6+6nIaym1vqwqyJEYY0xwWMLw0/DMBMIE1pdWdryx\nMcb0Q5Yw/BQTGc6wjATW2RWGMWaACmjCEJEZIrJBRDaLyF3trL9eRFaLyIci8m8RGe+zbpu3/AMR\n6aFJLrqnMCuR9WV2hWGMGZgCljBEJBx4AJgJjAKuFZFRbTb7GDhXVccCPwbajrlxvqpO8Hdyj0Ar\nyk5i54Faquoagx2KMcb0ukBeYUwBNqvqVlVtAJ4C5vhuoKr/VtXW7tJLgdwAxtNtRdmux/cGK5Yy\nxgxAgUwYOcBOn+cl3rIT+QKw0Oe5Aq+LyEoRmXuinURkroisEJEV5eXl3Qq4I4VZrqWU1WMYYwai\nkOiHISLn4xLG2T6Lz1bVXSKSCbwmIutVdUnbfVV1Pl5RVnFxcUDHHc9OjiEpJsJaShljBqRAXmHs\nAvJ8nud6y44hIuOAh4E5qrq/dbmq7vLu9wIv4oq4gkpEKMxOsr4YxpgBKZAJYzkwQkQKRCQKuAZ4\nyXcDERkKvAB8VlU3+iyPF5HE1sfAhcCaAMbqt6KsRDaUVdHSEgKTKBljTC8KWJGUqjaJyO3AIiAc\neERVPxKRW73184DvAWnA78QNptfktYgaDLzoLYsAnlDVVwMVa2cUZidRXb+dXYdqyUuNC3Y4xhjT\nawJah6GqC4AFbZbN83n8ReC4yahVdSswvu3yUFDozY2xrrTSEoYxZkCxnt6ddFpWIiI2ppQxZuCx\nhNFJcVER5KfFW49vY8yAYwmjCwqzEllXalcYxpiBxRJGFxRmJbFt/2FqGpqCHYoxxvQaSxhdUJid\niCps3FMd7FCMMabXWMLogiJviBDr8W2MGUgsYXRB7qBY4qPCraWUMWZAsYTRBWFhwmlZiayzKwxj\nzABiCaOLWseUUrUhQowxA4MljC4qyk6ioraRssq6YIdijDG9whJGFxV5Q4Ss3W3FUsaYgcESRheN\nGpJETGQYb20M7KRNxhgTKixhdFFcVATTCwez4MNSmppbgh2OMcYEnCWMbpg9Ppt91Q0s+/hAsEMx\nxpiAs4TRDeedlkl8VDgvr9od7FCMMSbgLGF0Q0xkOBeOzmLhmjIamqxYyhjTv1nC6KbZ47OpqG3k\nnc1W+W2M6d8sYXTT2cMzSI6N5OVVpcEOxRhjAsoSRjdFRYQxY3QWr63dQ11jc7DDMcaYgLGE0QNm\njx9CdX0Tb27YG+xQjDEmYAKaMERkhohsEJHNInJXO+uvF5HVIvKhiPxbRMb7u28oOWNYKukJUVYs\nZYzp1wKuZlSCAAAafUlEQVSWMEQkHHgAmAmMAq4VkVFtNvsYOFdVxwI/BuZ3Yt+QEREexqyx2byx\nfg+H620WPmNM/xTIK4wpwGZV3aqqDcBTwBzfDVT136p60Hu6FMj1d99QM3v8EOoaW3h93Z5gh2KM\nMQHRYcIQkXAR+VoXjp0D7PR5XuItO5EvAAs7u6+IzBWRFSKyorw8eE1bJw8dRFZSjBVLGWP6rQ4T\nhqo2A9cGMggROR+XML7Z2X1Vdb6qFqtqcUZGRs8H56ewMOHicdm8tXEvFTWNQYvDGGMCxd8iqX+J\nyP0iMk1EJrXeOthnF5Dn8zzXW3YMERkHPAzMUdX9ndk31MweP4TGZmXR2rJgh2KMMT0uws/tJnj3\nP/JZpsAnT7LPcmCEiBTgvuyvAa7z3UBEhgIvAJ9V1Y2d2TcUjctNZmhqHC+v2s1VxXkd72CMMX2I\nXwlDVc/v7IFVtUlEbgcWAeHAI6r6kYjc6q2fB3wPSAN+JyIATV7xUrv7djaG3iYizB6fzby3trK/\nup60hOhgh2SMMT1G/JmTWkSSge8D53iL3gJ+pKoVAYyt04qLi3XFihVBjWFdaSUzf/M2P7l0DDec\ncUpQYzHGmI6IyEpVLfZnW3/rMB4BqoCrvFsl8GjXwuvfCr
MSGZ6ZYEOeG2P6HX8Txqmq+n2vX8RW\nVf0hMCyQgfVVIsLscUN4b9sB9lTWBTscY4zpMf4mjFoRObv1iYicBdQGJqS+7+Lx2ajC31dbnwxj\nTP/hb8K4FXhARLaJyDbgfuBLAYuqjzs1I4FR2Um8vNqKpYwx/Yc/Pb3DgNNUdTwwDhinqhNVdXXA\no+vDZo8fwn92HGLngZpgh2KMMT3Cn57eLcA3vMeVqloZ8Kj6gYvHZQPwihVLGWP6CX+LpF4XkTtF\nJE9EUltvAY2sj8tLjWPi0BRrLWWM6Tf8TRhXA7cBS4CV3i24HR76gNnjhrC2tJIt5dXBDsUYY7rN\n3zqMG1S1oM3NmtV24KJx2YjAKzaCrTGmH/C3DuP+Xoil3xmcFMOU/FReWrULf3rUG2NMKPO3SOoN\nEblcvAGfjP9mjx/ClvLDrC+rCnYoxhjTLf4mjC8BzwD1IlIpIlUiYq2l/DBzTBbhYcIr1ifDGNPH\n+ZswkoGbgJ+oahIwGvhUoILqT9ISojlreDovryq1YiljTJ/mb8J4ADiDozPvVWH1Gn67eFw2Ow7U\nsLokpAb3NcaYTvE3YUxV1duAOgBVPQhEBSyqfubTo7OIDBfrk2GM6dP8TRiNIhKOm2UPEckAWgIW\nVT+THBvJuSMzeWV1KS0tVixljOmb/E0YvwVeBDJF5G7gHeCnAYuqH5o9PpuyyjpW7jgY7FCMMaZL\n/J2i9XERWQlMBwS4VFXXBTSyfuaCosHERIbx8qrdnJ5vo6oYY/oef68wUNX1qvqAqt5vyaLz4qMj\nmF44mAUfltLUbKV5xpi+x++EYbpv9vhs9lU3sHTrgWCHYowxnRbQhCEiM0Rkg4hsFpG72llfKCLv\niki9iNzZZt02EflQRD4QkX4x0OF5p2WSEB1hnfiMMX1SwBKG16rqAWAmMAq4VkRGtdnsAHAHcO8J\nDnO+qk5Q1eJAxdmbYiLDuXDUYBauKaOhyYqljDF9SyCvMKYAm1V1q6o2AE8Bc3w3UNW9qrocaAxg\nHCHlkglDqKht5IX3S4IdijHGdEogE0YOsNPneYm3zF+Km7hppYjM7dHIgujckRlMPmUQv3xtI9X1\nTcEOxxhj/BbKld5nq+oEXJHWbSJyTnsbichcEVkhIivKy8t7N8IuEBG+e1ER5VX1PPTWlmCHY4wx\nfgtkwtgF5Pk8z/WW+UVVd3n3e3GdBqecYLv5qlqsqsUZGRndCLf3TBw6iDkThjB/yVZ2HaoNdjjG\nGOOXQCaM5cAIESkQkSjgGuAlf3YUkXgRSWx9DFwIrAlYpEHwjRmFAPzi1fVBjsQYY/wTsIShqk3A\n7cAiYB3wjKp+JCK3isitACKSJSIlwNeB74pIiYgkAYOBd0RkFfAe8HdVfTVQsQZDTkosX5xWwF8/\n2M0HOw8FOxxjjOmQ9Kc5GoqLi3XFir7TZaO6vonzfvEm+WlxPHvrmdiEhsaY3iYiK/3tuhDKld79\nXkJ0BHdeOJIV2w+ycE1ZsMMxxpiTsoQRZFcW51GYlcjPFq6jvqk52OEYY8wJWcIIsvAw4bsXjWLn\ngVr++K9twQ7HGGNOyBJGCDh7RDqfLMzk/n9uZn91fbDDMcaYdlnCCBHfnlVITWMzv359U7BDMcaY\ndlnCCBHDMxO5YepQnnhvB5v2VAU7HGOMOY4ljBDy3xeMJC4qnJ8usPmpjDGhxxJGCEmNj+KOT45g\n8YZylmwM/XGxjDEDiyWMEPO5T5zC0NQ4fvL3tTaVqzEmpFjCCDHREeF8a2YhG/dU88wKmzPDGBM6\nLGGEoBljspiSn8p9r22gqm7AzC1ljAlxljBCkIjw3YuL2FfdwINv2pwZxpjQYAkjRI3LTeEzE3N4\n+J2PKTlYE+xwjDHGEkYou/PTpxEm8PNXNwQ7FGOMsYQRyoakxDJ32jBeXrWbldsPBjscY8wAZwkj\nxH3p3FPJTIzmJ39fS3
+au8QY0/dYwghx8dER3Pnp0/jPjkO8sro02OEYYwYwSxh9wOWTchmVncQ9\nC9ezt7Iu2OEYYwYoSxh9QHiYcPdlYzhY08AV895lx35rNWWM6X2WMPqIiUMH8fgXp1JR28gV8/7N\nhjIb0dYY07ssYfQhE4cO4tlbz0QErnroXd7fYS2njDG9J6AJQ0RmiMgGEdksIne1s75QRN4VkXoR\nubMz+w5UIwcn8tytnyAlLpIbHl7G25tsVFtjTO8IWMIQkXDgAWAmMAq4VkRGtdnsAHAHcG8X9h2w\n8lLjePbWMxmaGsfn/7icBR9a6yljTOAF8gpjCrBZVbeqagPwFDDHdwNV3auqy4G2I+x1uO9Al5kY\nw9Nzz2Rcbgq3P/E+Ty/fEeyQjDH9XCATRg6w0+d5ibesR/cVkbkiskJEVpSXD6zimeS4SP78hSmc\nPSKDbz7/IQ+9ZQMVGmMCp89XeqvqfFUtVtXijIyMYIfT6+KiInj4c8VcPC6bny1cz89fXW89wo0x\nARERwGPvAvJ8nud6ywK974ATFRHGb66ZSFJsJA++uYWK2kZ+PGcM4WES7NCMMf1IIBPGcmCEiBTg\nvuyvAa7rhX0HpPAw4e5Lx5ASG8nvvKTxq6smEBXR5y8ijTEhImAJQ1WbROR2YBEQDjyiqh+JyK3e\n+nkikgWsAJKAFhH5KjBKVSvb2zdQsfYXIsI3ZhSSHBvJzxaup7quiQdvmERcVCB/FxhjBgrpT+Xd\nxcXFumLFimCHERKeem8H337xQyYOHcQjN55OclxksEMyxoQgEVmpqsX+bGvlFf3UNVOG8sB1k/iw\npILLfvcv/r15X7BDMsb0cZYw+rGZY7N57PNTaGxp4bqHl/GVJ/9DWYWNdmuM6RpLGP3cmaem8drX\nzuWrF4xg0UdlTP/lm8xfsoXG5pZgh2aM6WMsYQwAMZHhfPWCkbz+tXM589Q0frpgPTN/87YVUxlj\nOsUSxgAyNC2Oh288nT/cWEx9UzPXPbyM259434qpjDF+sYQxAE0vGnykmOq1tXusmMoY4xdLGANU\nazHVa1ZMZYzxkyWMAc63mKqhqcWKqYwxJ2RdgA3giqnOGp7OQ29t5Xdvbuaf6/dy67mncuOZ+dbp\nzxgDWE9v046dB2r48Str+cfaPSRER3D91KF84ewCMpNigh2aMaaHdaantyUMc0Jrd1cy760tvLJ6\nNxFhYVw+OZcvnTOM/PT4YIdmjOkhljBMj9qxv4aHlmzh2ZUlNDW3MGtsNreeeypjcpKDHZoxppss\nYZiA2FtVxyPvbOMvS7dTXd/EuSMz+PJ5pzK1IBURm3vDmL7IEoYJqIraRv6ydDuP/utj9lU3MHFo\nCv913nCmF2YSZpM2GdOnWMIwvaKusZlnV+zkoSVbKTlYy4jMBG4991Rmjs2yOTiM6SMsYZhe1dTc\nwiurS3nwzS1s2FOFCAxNjaMwK5HTspK8+0Ty0+Jt2lhjQkxnEob9DDTdFhEexqUTc5gzYQjvbN7H\n+9sPsWFPJetLq3ht7R5avN8k0RFhjBicwGmDkyjKdknktKxEMhKirQ7EmD7AEobpMSLCtBEZTBuR\ncWRZXWMzm/ZUs76skg1lVWzYU8WSTeU8/37JkW1S46MozErknJEZzBqTzdC0uGCEb4zpgBVJmaDY\nX13PhrIq1pdVsaGsig93VbC2tBKAMTlJzByTzUVjs63PhzEBZnUYpk/aeaCGV9eU8fcPS/lg5yEA\nirKTmDUmi1njsjk1IyHIERrT/4RMwhCRGcBvgHDgYVW9p8168dbPAmqAm1T1fW/dNqAKaAaa/HlD\nljD6j12Hanl1TRkLPyxlxfaDAJw2OJGZY7OYNTabkYMTgxyhMf1DSCQMEQkHNgKfAkqA5cC1qrrW\nZ5tZwFdwCWMq8BtVneqt2wYUq6rf421bwuifyirqeHVNKQvWlLF82wFUYXhmArPGZDFj
TDZF2YlW\naW5MF4VKK6kpwGZV3eoF9RQwB1jrs80c4E/qstZSEUkRkWxVLQ1gXKaPyUqO4aazCrjprAL2Vtax\n6KMyFnxYxv2LN/Pbf24mLT6KKQWpnDEsjanDUhmZmWgdCI0JgEAmjBxgp8/zEtxVREfb5AClgAKv\ni0gz8JCqzm/vRURkLjAXYOjQoT0TuQlZmUkxfPbMfD57Zj7lVfUsXr+XpR/vZ9nWAyxcUwbAoLhI\nTs8/mkCKspIsgRjTA0K5We3ZqrpLRDKB10RkvaouabuRl0jmgyuS6u0gTfBkJEZz1el5XHV6HuAq\nzZd9fIBlW/ez9OP9/GPtHgCSYiKYUpDK1II0zhiWxqghSdaB0JguCGTC2AXk+TzP9Zb5tY2qtt7v\nFZEXcUVcxyUMY1rlpcaRlxrHFZNzAdh9qJZl3tXH0q37eX3dXgASoyMozh/E1GEugYwZkkREuE0+\naUxHApkwlgMjRKQAlwSuAa5rs81LwO1e/cZUoEJVS0UkHghT1Srv8YXAjwIYq+mHhqTEctnEXC6b\n6BLInso6lm7dz7KPXQJZvKEcgPiocCbnpzK1IJUzhqUyNieFqIieSSAVNY1s23+Y1Pgo8lKtQ6Lp\n2wKWMFS1SURuBxbhmtU+oqoficit3vp5wAJcC6nNuGa1N3u7DwZe9Fq+RABPqOqrgYrVDAyDk2KY\nMyGHORNyADdc+3sfH2DZ1gMs+3g/v1i0AYDYyHAmnZLCGQVpTB2Wxvi8ZKIjwts9pqpy4HAD2/bX\nsH3/4ePuD9U0Htl2XG4ys8cN4aJx2QxJiQ38Gzamh1nHPWM8+6vrXQLxrkDWl1UBbgysiUNTmFqQ\nRk5KLDsO1LBt/2G273f3VXVNR44hAjkpseSnxXNKWhz5afEMTYtj277DvLK6lA93VQAw+ZRBzB6X\nzayx2Tb1rQmqkOiHEQyWMExPOlTTwHsfH2CpdwWytrQSVQgPE3IHxXJKWjz5aXHH3Oelxp7wagTw\nEsduXlldyvoyN7Lv1IJUZo8fwswx2aTGR/XiOzTGEkawwzD9VEVtI4dqGhiSEktkD1SSb9pTxcur\nS3ll9W62lh8mPEz4xKlpzB4/hE+PyiI5LrIHojbm5CxhGNOHqCprSyt5xUseOw/UEhkunDMig8n5\ng8gbFMdQrwXYoLhI69VuepQlDGP6KFVlVUkFr6zazcI1Zew6VHvM+viocPJS48gdFEdeaix5g+K8\n5sTucXx0KHetMqHIEoYx/UR1fRMlB2vYeaCWnQdq2HGg5ujzgzXUNDQfs31afBS5qXEUpMVxakYC\nwzISGJYRT0F6PDGRJ65bMQNXqIwlZYzppoToCAqzkijMSjpuXWuT3p0HXTLZebDmSFJ57+MD/PWD\n3Ue2FYEhybEMy4jn1IwETs2IP5JMspJirJjL+MUShjF9lIiQlhBNWkI0E/JSjltf09DEx/sOs7Xc\nu+2rZmv5YZ5dsZPDPlcmcVHhFKS7RDI8M4GRgxMYnpnIKWlxPVK5b/oPSxjG9FNxURGMHpLM6CHJ\nxyxXVfZU1rO1vJot+w6ztdwlkvd3HOSlVUevSiLDhYL0eEZkJjI8M4ERgxMYkZlIfnrcSZsOm/7L\nEoYxA4yIkJUcQ1ZyDJ8Ynn7MupqGJrbsPcymvVVs2lvNpj3VfLS7ggVrSmmt7gwPE/LT4hiRmciI\nwQmckhZPYkwE8VERxEWHu/uocOKj3X10RJhfRV71Tc1U1DZSUdPIodpGDh5u4NCR5w0c8pYnxUQw\nPjeFCUNTGJGZaANJ9iJLGMaYI+KiIhibm8zY3GOvSuoam9lSXs1mL4ls2lvFxr1VvLZuD80tJ284\nEx4mxEWFkxB9bCIBqKhtoqLGJYa2Ffhtj5ESG0lybCT7qut58r2dXrzhjMlJZkJeCuNzUxifl0xO\nSqzVyQSIJQxjTIdiIsPbLd6qb2qm9FAdhxuaqGlo
5nB9E4frm93z+iYONzRT0+CW1TR4z73lqkpO\nSiyjhySREhtJSlwkyXFRRx6nxEa5+7hIEqIjjiSBlhZl2/7DrCo5xKqdFXyw8xB//Nc2GppbAEhP\niGZCXrKXQFwisU6QPcOa1Rpj+rz6pmbWl1axquQQH+w8xKqdh9hSfvjI+oL0eDITo4mKCCMyPIzI\ncCEyPIyocO95RJvn3rKo8DBS46PISIx2t4RoBsVF9asJuaxZrTFmQImOCHdXE3kpfO5Mt6yitpE1\nuyqOJJBDtY1U1zfR2NxCY5PS2NxCQ3OLe96sNDa55w3NLZzsd3R4mJCeEHUkgWQmxhxNKD6JJSMx\nOuAdKVtalEO1jVTWNpKfHh/Q1wJLGMaYfio5NpKzhqdzVpuKfX80t7iEUt/YwoGaBsqr6r1bHXtb\nH1fXs7eqno92V7Kvup72qnLiosLJSIwms00iOZpYXLJJS4g6pglzTUPTkdc88no+r9v6eF91PU0t\nyuCkaJZ9+4LunC6/WMIwxpg2wsOE8LBwYiLDSY6LpKCDX+/NLcpBn8Sy97gv+Do2lFXxTtU+Kn2G\nw28lAqlxUcRFh3OguuGYfjKtwsTVz7Qmm8KsxCOPs3ppiHxLGMYY002umCqa9IRoirJPvm1dYzP7\nqtu/YqiubyIt/vjircwkV3cS7CbEljCMMaYXxUSGkzvIDSDZ11i/f2OMMX6xhGGMMcYvljCMMcb4\nJaAJQ0RmiMgGEdksIne1s15E5Lfe+tUiMsnffY0xxvSugCUMEQkHHgBmAqOAa0VkVJvNZgIjvNtc\n4MFO7GuMMaYXBfIKYwqwWVW3qmoD8BQwp802c4A/qbMUSBGRbD/3NcYY04sCmTBygJ0+z0u8Zf5s\n48++xhhjelGfr/QWkbkiskJEVpSXlwc7HGOM6bcC2XFvF5Dn8zzXW+bPNpF+7AuAqs4H5gOISLmI\nbO9ivOnAvi7u21/ZOTmenZPj2Tk5Xl86J6f4u2EgE8ZyYISIFOC+7K8BrmuzzUvA7SLyFDAVqFDV\nUhEp92Pf46hqRleDFZEV/g7xO1DYOTmenZPj2Tk5Xn89JwFLGKraJCK3A4uAcOARVf1IRG711s8D\nFgCzgM1ADXDzyfYNVKzGGGM6FtCxpFR1AS4p+C6b5/NYgdv83dcYY0zw9PlK7x40P9gBhCA7J8ez\nc3I8OyfH65fnpF9N0WqMMSZw7ArDGGOMXyxhGGOM8cuATxg2yGH7RGSbiHwoIh+IyIpgxxMMIvKI\niOwVkTU+y1JF5DUR2eTdDwpmjL3tBOfkByKyy/usfCAis4IZY28TkTwRWSwia0XkIxH5b295v/us\nDOiEYYMcduh8VZ3QH9uT++mPwIw2y+4C3lDVEcAb3vOB5I8cf04AfuV9ViZ4LRwHkibgf1R1FHAG\ncJv3PdLvPisDOmFggxyak1DVJcCBNovnAI95jx8DLu3VoILsBOdkQFPVUlV933tcBazDjX3X7z4r\nAz1h2CCHJ6bA6yKyUkTmBjuYEDJYVUu9x2XA4GAGE0K+4s1p80h/KHrpKhHJByYCy+iHn5WBnjDM\niZ2tqhNwxXW3icg5wQ4o1HgdT61dupvHZhgwASgFfhnccIJDRBKA54Gvqmql77r+8lkZ6AnDnwES\nByRV3eXd7wVexBXfGdjjzdmCd783yPEEnaruUdVmVW0Bfs8A/KyISCQuWTyuqi94i/vdZ2WgJ4wj\nAySKSBRukMOXghxT0IlIvIgktj4GLgTWnHyvAeMl4Ebv8Y3A34IYS0ho/VL0XMYA+6yIiAB/ANap\n6n0+q/rdZ2XA9/T2mgD+mqODHN4d5JCCTkSG4a4qwI039sRAPC8i8iRwHm6o6j3A94G/As8AQ4Ht\nwFWqOmAqgU9wTs7DFUcpsA34kk/Zfb8nImcDbwMfAi3e4m/j6jH61WdlwCcMY4wx/hnoRVLGGGP8\nZAnDGGOMXyxh
GGOM8YslDGOMMX6xhGGMMcYvljCMCQEicp6IvBLsOIw5GUsYxhhj/GIJw5hOEJEb\nROQ9b96Hh0QkXESqReRX3lwIb4hIhrftBBFZ6g3K92LroHwiMlxEXheRVSLyvoic6h0+QUSeE5H1\nIvK414PYmJBhCcMYP4lIEXA1cJY3MGMzcD0QD6xQ1dHAW7jezwB/Ar6pquNwvYBblz8OPKCq44FP\n4AbsAzfK6Vdxc7MMA84K+JsyphMigh2AMX3IdGAysNz78R+LG1CuBXja2+YvwAsikgykqOpb3vLH\ngGe9MbpyVPVFAFWtA/CO956qlnjPPwDygXcC/7aM8Y8lDGP8J8BjqvqtYxaK/G+b7bo63k69z+Nm\n7P/ThBgrkjLGf28AV4hIJhyZs/kU3P/RFd421wHvqGoFcFBEpnnLPwu85c3IViIil3rHiBaRuF59\nF8Z0kf2CMcZPqrpWRL4L/ENEwoBG4DbgMDDFW7cXV88BbkjreV5C2Arc7C3/LPCQiPzIO8aVvfg2\njOkyG63WmG4SkWpVTQh2HMYEmhVJGWOM8YtdYRhjjPGLXWEYY4zxiyUMY4wxfrGEYYwxxi+WMIwx\nxvjFEoYxxhi//H8wYLTeCzzoRQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f68b853c0b8>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "from matplotlib import pyplot\n",
    "pyplot.plot(np.array([1]) - np.array(hist.history['acc']))\n",
    "pyplot.plot(np.array([1]) - np.array(hist.history['val_acc']))\n",
    "pyplot.title('ian train vs validation error')\n",
    "pyplot.ylabel('error')\n",
    "pyplot.xlabel('epoch')\n",
    "pyplot.legend(['train', 'validation'], loc='upper right')\n",
    "pyplot.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
