{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\h5py\\__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  from ._conv import register_converters as _register_converters\n",
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "import pandas as pd\n",
    "from collections import Counter\n",
    "import re\n",
    "import jieba\n",
    "from numpy import nan\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "import random as rn\n",
    "import os\n",
    "from keras import backend as K"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Reproducibility: pin every RNG in the stack (Python hash seed, NumPy,\n",
    "# random, TensorFlow) and force single-threaded TF ops (TF1-style API).\n",
    "os.environ['PYTHONHASHSEED'] = '0'\n",
    "np.random.seed(42)\n",
    "rn.seed(12345)\n",
    "session_conf = tf.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)\n",
    "tf.set_random_seed(1234)\n",
    "sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)\n",
    "K.set_session(sess)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the three annotated sheets in a single read. `sheetname=` is the\n",
    "# deprecated spelling; `sheet_name=` with a list returns a dict of frames.\n",
    "_sheets = pd.read_excel('data/xinjiang/xinjiang_labels.xlsx', sheet_name=[0, 1, 2])\n",
    "xinjiang_data, xinjiang_data1, xinjiang_data2 = _sheets[0], _sheets[1], _sheets[2]\n",
    "# Tag provenance so the merged corpus remembers where each row came from.\n",
    "xinjiang_data['source'] = 'news'\n",
    "xinjiang_data1['source'] = 'quora'\n",
    "xinjiang_data2['source'] = 'tweet'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Merge the three per-source frames into one corpus and shuffle the rows.\n",
    "cols = ['sentence','stance','source']\n",
    "frames = [part[cols] for part in (xinjiang_data, xinjiang_data1, xinjiang_data2)]\n",
    "df = pd.concat(frames, ignore_index=True)\n",
    "df = df.sample(frac=1.0).reset_index(drop=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# 数据清洗还可以细作\n",
    "from nltk.stem.porter import PorterStemmer\n",
    "from nltk.tokenize import word_tokenize\n",
    "def preprocess_word(word):\n",
    "    # Remove punctuation\n",
    "    word = word.strip('\\'\"?!,.():;')\n",
    "    # Convert more than 2 letter repetitions to 2 letter\n",
    "    # funnnnny --> funny\n",
    "    word = re.sub(r'(.)\\1+', r'\\1\\1', word)\n",
    "    # Remove - & '\n",
    "    word = re.sub(r'(-|\\')', '', word)\n",
    "    return word\n",
    "def is_valid_word(word):\n",
    "    # Check if word begins with an alphabet\n",
    "    return (re.search(r'^[a-zA-Z][a-z0-9A-Z\\._]*$', word) is not None)\n",
    "def handle_emojis(tweet):\n",
    "    # Smile -- :), : ), :-), (:, ( :, (-:, :')\n",
    "    tweet = re.sub(r'(:\\s?\\)|:-\\)|\\(\\s?:|\\(-:|:\\'\\))', ' EMO_POS ', tweet)\n",
    "    # Laugh -- :D, : D, :-D, xD, x-D, XD, X-D\n",
    "    tweet = re.sub(r'(:\\s?D|:-D|x-?D|X-?D)', ' EMO_POS ', tweet)\n",
    "    # Love -- <3, :*\n",
    "    tweet = re.sub(r'(<3|:\\*)', ' EMO_POS ', tweet)\n",
    "    # Wink -- ;-), ;), ;-D, ;D, (;,  (-;\n",
    "    tweet = re.sub(r'(;-?\\)|;-?D|\\(-?;)', ' EMO_POS ', tweet)\n",
    "    # Sad -- :-(, : (, :(, ):, )-:\n",
    "    tweet = re.sub(r'(:\\s?\\(|:-\\(|\\)\\s?:|\\)-:)', ' EMO_NEG ', tweet)\n",
    "    # Cry -- :,(, :'(, :\"(\n",
    "    tweet = re.sub(r'(:,\\(|:\\'\\(|:\"\\()', ' EMO_NEG ', tweet)\n",
    "    return tweet\n",
    "def pre_process_tweet(tweet):\n",
    "    \"\"\"Normalize one sentence/tweet and return a space-joined token string.\n",
    "\n",
    "    Lowercases; rewrites URLs, @mentions and #hashtags; strips retweet\n",
    "    markers, repeated dots and emoticons; tokenizes with NLTK's\n",
    "    word_tokenize and drops tokens listed in the stop-list `stl`.\n",
    "    \"\"\"\n",
    "    # Stop-list of punctuation/noise tokens removed after tokenization.\n",
    "    # NOTE(review): '[ ' and '] ' contain trailing spaces and '\\|' is a\n",
    "    # two-character string, so those entries can never equal a single\n",
    "    # word_tokenize token; the bare 's' entry also drops the word \"s\".\n",
    "    stl = ['…','“','”','`','[ ','] ','•','``','—','※','§','+','$','\\|','&','(',')','·',\"'\",\"''\",'–','\\'','s']\n",
    "    processed_tweet = []\n",
    "    # Convert to lower case\n",
    "    tweet = tweet.lower()\n",
    "    # Remove the possessive marker sequence \"’ s\"\n",
    "    tweet = re.sub(r'(’ s)','',tweet)\n",
    "    # Replaces URLs with the word URL\n",
    "    tweet = re.sub(r'((www\\.[\\S]+)|(https?://[\\S]+))', ' URL ', tweet)\n",
    "    # Replace @handle with the word USER_MENTION\n",
    "    tweet = re.sub(r'@[\\S]+', 'USER_MENTION', tweet)\n",
    "    # Replaces #hashtag with hashtag\n",
    "    tweet = re.sub(r'#(\\S+)', r' \\1 ', tweet)\n",
    "    # Remove RT (retweet)\n",
    "    tweet = re.sub(r'\\brt\\b', '', tweet)\n",
    "    # Replace 2+ dots with space\n",
    "    tweet = re.sub(r'\\.{2,}', ' ', tweet)\n",
    "    # Strip space, \" and ' from tweet\n",
    "    tweet = tweet.strip(' \"\\'')\n",
    "    # Replace emojis with either EMO_POS or EMO_NEG\n",
    "    tweet = handle_emojis(tweet)\n",
    "    # Replace multiple spaces with a single space\n",
    "    tweet = re.sub(r'\\s+', ' ', tweet)\n",
    "    tweet = re.sub(r'-', ' ', tweet)\n",
    "    tweet = re.sub(r'\\'s', '', tweet)\n",
    "    processed_tweet = word_tokenize(tweet)\n",
    "#     Earlier word-by-word cleaning path, kept for reference:\n",
    "#     words = tweet.split()\n",
    "#     for word in words:\n",
    "#         word = preprocess_word(word)\n",
    "#         if is_valid_word(word):\n",
    "#             if use_stemmer:\n",
    "#                 word = str(porter_stemmer.stem(word))\n",
    "#             processed_tweet.append(word)\n",
    "    return ' '.join([i for i in processed_tweet if i not in stl])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Map raw stance labels {0, 1, -1} onto contiguous class ids {0, 1, 2}.\n",
    "label_dict_weibo = {0: 0, 1: 1, -1: 2}\n",
    "df['WORDS'] = df['sentence'].apply(pre_process_tweet)\n",
    "df['LABEL'] = df['stance'].apply(label_dict_weibo.__getitem__)\n",
    "df['TARGET'] = 'The Chinese government anti terrorist in xinjiang'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 121,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Persist the processed corpus. NOTE(review): the path has no file\n",
    "# extension, and to_csv's default also writes the DataFrame index column.\n",
    "df.to_csv('data/processed_xinjiang')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 评分函数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.metrics import recall_score,accuracy_score,precision_score\n",
    "\n",
    "def f1_score(y_true,y_pred):\n",
    "    \"\"\"Mean F1 over classes 1 and 2 (class 0 is excluded from the average).\"\"\"\n",
    "    # Per-class precision/recall; [1:] keeps the last two classes only.\n",
    "    prec_1, prec_2 = precision_score(y_true, y_pred, average=None)[1:]\n",
    "    rec_1, rec_2 = recall_score(y_true, y_pred, average=None)[1:]\n",
    "    f1_1 = 2 * prec_1 * rec_1 / (prec_1 + rec_1)\n",
    "    f1_2 = 2 * prec_2 * rec_2 / (prec_2 + rec_2)\n",
    "    return (f1_1 + f1_2) / 2"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### TFIDF+SVM"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.5125468759470875 0.55697607440186\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\feature_extraction\\text.py:1059: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  if hasattr(X, 'dtype') and np.issubdtype(X.dtype, np.float):\n"
     ]
    }
   ],
   "source": [
    "from sklearn.feature_extraction.text import TfidfVectorizer\n",
    "from sklearn import svm\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "# Baseline: 5-fold CV with TF-IDF (uni+bi-gram) features and a linear SVM.\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True)\n",
    "tfidf = TfidfVectorizer(min_df=2,ngram_range=(1,2))\n",
    "x_tfidf = tfidf.fit_transform(df.WORDS)\n",
    "labels = np.asarray(df.LABEL)\n",
    "avg,acc = 0,0\n",
    "# Fixed: the splitter was fed np.zeros(1000), which only worked while the\n",
    "# corpus happened to contain exactly 1000 rows.\n",
    "for train_index, valid_index in kfold.split(np.zeros(len(labels)), labels):\n",
    "    clf = svm.LinearSVC(C=0.1)\n",
    "    train_x_t,valid_x_t = x_tfidf[train_index], x_tfidf[valid_index]\n",
    "    train_y_t,valid_y_t = labels[train_index],labels[valid_index]\n",
    "    clf.fit(train_x_t,train_y_t)\n",
    "    prediction = clf.predict(valid_x_t)\n",
    "    acc += accuracy_score(valid_y_t,prediction)\n",
    "    avg += f1_score(valid_y_t,prediction)\n",
    "    del clf\n",
    "print(avg/5,acc/5)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## BaseModel"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from keras.models import Model, load_model\n",
    "from keras.preprocessing.text import Tokenizer\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.utils.np_utils import to_categorical\n",
    "from keras.layers import Dense, Input, LSTM, Embedding,GRU\n",
    "from keras.layers import Bidirectional,TimeDistributed,Merge\n",
    "from keras.layers import concatenate,multiply,dot,add\n",
    "from keras.layers import RepeatVector, Flatten,Permute,Reshape,Lambda,Activation,Dropout,GlobalAveragePooling1D,Conv1D,GlobalMaxPool1D\n",
    "from keras.callbacks import EarlyStopping,ModelCheckpoint\n",
    "from keras.optimizers import Adam\n",
    "from keras import backend as K\n",
    "from keras.engine.topology import Layer\n",
    "from keras import activations, regularizers, constraints, initializers, optimizers\n",
    "import tensorflow as tf\n",
    "import datetime\n",
    "# NOTE(review): importing sklearn's f1_score shadows the custom f1_score\n",
    "# defined in the evaluation cell above once this cell runs.\n",
    "from sklearn.metrics import precision_score, recall_score, f1_score, accuracy_score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\gensim\\utils.py:862: UserWarning: detected Windows; aliasing chunkize to chunkize_serial\n",
      "  warnings.warn(\"detected Windows; aliasing chunkize to chunkize_serial\")\n"
     ]
    }
   ],
   "source": [
    "from gensim.models.keyedvectors import KeyedVectors\n",
    "# NOTE(review): hardcoded absolute local path to the pretrained vectors.\n",
    "w2v = KeyedVectors.load_word2vec_format('E:/code/tools/vector/GoogleNews-vectors-negative300.bin.gz', binary=True)\n",
    "# Vocabulary items not found in the pretrained vectors (for later inspection).\n",
    "extra_words = []\n",
    "def get_embedding_matrix(word_index,max_features):\n",
    "    \"\"\"Build a (max_features, 300) matrix of word vectors.\n",
    "\n",
    "    Rows for words present in w2v get their pretrained vector; out-of-vocab\n",
    "    words get a small uniform random vector and are logged in extra_words.\n",
    "    \"\"\"\n",
    "    embedding_matrix = np.random.random((max_features,300 ))\n",
    "    for word, i in word_index.items():\n",
    "        if word in w2v:\n",
    "            embedding_matrix[i] = w2v[word]\n",
    "        else:\n",
    "            extra_words.append(word)\n",
    "            embedding_matrix[i] = np.random.uniform(low=-0.01,high=0.01,size=(300,))\n",
    "    return embedding_matrix\n",
    "def get_train_data(train_df,test_df):\n",
    "    \"\"\"Tokenize train+test text, pad sequences and build target/label arrays.\n",
    "\n",
    "    Returns (train_data, test_data, time_steps, target_nums, embedding_matrix)\n",
    "    where each *_data is [padded_sequences, target_index_matrix, one_hot_labels].\n",
    "    \"\"\"\n",
    "    print(\"处理：==={}===\".format(test_df.TARGET.tolist()[0]))\n",
    "    texts = train_df.WORDS.tolist()+test_df.WORDS.tolist()\n",
    "    targets = test_df.TARGET.tolist()[0].split(' ')\n",
    "    tokenizer = Tokenizer(num_words=20000)\n",
    "    tokenizer.fit_on_texts(texts)\n",
    "    sequences = tokenizer.texts_to_sequences(texts)\n",
    "    word_index = tokenizer.word_index\n",
    "    # Ensure every target word has an index, even if absent from the corpus.\n",
    "    for target in targets:\n",
    "        if target not in word_index.keys():\n",
    "            word_index[target] = len(word_index)+1    \n",
    "    print('Found %s unique tokens.' % len(word_index))\n",
    "    # tokenizer indices start at 1, so the matrix needs len(word_index)+1 rows\n",
    "    max_features = len(word_index)+1\n",
    "    # Pad every sequence to the length of the longest one.\n",
    "    time_steps = max([len(i) for i in sequences])\n",
    "    data = pad_sequences(sequences, maxlen=time_steps)\n",
    "    labels = to_categorical(np.asarray(train_df.LABEL.tolist()+test_df.LABEL.tolist()))\n",
    "    # One identical row of target word indices per sample.\n",
    "    targets_index = [[word_index[i] for i in targets]]*len(texts)\n",
    "    targets_matrix = np.array(targets_index)\n",
    "    # number of target words\n",
    "    target_nums = len(targets)\n",
    "    train_len = len(train_df)\n",
    "    embedding_matrix = get_embedding_matrix(word_index,max_features)\n",
    "    train_data = [data[:train_len],targets_matrix[:train_len],labels[:train_len]]\n",
    "    test_data = [data[train_len:],targets_matrix[train_len:],labels[train_len:]]\n",
    "    return train_data,test_data,time_steps,target_nums,embedding_matrix"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "处理：===The Chinese government anti terrorist in xinjiang===\n",
      "Found 3965 unique tokens.\n"
     ]
    }
   ],
   "source": [
    "# Train on the first 800 shuffled rows, keep the rest as the test split.\n",
    "xinjiang =df\n",
    "train_data,test_data,time_steps,target_nums,embedding_matrix = get_train_data(xinjiang[:800],xinjiang[800:])\n",
    "# The full word2vec model is no longer needed once the matrix is built.\n",
    "del w2v"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Hyper-parameters for the BiLSTM baseline (CommonLstm).\n",
    "lstm_config = {\n",
    "    'dropout_rate':0.2,\n",
    "    'lstm_output_size':300,\n",
    "    'embed_trainable':True,\n",
    "    'batch_size':50,\n",
    "    'optimizer':'adam',\n",
    "    'lr':0.001,\n",
    "    'epochs':20,\n",
    "    'n_stop':50,\n",
    "    'time_steps':time_steps\n",
    "}\n",
    "# Hyper-parameters for the multi-kernel CNN baseline (CommonCnn).\n",
    "cnn_config={\n",
    "    'filters':250,\n",
    "    'kernel_sizes':[3,4,5],\n",
    "    'hidden_dims':250,\n",
    "    'max_length':time_steps,\n",
    "    'embed_trainable':True,\n",
    "    'batch_size':50,\n",
    "    'optimizer':'adam',\n",
    "    'lr':0.001,\n",
    "    'epochs':20,\n",
    "    'n_stop':20,\n",
    "    'dropout_rate':0.2}\n",
    "# Hyper-parameters for the target-attention models (TargetAttention/2).\n",
    "tan_config = {\n",
    "    'dropout_rate':0.2,\n",
    "    'lstm_output_size':100,\n",
    "    'embed_trainable':True,\n",
    "    'batch_size':100,\n",
    "    'optimizer':'adam',\n",
    "    'lr':0.001,\n",
    "    'epochs':20,\n",
    "    'n_stop':20,\n",
    "    'time_steps':time_steps,\n",
    "    'target_nums':target_nums\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class BaseModel:\n",
    "    \"\"\"Shared train/evaluate scaffolding for the Keras stance models.\n",
    "\n",
    "    Subclasses implement build() to populate self.model; multi-input\n",
    "    subclasses override data_generator accordingly.\n",
    "    \"\"\"\n",
    "    def __init__(self, config):\n",
    "        self.config = config\n",
    "        self.model = None\n",
    "\n",
    "    def data_generator(self,train_x,train_y,batch_size):\n",
    "        \"\"\"Yield shuffled (x, y) mini-batches; a trailing partial batch is dropped.\"\"\"\n",
    "        idx = np.random.permutation(len(train_y))\n",
    "        shuffled_train_x,shuffled_train_y = train_x[idx],train_y[idx]\n",
    "        for i in range(int(len(train_y)//batch_size)):\n",
    "            yield shuffled_train_x[i * batch_size:(i + 1) * batch_size],shuffled_train_y[i * batch_size:(i + 1) * batch_size]\n",
    "\n",
    "    def train(self,train_x,train_y):\n",
    "        \"\"\"Run one epoch of mini-batch training; return (mean loss, mean accuracy).\"\"\"\n",
    "        loss,acc,total=0,0,0\n",
    "        for batch_x,batch_y in self.data_generator(train_x,train_y,self.config['batch_size']):\n",
    "            history = self.model.train_on_batch(batch_x,batch_y)\n",
    "            num = len(batch_y)\n",
    "            # Weight running sums by batch size so the epoch mean is exact.\n",
    "            loss,acc,total=loss+history[0]*num,acc+history[1]*num,total+num\n",
    "        loss,acc = loss/total,acc/total\n",
    "        return loss,acc\n",
    "\n",
    "    def predict(self,test_x):\n",
    "        y_pred = self.model.predict(test_x)\n",
    "        return y_pred\n",
    "\n",
    "    def evaluate(self,valid_x, valid_y):\n",
    "        \"\"\"Return (loss, accuracy, mean F1 of classes 1 and 2) on validation data.\"\"\"\n",
    "        v_pred = [i.argmax() for i in self.predict(valid_x)]\n",
    "        v_true = [i.argmax() for i in valid_y]\n",
    "        valid_score = self.f1_score(v_true,v_pred)\n",
    "        evaluate_list = self.model.evaluate(valid_x,valid_y,verbose=0)\n",
    "        return evaluate_list[0],evaluate_list[1],valid_score\n",
    "\n",
    "    def fit(self, train_x, train_y, valid_x, valid_y,best_type='best_score',filename='best.model',valid=True):\n",
    "        \"\"\"Train with checkpointing and early stopping.\n",
    "\n",
    "        best_type selects the checkpoint criterion ('best_loss' or\n",
    "        'best_score'); the best weights are saved to `filename` and reloaded\n",
    "        before returning (best_epoch, best_loss, best_acc).\n",
    "        \"\"\"\n",
    "        early_stop = 0\n",
    "        best_loss = 1000\n",
    "        best_acc = 0\n",
    "        best_score = 0\n",
    "        best_epoch = 0\n",
    "        n_stop = self.config['n_stop']\n",
    "        # Fixed off-by-one: range(1, epochs) trained one epoch fewer than configured.\n",
    "        for i in range(1,self.config['epochs']+1):\n",
    "            early_stop += 1\n",
    "            val = self.train(train_x,train_y)\n",
    "            print('\\r',i,'train',val)\n",
    "            if valid:\n",
    "                print('testing...',end='')\n",
    "                val = self.evaluate(valid_x,valid_y)\n",
    "                if (val[0]<best_loss and best_type=='best_loss') or (val[2]>best_score and best_type=='best_score'):\n",
    "                    print(best_type)\n",
    "                    best_loss=val[0]\n",
    "                    best_epoch=i\n",
    "                    best_acc=val[1]\n",
    "                    best_score = val[2]\n",
    "                    self.model.save_weights(filename)\n",
    "                    early_stop=0\n",
    "                t=datetime.datetime.now().strftime('%H:%M:%S')\n",
    "                print('\\r',i,'test',t,'loss:%f, acc:%f,score:%f'%val)\n",
    "                print('-----------------------------------------')\n",
    "                if early_stop>n_stop:\n",
    "                    print('early stop')\n",
    "                    break\n",
    "        if valid:\n",
    "            print('best:',best_epoch,best_loss,best_acc,best_score)\n",
    "            self.model.load_weights(filename)\n",
    "        else:\n",
    "            # Fixed NameError: `n_epoch` was undefined here; without validation\n",
    "            # the last completed epoch is the best known one.\n",
    "            best_epoch = i\n",
    "            best_loss = val[0]\n",
    "            best_acc = val[1]\n",
    "            self.model.save_weights(filename)\n",
    "        return best_epoch,best_loss,best_acc\n",
    "\n",
    "    def f1_score(self,y_true,y_pred):\n",
    "        \"\"\"Mean F1 over classes 1 and 2 (class 0 excluded from the average).\"\"\"\n",
    "        pf,pa = precision_score(y_true, y_pred, average=None)[1:]\n",
    "        rf,ra = recall_score(y_true,y_pred,average=None)[1:]\n",
    "        f1_1 = 2*pf*rf/(pf+rf)\n",
    "        f1_2 = 2*pa*ra/(pa+ra)\n",
    "        return (f1_1 + f1_2) / 2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 99,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class CommonCnn(BaseModel):\n",
    "    \"\"\"Multi-width Conv1D text classifier (Kim-style CNN) over word embeddings.\"\"\"\n",
    "    def build(self,embedding_matrix=None):\n",
    "        sentence_input = Input(shape=(self.config['max_length'],),dtype='int32')\n",
    "        embed = Embedding(embedding_matrix.shape[0],\n",
    "                                    embedding_matrix.shape[1],\n",
    "                                    trainable=self.config['embed_trainable'],\n",
    "                                    weights=[embedding_matrix]\n",
    "                                )(sentence_input)\n",
    "        # One conv + global-max-pool branch per kernel size.\n",
    "        convs = []\n",
    "        for ksz in self.config['kernel_sizes']:\n",
    "            conv = Conv1D(self.config['filters'],\n",
    "                          ksz,\n",
    "                          activation='relu')(embed)\n",
    "            pooling = GlobalMaxPool1D()(conv)\n",
    "            convs.append(pooling)\n",
    "        # Fixed: the legacy Merge layer is deprecated; the functional\n",
    "        # concatenate() (already imported above) does the same axis-1 concat.\n",
    "        merged = concatenate(convs, axis=1)\n",
    "        dense = Dense(self.config['hidden_dims'],activation='relu')(merged)\n",
    "        out = Dropout(self.config['dropout_rate'])(dense)\n",
    "        predictions = Dense(3,\n",
    "                            kernel_regularizer=regularizers.l2(0.01),\n",
    "                            activation='softmax')(out)\n",
    "        model = Model(inputs=[sentence_input], outputs=predictions)\n",
    "        opt = optimizers.get(self.config['optimizer'])\n",
    "        K.set_value(opt.lr,self.config['lr'])\n",
    "        model.compile(optimizer=opt,\n",
    "                      loss='categorical_crossentropy',\n",
    "                      metrics=['accuracy'])\n",
    "        model.summary()\n",
    "        self.model = model\n",
    "def k_f1_score(y_true,y_pred):\n",
    "    # NOTE(review): this helper appears unused. Calling K.set_value on a\n",
    "    # placeholder is not expected to work (placeholders hold no value to\n",
    "    # assign); confirm before relying on it as a metric.\n",
    "    sc = K.placeholder(shape=(1),dtype='float64')\n",
    "    a = K.constant(y_true)\n",
    "    b = K.constant(y_pred)\n",
    "    K.set_value(sc,precision_score(K.get_value(K.argmax(a)),K.get_value(K.argmax(b)), average=None)[1])\n",
    "    return sc\n",
    "class CommonLstm(BaseModel):\n",
    "    \"\"\"Bidirectional LSTM sentence classifier over word embeddings.\"\"\"\n",
    "    def build(self,embedding_matrix=None):\n",
    "        # Fixed: the input length came from the notebook-global `time_steps`;\n",
    "        # read it from the model config like the other models do.\n",
    "        sentence_input = Input(shape=(self.config['time_steps'],),dtype='int32',name='sentence_input')\n",
    "        embed = Embedding(embedding_matrix.shape[0],\n",
    "                                    embedding_matrix.shape[1],\n",
    "                                    trainable=self.config['embed_trainable'],\n",
    "                                    weights=[embedding_matrix]\n",
    "                                   )(sentence_input)\n",
    "        lstm_out = Bidirectional(LSTM(self.config['lstm_output_size'],\n",
    "                        dropout=self.config['dropout_rate'],\n",
    "                        recurrent_dropout=self.config['dropout_rate']))(embed)\n",
    "        x = Dropout(self.config['dropout_rate'])(lstm_out)\n",
    "        predictions = Dense(3,\n",
    "                            kernel_regularizer=regularizers.l2(0.01),\n",
    "                            activation='softmax')(x)\n",
    "        model = Model(inputs=[sentence_input], outputs=predictions)\n",
    "        opt = optimizers.get(self.config['optimizer'])\n",
    "        K.set_value(opt.lr,self.config['lr'])\n",
    "        model.compile(optimizer=opt,\n",
    "                      loss='categorical_crossentropy',\n",
    "                      metrics=['accuracy'])\n",
    "        model.summary()\n",
    "        self.model = model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def softmask(x, mask,axis=-1):\n",
    "    '''Softmax with an optional mask, used by the attention layers.\n",
    "\n",
    "    Masked positions are zeroed before normalization; the 1e-6 term guards\n",
    "    against division by zero when a row is fully masked. The final K.relu\n",
    "    is a no-op on the already non-negative weights.\n",
    "    '''\n",
    "    y = K.exp(x)\n",
    "    if mask is not None:\n",
    "        y = y * tf.to_float(mask)\n",
    "    sumx = K.sum(y, axis=axis, keepdims=True) + 1e-6\n",
    "    x = y / sumx\n",
    "    return K.relu(x)\n",
    "class AttentionLayer(Layer):\n",
    "    \"\"\"Target-conditioned attention: scores each timestep of a sequence h\n",
    "    against a target vector t via tanh(h.W.t) and returns masked-softmax\n",
    "    weights of shape (batch, time).\n",
    "    \"\"\"\n",
    "    def __init__(self,**kwargs):\n",
    "        # Fixed: the attribute Keras checks is `supports_masking` (as spelled\n",
    "        # in ClearMaskLayer below); the old `support_mask` left masking\n",
    "        # support disabled.\n",
    "        self.supports_masking = True\n",
    "        super(AttentionLayer, self).__init__(**kwargs)\n",
    "        \n",
    "    def build(self,input_shape):\n",
    "        # Square bilinear weight matrix W of shape (dim, dim).\n",
    "        self.W = self.add_weight(name = 'kernel',\n",
    "                                     shape=(input_shape[0][2],input_shape[0][2]),\n",
    "                                     initializer=initializers.RandomUniform(minval=-0.1,maxval=0.1),trainable=True)\n",
    "        super(AttentionLayer,self).build(input_shape)\n",
    "    \n",
    "    def call(self, x,mask=None):\n",
    "        h = x[0]  # sequence states, e.g. (None, 100, 300)\n",
    "        hw = K.dot(h,self.W)\n",
    "        t = x[1]  # target vector, e.g. (None, 300)\n",
    "        hwt = dot([hw,t],axes=[2,1])\n",
    "        tanh_result = K.tanh(hwt)\n",
    "        # Normalize over timesteps, ignoring positions masked in the sequence.\n",
    "        atten = softmask(tanh_result,mask[0])\n",
    "        return atten\n",
    "    \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        return (input_shape[0][0],input_shape[0][1])\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        # Propagate the sequence mask (if any) to downstream layers.\n",
    "        if mask:\n",
    "            return mask[0]\n",
    "        else:\n",
    "            return None\n",
    "        \n",
    "        \n",
    "class TargetAttention(BaseModel):\n",
    "    \"\"\"BiLSTM + target-conditioned attention stance classifier.\n",
    "\n",
    "    Takes [sentence indices, target word indices] as input; a learned\n",
    "    weighting of the target words conditions attention over the LSTM states.\n",
    "    \"\"\"\n",
    "    def data_generator(self,train_x,train_y,batch_size):\n",
    "        # Two-input variant of BaseModel.data_generator (sentence + target);\n",
    "        # a trailing partial batch is dropped.\n",
    "        idx = np.random.permutation(len(train_y))\n",
    "        shuffled_train_x,shuffled_train_x1,shuffled_train_y = train_x[0][idx],train_x[1][idx],train_y[idx]\n",
    "        for i in range(int(len(train_y)//batch_size)):\n",
    "            yield [shuffled_train_x[i * batch_size:(i + 1) * batch_size],shuffled_train_x1[i * batch_size:(i + 1) * batch_size]],shuffled_train_y[i * batch_size:(i + 1) * batch_size]\n",
    "    def build(self,embedding_matrix=None):\n",
    "        sentence_input = Input(shape=(self.config['time_steps'],),dtype='int32',name='sentence_input')\n",
    "        target_input = Input(shape=(self.config['target_nums'],),dtype='int32',name='target_input')\n",
    "        embedding_layer = Embedding(embedding_matrix.shape[0],\n",
    "                                    embedding_matrix.shape[1],\n",
    "                                    trainable=self.config['embed_trainable'],\n",
    "                                    weights=[embedding_matrix],\n",
    "                                    mask_zero=True\n",
    "                                   )\n",
    "        \n",
    "        left_x = embedding_layer(sentence_input)\n",
    "        target_x = embedding_layer(target_input)\n",
    "        target_x = ClearMaskLayer()(target_x)\n",
    "        # Learned softmax weighting over target words -> single target vector.\n",
    "        target_weight = Dense(1,name='target_weights')(target_x)\n",
    "        # Fixed: read target_nums from the config instead of the\n",
    "        # notebook-global variable (consistent with TargetAttention2).\n",
    "        target_weight = Reshape((self.config['target_nums'],))(target_weight)\n",
    "        target_weight = Activation('softmax',name='target_softmax')(target_weight)\n",
    "        target_rep = dot([target_weight,target_x],axes=1)\n",
    "        target_rep = Dense(self.config['lstm_output_size']*2,activation='tanh')(target_rep)\n",
    "        target_rep = Reshape((self.config['lstm_output_size']*2,))(target_rep)\n",
    "        left_x = Bidirectional(LSTM(self.config['lstm_output_size'],dropout=0.2,recurrent_dropout=0.2,return_sequences=True))(left_x)\n",
    "        # Attention over the LSTM states conditioned on the target vector.\n",
    "        att_c = AttentionLayer()([left_x,target_rep])\n",
    "        cr = dot([att_c,left_x],axes=1,name='attention_mul')\n",
    "        attention_mul = Dropout(0.2)(cr)\n",
    "        attention_mul = Dense(100,activation='tanh')(attention_mul)\n",
    "        predictions = Dense(3,\n",
    "                            activation='softmax')(attention_mul)\n",
    "        model = Model(inputs=[sentence_input,target_input], outputs=predictions)\n",
    "        opt = optimizers.get(self.config['optimizer'])\n",
    "        K.set_value(opt.lr,self.config['lr'])\n",
    "        model.compile(optimizer=opt,\n",
    "                      loss='categorical_crossentropy',\n",
    "                      metrics=['accuracy'])\n",
    "        model.summary()\n",
    "        self.model = model\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class ClearMaskLayer(Layer):\n",
    "    '''Identity layer that drops the incoming Keras mask.\n",
    "\n",
    "    After a mask-producing layer (e.g. Embedding with mask_zero=True), insert\n",
    "    this to stop mask propagation before layers that cannot consume a mask\n",
    "    (e.g. a softmax/Dense head).\n",
    "    '''\n",
    "    def __init__(self, **kwargs):\n",
    "        self.supports_masking = True\n",
    "        super(ClearMaskLayer, self).__init__(**kwargs)\n",
    "    \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        # Pass-through: the tensor data itself is unchanged.\n",
    "        return input_shape\n",
    "    \n",
    "    def compute_mask(self, x, mask=None):\n",
    "        # Swallow the mask so downstream layers see none.\n",
    "        return None\n",
    "class TargetAttention2(BaseModel):\n",
    "    \"\"\"Variant of TargetAttention where attention weights are computed from\n",
    "    the raw embedding sequence and then applied to the BiLSTM states.\n",
    "    \"\"\"\n",
    "    def data_generator(self,train_x,train_y,batch_size):\n",
    "        # Two-input variant (sentence + target); trailing partial batch dropped.\n",
    "        idx = np.random.permutation(len(train_y))\n",
    "        shuffled_train_x,shuffled_train_x1,shuffled_train_y = train_x[0][idx],train_x[1][idx],train_y[idx]\n",
    "        \n",
    "        for i in range(int(len(train_y)//batch_size)):\n",
    "            yield [shuffled_train_x[i * batch_size:(i + 1) * batch_size],shuffled_train_x1[i * batch_size:(i + 1) * batch_size]],shuffled_train_y[i * batch_size:(i + 1) * batch_size]\n",
    "    def build(self,embedding_matrix=None):\n",
    "        sentence_input = Input(shape=(self.config['time_steps'],),dtype='int32',name='sentence_input')\n",
    "        target_input = Input(shape=(self.config['target_nums'],),dtype='int32',name='target_input')\n",
    "        embedding_layer = Embedding(embedding_matrix.shape[0],\n",
    "                                    embedding_matrix.shape[1],\n",
    "                                    trainable=self.config['embed_trainable'],\n",
    "                                    weights=[embedding_matrix],\n",
    "                                    mask_zero=True\n",
    "                                   )\n",
    "        \n",
    "        left_x = embedding_layer(sentence_input)\n",
    "        target_x = embedding_layer(target_input)\n",
    "        target_x = ClearMaskLayer()(target_x)\n",
    "        # Learned softmax weighting over target words -> single target vector.\n",
    "        target_weight = Dense(1,name='target_weights')(target_x)\n",
    "        target_weight = Reshape((self.config['target_nums'],))(target_weight)\n",
    "        target_weight = Activation('softmax',name='target_softmax')(target_weight)\n",
    "        target_rep = dot([target_weight,target_x],axes=1)\n",
    "        target_rep = Dense(300,activation='tanh')(target_rep)\n",
    "        target_rep = Reshape((300,))(target_rep)\n",
    "        # NOTE(review): attention is computed on the embedding sequence here,\n",
    "        # then applied to the BiLSTM states below — confirm this is intended\n",
    "        # (TargetAttention attends over the LSTM states instead).\n",
    "        att_c = AttentionLayer(name='att')([left_x,target_rep])\n",
    "        left_x = Bidirectional(LSTM(self.config['lstm_output_size'],dropout=0.2,recurrent_dropout=0.2,return_sequences=True))(left_x)  #140*300\n",
    "        cr = dot([att_c,left_x],axes=1,name='attention_mul') #None*300\n",
    "        attention_mul = Dropout(self.config['dropout_rate'])(cr)\n",
    "        attention_mul = Dense(100,\n",
    "                              activation='tanh',\n",
    "                              kernel_regularizer=regularizers.l2(0.01)\n",
    "                             )(attention_mul)\n",
    "        predictions = Dense(3,\n",
    "                            activation='softmax')(attention_mul)\n",
    "        model = Model(inputs=[sentence_input,target_input], outputs=predictions)\n",
    "        opt = optimizers.get(self.config['optimizer'])\n",
    "        K.set_value(opt.lr,self.config['lr'])\n",
    "        model.compile(optimizer=opt,\n",
    "                      loss='categorical_crossentropy',\n",
    "                      metrics=['accuracy'])\n",
    "        self.model = model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "train_x,train_y = train_data[0][:600],train_data[2][:600]\n",
    "valid_x,valid_y = train_data[0][600:],train_data[2][600:]\n",
    "test_x,test_y = test_data[0],test_data[2]\n",
    "train_x_t,train_y_t = [train_data[0][:600],train_data[1][:600]],train_data[2][:600]\n",
    "valid_x_t,valid_y_t = [train_data[0][600:],train_data[1][600:]],train_data[2][600:]\n",
    "test_x_t,test_y_t = [test_data[0],test_data[1]],test_data[2]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### CNN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 174,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-174-c943ab663440>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[0mcnn1\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mCommonCnn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mcnn_config\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0mcnn1\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbuild\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0membedding_matrix\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      3\u001b[0m \u001b[0mretrain\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m'1'\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      4\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mretrain\u001b[0m \u001b[1;33m==\u001b[0m\u001b[1;34m'1'\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      5\u001b[0m     \u001b[0mcnn1\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mtrain_x\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mtrain_y\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mvalid_x\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mvalid_y\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32m<ipython-input-47-11db5fa336ad>\u001b[0m in \u001b[0;36mbuild\u001b[1;34m(self, embedding_matrix)\u001b[0m\n\u001b[0;32m      6\u001b[0m                                     \u001b[0mtrainable\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'embed_trainable'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      7\u001b[0m                                     \u001b[0mweights\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0membedding_matrix\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 8\u001b[1;33m                                 )(sentence_input)\n\u001b[0m\u001b[0;32m      9\u001b[0m         \u001b[0mconvs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     10\u001b[0m         \u001b[1;32mfor\u001b[0m \u001b[0mksz\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mconfig\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'kernel_sizes'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\keras\\engine\\topology.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, inputs, **kwargs)\u001b[0m\n\u001b[0;32m    595\u001b[0m                 \u001b[1;31m# Load weights that were specified at layer instantiation.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    596\u001b[0m                 \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_initial_weights\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 597\u001b[1;33m                     \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mset_weights\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_initial_weights\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m    598\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    599\u001b[0m             \u001b[1;31m# Raise exceptions in case the input is not compatible\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\keras\\engine\\topology.py\u001b[0m in \u001b[0;36mset_weights\u001b[1;34m(self, weights)\u001b[0m\n\u001b[0;32m   1209\u001b[0m             \u001b[1;32mreturn\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1210\u001b[0m         \u001b[0mweight_value_tuples\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1211\u001b[1;33m         \u001b[0mparam_values\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mK\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbatch_get_value\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mparams\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   1212\u001b[0m         \u001b[1;32mfor\u001b[0m \u001b[0mpv\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mp\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mw\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mparam_values\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mparams\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mweights\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1213\u001b[0m             \u001b[1;32mif\u001b[0m \u001b[0mpv\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[1;33m!=\u001b[0m \u001b[0mw\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36mbatch_get_value\u001b[1;34m(ops)\u001b[0m\n\u001b[0;32m   2318\u001b[0m     \"\"\"\n\u001b[0;32m   2319\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mops\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2320\u001b[1;33m         \u001b[1;32mreturn\u001b[0m \u001b[0mget_session\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mops\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   2321\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   2322\u001b[0m         \u001b[1;32mreturn\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36mget_session\u001b[1;34m()\u001b[0m\n\u001b[0;32m    187\u001b[0m                 \u001b[1;31m# not already marked as initialized.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    188\u001b[0m                 is_initialized = session.run(\n\u001b[1;32m--> 189\u001b[1;33m                     [tf.is_variable_initialized(v) for v in candidate_vars])\n\u001b[0m\u001b[0;32m    190\u001b[0m                 \u001b[0muninitialized_vars\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    191\u001b[0m                 \u001b[1;32mfor\u001b[0m \u001b[0mflag\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mv\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mis_initialized\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcandidate_vars\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36mrun\u001b[1;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m    893\u001b[0m     \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    894\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[1;32m--> 895\u001b[1;33m                          run_metadata_ptr)\n\u001b[0m\u001b[0;32m    896\u001b[0m       \u001b[1;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m    897\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run\u001b[1;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m   1122\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[1;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[1;32mor\u001b[0m \u001b[1;33m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[1;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1123\u001b[0m       results = self._do_run(handle, final_targets, final_fetches,\n\u001b[1;32m-> 1124\u001b[1;33m                              feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[0;32m   1125\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1126\u001b[0m       \u001b[0mresults\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_run\u001b[1;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[0;32m   1319\u001b[0m     \u001b[1;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1320\u001b[0m       return self._do_call(_run_fn, self._session, feeds, fetches, targets,\n\u001b[1;32m-> 1321\u001b[1;33m                            options, run_metadata)\n\u001b[0m\u001b[0;32m   1322\u001b[0m     \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1323\u001b[0m       \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_do_call\u001b[1;34m(self, fn, *args)\u001b[0m\n\u001b[0;32m   1325\u001b[0m   \u001b[1;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1326\u001b[0m     \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1327\u001b[1;33m       \u001b[1;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   1328\u001b[0m     \u001b[1;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1329\u001b[0m       \u001b[0mmessage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0me\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[1;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[0;32m   1295\u001b[0m                 run_metadata):\n\u001b[0;32m   1296\u001b[0m       \u001b[1;31m# Ensure any changes to the graph are reflected in the runtime.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1297\u001b[1;33m       \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_extend_graph\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m   1298\u001b[0m       \u001b[1;32mwith\u001b[0m \u001b[0merrors\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mraise_exception_on_not_ok_status\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mstatus\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1299\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_created_with_new_api\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;32mD:\\program\\Lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m_extend_graph\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m   1356\u001b[0m         \u001b[1;32mwith\u001b[0m \u001b[0merrors\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mraise_exception_on_not_ok_status\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mstatus\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1357\u001b[0m           tf_session.TF_ExtendGraph(\n\u001b[1;32m-> 1358\u001b[1;33m               self._session, graph_def.SerializeToString(), status)\n\u001b[0m\u001b[0;32m   1359\u001b[0m         \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_opened\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1360\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "cnn1 = CommonCnn(cnn_config)\n",
    "cnn1.build(embedding_matrix)\n",
    "retrain = '1'\n",
    "if retrain =='1':\n",
    "    cnn1.fit(train_x,train_y,valid_x,valid_y)\n",
    "cnn1.model.load_weights('best.model')\n",
    "cnn1.evaluate(test_x,test_y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### LSTM"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 145,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_54 (Embedding)     (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_3 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_65 (Dropout)         (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_148 (Dense)            (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      " 1 train (1.2129882474740346, 0.42666666209697723)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 16:44:56 loss:1.161713, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.076807975769043, 0.5100000003973643)\n",
      "testing...best_score\n",
      " 2 test 16:46:09 loss:1.092652, acc:0.475000,score:0.388502\n",
      "-----------------------------------------\n",
      " 3 train (0.9855988522370657, 0.5316666712363561)\n",
      "testing...best_score\n",
      " 3 test 16:47:27 loss:1.053271, acc:0.510000,score:0.512920\n",
      "-----------------------------------------\n",
      " 4 train (0.9616815944512686, 0.623333344856898)\n",
      " 4 test 16:48:41 loss:1.045954, acc:0.485000,score:0.436540\n",
      "-----------------------------------------\n",
      " 5 train (0.8303546210130056, 0.666666661699613)\n",
      " 5 test 16:49:55 loss:1.012902, acc:0.500000,score:0.475094\n",
      "-----------------------------------------\n",
      " 6 train (0.6232188120484352, 0.8166666676600774)\n",
      "testing...best_score\n",
      " 6 test 16:51:10 loss:0.977067, acc:0.560000,score:0.558483\n",
      "-----------------------------------------\n",
      " 7 train (0.3901236479481061, 0.8916666607062022)\n",
      " 7 test 16:52:26 loss:1.147305, acc:0.570000,score:0.551443\n",
      "-----------------------------------------\n",
      " 8 train (0.28861212606231373, 0.9316666622956594)\n",
      "testing...best_score\n",
      " 8 test 16:53:48 loss:1.065443, acc:0.550000,score:0.563306\n",
      "-----------------------------------------\n",
      " 9 train (0.2980579286813736, 0.9283333271741867)\n",
      "testing...best_score\n",
      " 9 test 16:55:12 loss:1.129408, acc:0.575000,score:0.591760\n",
      "-----------------------------------------\n",
      " 10 train (0.20804573595523834, 0.9500000029802322)\n",
      " 10 test 16:56:36 loss:1.110552, acc:0.565000,score:0.553835\n",
      "-----------------------------------------\n",
      " 11 train (0.15424562245607376, 0.9833333293596903)\n",
      "testing...best_score\n",
      " 11 test 16:58:02 loss:1.291750, acc:0.570000,score:0.604971\n",
      "-----------------------------------------\n",
      " 12 train (0.15065131026009718, 0.9700000037749609)\n",
      "testing...best_score\n",
      " 12 test 16:59:17 loss:1.207450, acc:0.575000,score:0.619810\n",
      "-----------------------------------------\n",
      " 13 train (0.14099500079949698, 0.9783333291610082)\n",
      " 13 test 17:00:32 loss:1.229809, acc:0.565000,score:0.565424\n",
      "-----------------------------------------\n",
      " 14 train (0.10126023304959138, 0.9866666744152705)\n",
      " 14 test 17:01:47 loss:1.395190, acc:0.590000,score:0.612440\n",
      "-----------------------------------------\n",
      " 15 train (0.08765050893028577, 0.9916666646798452)\n",
      " 15 test 17:03:02 loss:1.481452, acc:0.565000,score:0.578845\n",
      "-----------------------------------------\n",
      " 16 train (0.07534362406780322, 0.9950000047683716)\n",
      " 16 test 17:04:19 loss:1.389191, acc:0.570000,score:0.598077\n",
      "-----------------------------------------\n",
      " 17 train (0.08057384472340345, 0.9933333396911621)\n",
      " 17 test 17:05:41 loss:1.584932, acc:0.570000,score:0.579966\n",
      "-----------------------------------------\n",
      " 18 train (0.08447827336688836, 0.9899999996026357)\n",
      "testing...best_score\n",
      " 18 test 17:06:59 loss:1.410589, acc:0.590000,score:0.640312\n",
      "-----------------------------------------\n",
      " 19 train (0.11519599488625924, 0.9816666692495346)\n",
      " 19 test 17:08:13 loss:1.365928, acc:0.575000,score:0.601150\n",
      "-----------------------------------------\n",
      "best: 18 1.4105891466140748 0.59 0.6403116499930848\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(1.2751790761947632, 0.595, 0.6069597069597069)"
      ]
     },
     "execution_count": 145,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "lstm1 = CommonLstm(lstm_config)\n",
    "lstm1.build(embedding_matrix)\n",
    "retrain = '1'\n",
    "if retrain =='1':\n",
    "    lstm1.fit(train_x,train_y,valid_x,valid_y)\n",
    "lstm1.model.load_weights('best.model')\n",
    "lstm1.evaluate(test_x,test_y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### ATTENTION"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\r",
      " 1 train (2.31344465414683, 0.49333332975705463)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 21:47:24 loss:2.236032, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.1067353089650473, 0.5000000049670538)\n",
      "testing...best_score\n",
      " 2 test 21:47:35 loss:2.066532, acc:0.470000,score:0.354977\n",
      "-----------------------------------------\n",
      " 3 train (1.926886757214864, 0.5450000067551931)\n",
      "testing...best_score\n",
      " 3 test 21:47:46 loss:1.905755, acc:0.495000,score:0.443497\n",
      "-----------------------------------------\n",
      " 4 train (1.7196462949117024, 0.6316666702429453)\n",
      "testing...best_score\n",
      " 4 test 21:47:56 loss:1.762960, acc:0.565000,score:0.592761\n",
      "-----------------------------------------\n",
      " 5 train (1.4857972065607707, 0.69500000278155)\n",
      "testing...best_score\n",
      " 5 test 21:48:06 loss:1.669751, acc:0.560000,score:0.611037\n",
      "-----------------------------------------\n",
      " 6 train (1.2511868874231975, 0.7849999864896139)\n",
      "testing...best_score\n",
      " 6 test 21:48:17 loss:1.568690, acc:0.620000,score:0.643233\n",
      "-----------------------------------------\n",
      " 7 train (1.025299847126007, 0.8550000091393789)\n",
      " 7 test 21:48:27 loss:1.480338, acc:0.610000,score:0.621511\n",
      "-----------------------------------------\n",
      " 8 train (0.8174331386884054, 0.9166666666666666)\n",
      " 8 test 21:48:37 loss:1.523665, acc:0.590000,score:0.584510\n",
      "-----------------------------------------\n",
      " 9 train (0.6537550389766693, 0.9583333333333334)\n",
      " 9 test 21:48:48 loss:1.520181, acc:0.610000,score:0.626387\n",
      "-----------------------------------------\n",
      " 10 train (0.571053147315979, 0.9650000035762787)\n",
      "testing...best_score\n",
      " 10 test 21:48:58 loss:1.630100, acc:0.645000,score:0.675135\n",
      "-----------------------------------------\n",
      " 11 train (0.4559576213359833, 0.9833333392937978)\n",
      " 11 test 21:49:08 loss:1.541911, acc:0.625000,score:0.647465\n",
      "-----------------------------------------\n",
      " 12 train (0.39750629166762036, 0.9850000143051147)\n",
      " 12 test 21:49:19 loss:1.565828, acc:0.635000,score:0.652373\n",
      "-----------------------------------------\n",
      " 13 train (0.34318283200263977, 0.9883333444595337)\n",
      " 13 test 21:49:29 loss:1.644253, acc:0.595000,score:0.627370\n",
      "-----------------------------------------\n",
      " 14 train (0.30962926149368286, 0.9816666841506958)\n",
      " 14 test 21:49:39 loss:1.477809, acc:0.615000,score:0.631739\n",
      "-----------------------------------------\n",
      " 15 train (0.2694306547443072, 0.9883333444595337)\n",
      " 15 test 21:49:50 loss:1.473277, acc:0.615000,score:0.629812\n",
      "-----------------------------------------\n",
      " 16 train (0.23871996998786926, 0.9850000143051147)\n",
      " 16 test 21:50:00 loss:1.383939, acc:0.595000,score:0.623603\n",
      "-----------------------------------------\n",
      " 17 train (0.20178581774234772, 0.9900000095367432)\n",
      " 17 test 21:50:11 loss:1.413184, acc:0.620000,score:0.642157\n",
      "-----------------------------------------\n",
      " 18 train (0.16807646304368973, 0.996666669845581)\n",
      " 18 test 21:50:21 loss:1.425215, acc:0.635000,score:0.662051\n",
      "-----------------------------------------\n",
      " 19 train (0.15025161455074945, 0.9950000047683716)\n",
      " 19 test 21:50:32 loss:1.425989, acc:0.630000,score:0.664306\n",
      "-----------------------------------------\n",
      "best: 10 1.6301004886627197 0.645 0.6751349815288434\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(1.6113621020317077, 0.625, 0.6552377459968219)"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tat = TargetAttention2(tan_config)\n",
    "tat.build(embedding_matrix)\n",
    "retrain = '1'\n",
    "if retrain =='1':\n",
    "    tat.fit(train_x_t,train_y_t,valid_x_t,valid_y_t)\n",
    "tat.model.load_weights('best.model')\n",
    "tat.evaluate(test_x_t,test_y_t)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 五折交叉"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "all_data0 =  np.concatenate([train_data[0],test_data[0]])\n",
    "all_data1 =  np.concatenate([train_data[1],test_data[1]])\n",
    "all_data2 =  np.concatenate([train_data[2],test_data[2]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pickle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with open('data/processed_data.pkl','wb') as f:\n",
    "    pickle.dump([all_data0,all_data1,all_data2,embedding_matrix],f)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "with open('data/processed_data.pkl', 'rb')as f:\n",
    "    data = pickle.load(f)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "'int' object is not iterable",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-26-ff6822c9f6b7>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[0ma\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mc\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;36m1\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;31mTypeError\u001b[0m: 'int' object is not iterable"
     ]
    }
   ],
   "source": [
    "a,b,c = 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 119,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "sentence_input (InputLayer)     (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "target_input (InputLayer)       (None, 7)            0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_36 (Embedding)        multiple             1189800     sentence_input[0][0]             \n",
      "                                                                 target_input[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "target_weights (Dense)          (None, 7, 1)         301         embedding_36[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dot_49 (Dot)                    (None, 1, 300)       0           target_weights[0][0]             \n",
      "                                                                 embedding_36[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_88 (Dense)                (None, 1, 200)       60200       dot_49[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "bidirectional_25 (Bidirectional (None, 78, 200)      320800      embedding_36[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "reshape_49 (Reshape)            (None, 200)          0           dense_88[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "attention_layer_1 (AttentionLay (None, 78)           40000       bidirectional_25[0][0]           \n",
      "                                                                 reshape_49[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "attention_mul (Dot)             (None, 200)          0           attention_layer_1[0][0]          \n",
      "                                                                 bidirectional_25[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dropout_35 (Dropout)            (None, 200)          0           attention_mul[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_89 (Dense)                (None, 100)          20100       dropout_35[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_90 (Dense)                (None, 3)            303         dense_89[0][0]                   \n",
      "==================================================================================================\n",
      "Total params: 1,631,504\n",
      "Trainable params: 1,631,504\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.0529426676886422, 0.4528571494988033)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 13:38:36 loss:1.029911, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (0.9872049263545445, 0.492857141154153)\n",
      "testing...best_score\n",
      " 2 test 13:39:15 loss:0.976906, acc:0.532338,score:0.494328\n",
      "-----------------------------------------\n",
      " 3 train (0.9007361531257629, 0.5842857105391366)\n",
      "testing...best_score\n",
      " 3 test 13:39:49 loss:0.906026, acc:0.601990,score:0.648750\n",
      "-----------------------------------------\n",
      " 4 train (0.7596263885498047, 0.6871428574834552)\n",
      "testing...best_score\n",
      " 4 test 13:40:23 loss:0.851925, acc:0.631841,score:0.668600\n",
      "-----------------------------------------\n",
      " 5 train (0.5808092483452388, 0.7828571370669773)\n",
      " 5 test 13:40:57 loss:0.979607, acc:0.611940,score:0.624761\n",
      "-----------------------------------------\n",
      " 6 train (0.4131169319152832, 0.8342857105391366)\n",
      " 6 test 13:41:31 loss:0.991189, acc:0.611940,score:0.652105\n",
      "-----------------------------------------\n",
      " 7 train (0.30266691957201275, 0.9028571503502982)\n",
      " 7 test 13:42:05 loss:1.054389, acc:0.611940,score:0.631172\n",
      "-----------------------------------------\n",
      " 8 train (0.1995734636272703, 0.9357142703873771)\n",
      " 8 test 13:42:38 loss:1.378658, acc:0.606965,score:0.593601\n",
      "-----------------------------------------\n",
      " 9 train (0.1317270951611655, 0.9614285741533551)\n",
      " 9 test 13:43:12 loss:1.374130, acc:0.592040,score:0.614756\n",
      "-----------------------------------------\n",
      " 10 train (0.1038001679948398, 0.9700000030653817)\n",
      " 10 test 13:43:46 loss:1.565551, acc:0.611940,score:0.604380\n",
      "-----------------------------------------\n",
      " 11 train (0.07088401381458555, 0.9757142918450492)\n",
      " 11 test 13:44:20 loss:1.607440, acc:0.601990,score:0.611100\n",
      "-----------------------------------------\n",
      " 12 train (0.060584320819803646, 0.9857142993382045)\n",
      " 12 test 13:44:54 loss:1.675395, acc:0.597015,score:0.607165\n",
      "-----------------------------------------\n",
      " 13 train (0.05169382105980601, 0.9828571506908962)\n",
      " 13 test 13:45:28 loss:1.620917, acc:0.597015,score:0.597885\n",
      "-----------------------------------------\n",
      " 14 train (0.033059071084218364, 0.9900000095367432)\n",
      " 14 test 13:46:02 loss:1.653418, acc:0.606965,score:0.614107\n",
      "-----------------------------------------\n",
      " 15 train (0.03509106992610863, 0.9871428694043841)\n",
      " 15 test 13:46:36 loss:1.652292, acc:0.587065,score:0.599186\n",
      "-----------------------------------------\n",
      " 16 train (0.024555410125425885, 0.9942857197352818)\n",
      " 16 test 13:47:11 loss:1.918177, acc:0.597015,score:0.600731\n",
      "-----------------------------------------\n",
      " 17 train (0.0514068358710834, 0.984285729272025)\n",
      " 17 test 13:47:45 loss:1.854384, acc:0.597015,score:0.605556\n",
      "-----------------------------------------\n",
      " 18 train (0.03582942299544811, 0.984285729272025)\n",
      " 18 test 13:48:19 loss:1.800847, acc:0.601990,score:0.593995\n",
      "-----------------------------------------\n",
      " 19 train (0.0356750666563, 0.9900000010217939)\n",
      " 19 test 13:48:54 loss:1.899262, acc:0.601990,score:0.624745\n",
      "-----------------------------------------\n",
      "best: 4 0.8519250542963323 0.6318407972060626 0.6685996191795048\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "sentence_input (InputLayer)     (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "target_input (InputLayer)       (None, 7)            0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_37 (Embedding)        multiple             1189800     sentence_input[0][0]             \n",
      "                                                                 target_input[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "target_weights (Dense)          (None, 7, 1)         301         embedding_37[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dot_51 (Dot)                    (None, 1, 300)       0           target_weights[0][0]             \n",
      "                                                                 embedding_37[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_91 (Dense)                (None, 1, 200)       60200       dot_51[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "bidirectional_26 (Bidirectional (None, 78, 200)      320800      embedding_37[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "reshape_50 (Reshape)            (None, 200)          0           dense_91[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "attention_layer_2 (AttentionLay (None, 78)           40000       bidirectional_26[0][0]           \n",
      "                                                                 reshape_50[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "attention_mul (Dot)             (None, 200)          0           attention_layer_2[0][0]          \n",
      "                                                                 bidirectional_26[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dropout_36 (Dropout)            (None, 200)          0           attention_mul[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_92 (Dense)                (None, 100)          20100       dropout_36[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_93 (Dense)                (None, 3)            303         dense_92[0][0]                   \n",
      "==================================================================================================\n",
      "Total params: 1,631,504\n",
      "Trainable params: 1,631,504\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.040629037788936, 0.4914285625730242)\n",
      " 1 test 13:50:05 loss:1.019821, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (0.9791253464562553, 0.5014285700661796)\n",
      "testing...best_score\n",
      " 2 test 13:50:43 loss:0.956864, acc:0.502488,score:0.369916\n",
      "-----------------------------------------\n",
      " 3 train (0.8755364418029785, 0.6171428561210632)\n",
      "testing...best_score\n",
      " 3 test 13:51:19 loss:0.869417, acc:0.601990,score:0.611090\n",
      "-----------------------------------------\n",
      " 4 train (0.7167767712048122, 0.7142857142857143)\n",
      "testing...best_score\n",
      " 4 test 13:51:56 loss:0.805711, acc:0.656716,score:0.690476\n",
      "-----------------------------------------\n",
      " 5 train (0.5460904581206185, 0.7799999969346183)\n",
      " 5 test 13:52:32 loss:0.958603, acc:0.611940,score:0.626146\n",
      "-----------------------------------------\n",
      " 6 train (0.4084563766207014, 0.8485714197158813)\n",
      " 6 test 13:53:10 loss:0.960508, acc:0.606965,score:0.617874\n",
      "-----------------------------------------\n",
      " 7 train (0.3081412208931787, 0.8914285727909633)\n",
      " 7 test 13:53:49 loss:1.012318, acc:0.621891,score:0.637822\n",
      "-----------------------------------------\n",
      " 8 train (0.20352167316845485, 0.9357142703873771)\n",
      " 8 test 13:54:28 loss:1.102271, acc:0.641791,score:0.666963\n",
      "-----------------------------------------\n",
      " 9 train (0.145622441811221, 0.9528571452413287)\n",
      " 9 test 13:55:06 loss:1.462267, acc:0.601990,score:0.601499\n",
      "-----------------------------------------\n",
      " 10 train (0.1235480893935476, 0.9614285826683044)\n",
      "testing...best_score\n",
      " 10 test 13:55:44 loss:1.303786, acc:0.656716,score:0.700479\n",
      "-----------------------------------------\n",
      " 11 train (0.08812762291303702, 0.9685714329992022)\n",
      " 11 test 13:56:23 loss:1.475156, acc:0.616915,score:0.639664\n",
      "-----------------------------------------\n",
      " 12 train (0.09604652971029282, 0.9657143013817924)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 12 test 13:57:01 loss:1.382998, acc:0.621891,score:0.646387\n",
      "-----------------------------------------\n",
      " 13 train (0.07922320600066866, 0.9742857217788696)\n",
      " 13 test 13:57:39 loss:1.435336, acc:0.606965,score:0.599343\n",
      "-----------------------------------------\n",
      " 14 train (0.05242646565394742, 0.9900000095367432)\n",
      " 14 test 13:58:17 loss:1.361859, acc:0.651741,score:0.691267\n",
      "-----------------------------------------\n",
      " 15 train (0.03639431404215949, 0.9928571496691022)\n",
      " 15 test 13:58:54 loss:1.482566, acc:0.636816,score:0.646739\n",
      "-----------------------------------------\n",
      " 16 train (0.043609198715005605, 0.984285729272025)\n",
      " 16 test 13:59:32 loss:1.483553, acc:0.631841,score:0.636364\n",
      "-----------------------------------------\n",
      " 17 train (0.04040794021316937, 0.9914285796029227)\n",
      " 17 test 14:00:13 loss:1.425597, acc:0.626866,score:0.657287\n",
      "-----------------------------------------\n",
      " 18 train (0.022100900898554494, 0.9914285796029227)\n",
      " 18 test 14:00:54 loss:1.496931, acc:0.611940,score:0.650126\n",
      "-----------------------------------------\n",
      " 19 train (0.019409369810351303, 0.9942857197352818)\n",
      " 19 test 14:01:35 loss:1.628315, acc:0.641791,score:0.658801\n",
      "-----------------------------------------\n",
      "best: 10 1.3037861401761943 0.6567164179845829 0.7004785684560966\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "sentence_input (InputLayer)     (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "target_input (InputLayer)       (None, 7)            0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_38 (Embedding)        multiple             1189800     sentence_input[0][0]             \n",
      "                                                                 target_input[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "target_weights (Dense)          (None, 7, 1)         301         embedding_38[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dot_53 (Dot)                    (None, 1, 300)       0           target_weights[0][0]             \n",
      "                                                                 embedding_38[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_94 (Dense)                (None, 1, 200)       60200       dot_53[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "bidirectional_27 (Bidirectional (None, 78, 200)      320800      embedding_38[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "reshape_51 (Reshape)            (None, 200)          0           dense_94[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "attention_layer_3 (AttentionLay (None, 78)           40000       bidirectional_27[0][0]           \n",
      "                                                                 reshape_51[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "attention_mul (Dot)             (None, 200)          0           attention_layer_3[0][0]          \n",
      "                                                                 bidirectional_27[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dropout_37 (Dropout)            (None, 200)          0           attention_mul[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_95 (Dense)                (None, 100)          20100       dropout_37[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_96 (Dense)                (None, 3)            303         dense_95[0][0]                   \n",
      "==================================================================================================\n",
      "Total params: 1,631,504\n",
      "Trainable params: 1,631,504\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.0545256361365318, 0.45124999061226845)\n",
      " 1 test 14:02:51 loss:1.022831, acc:0.485000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (0.9835817664861679, 0.5037499964237213)\n",
      "testing...best_score\n",
      " 2 test 14:03:37 loss:0.956653, acc:0.570000,score:0.586476\n",
      "-----------------------------------------\n",
      " 3 train (0.8735597208142281, 0.5999999865889549)\n",
      "testing...best_score\n",
      " 3 test 14:04:23 loss:0.871294, acc:0.565000,score:0.598989\n",
      "-----------------------------------------\n",
      " 4 train (0.6949775367975235, 0.7012499943375587)\n",
      "testing...best_score\n",
      " 4 test 14:05:08 loss:0.868639, acc:0.610000,score:0.635444\n",
      "-----------------------------------------\n",
      " 5 train (0.5020143203437328, 0.8150000050663948)\n",
      " 5 test 14:05:52 loss:0.995915, acc:0.595000,score:0.615504\n",
      "-----------------------------------------\n",
      " 6 train (0.34397298470139503, 0.8812500014901161)\n",
      "testing...best_score\n",
      " 6 test 14:06:36 loss:1.003419, acc:0.630000,score:0.660293\n",
      "-----------------------------------------\n",
      " 7 train (0.2068934291601181, 0.9350000023841858)\n",
      " 7 test 14:07:22 loss:1.020523, acc:0.620000,score:0.638402\n",
      "-----------------------------------------\n",
      " 8 train (0.1590042794123292, 0.9487500041723251)\n",
      " 8 test 14:08:08 loss:1.075113, acc:0.615000,score:0.645652\n",
      "-----------------------------------------\n",
      " 9 train (0.09315107949078083, 0.9800000041723251)\n",
      " 9 test 14:08:52 loss:1.218154, acc:0.640000,score:0.655840\n",
      "-----------------------------------------\n",
      " 10 train (0.07099719904363155, 0.9787500128149986)\n",
      " 10 test 14:09:38 loss:1.444950, acc:0.640000,score:0.660160\n",
      "-----------------------------------------\n",
      " 11 train (0.04824395733885467, 0.9900000095367432)\n",
      "testing...best_score\n",
      " 11 test 14:10:24 loss:1.381905, acc:0.640000,score:0.678058\n",
      "-----------------------------------------\n",
      " 12 train (0.05021469411440194, 0.9837500154972076)\n",
      " 12 test 14:11:09 loss:1.399962, acc:0.640000,score:0.636905\n",
      "-----------------------------------------\n",
      " 13 train (0.059033720288425684, 0.9787500128149986)\n",
      " 13 test 14:11:57 loss:1.534834, acc:0.605000,score:0.633278\n",
      "-----------------------------------------\n",
      " 14 train (0.06339172029402107, 0.9787500128149986)\n",
      " 14 test 14:12:47 loss:1.360418, acc:0.625000,score:0.639412\n",
      "-----------------------------------------\n",
      " 15 train (0.03303262242116034, 0.987500011920929)\n",
      " 15 test 14:13:30 loss:1.405275, acc:0.605000,score:0.632305\n",
      "-----------------------------------------\n",
      " 16 train (0.03583815216552466, 0.9900000095367432)\n",
      " 16 test 14:14:13 loss:1.382426, acc:0.660000,score:0.669650\n",
      "-----------------------------------------\n",
      " 17 train (0.03386131190927699, 0.9937500059604645)\n",
      " 17 test 14:14:56 loss:1.473434, acc:0.635000,score:0.658696\n",
      "-----------------------------------------\n",
      " 18 train (0.034350207075476646, 0.9862500131130219)\n",
      " 18 test 14:15:39 loss:1.419493, acc:0.650000,score:0.664946\n",
      "-----------------------------------------\n",
      " 19 train (0.023904766305349767, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 19 test 14:16:22 loss:1.497246, acc:0.665000,score:0.685768\n",
      "-----------------------------------------\n",
      "best: 19 1.4972461032867432 0.665 0.6857681271146516\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "sentence_input (InputLayer)     (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "target_input (InputLayer)       (None, 7)            0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_39 (Embedding)        multiple             1189800     sentence_input[0][0]             \n",
      "                                                                 target_input[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "target_weights (Dense)          (None, 7, 1)         301         embedding_39[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dot_55 (Dot)                    (None, 1, 300)       0           target_weights[0][0]             \n",
      "                                                                 embedding_39[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_97 (Dense)                (None, 1, 200)       60200       dot_55[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "bidirectional_28 (Bidirectional (None, 78, 200)      320800      embedding_39[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "reshape_52 (Reshape)            (None, 200)          0           dense_97[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "attention_layer_4 (AttentionLay (None, 78)           40000       bidirectional_28[0][0]           \n",
      "                                                                 reshape_52[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "attention_mul (Dot)             (None, 200)          0           attention_layer_4[0][0]          \n",
      "                                                                 bidirectional_28[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dropout_38 (Dropout)            (None, 200)          0           attention_mul[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_98 (Dense)                (None, 100)          20100       dropout_38[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_99 (Dense)                (None, 3)            303         dense_98[0][0]                   \n",
      "==================================================================================================\n",
      "Total params: 1,631,504\n",
      "Trainable params: 1,631,504\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 train (1.041150614619255, 0.47874999791383743)\n",
      " 1 test 14:17:43 loss:1.012881, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (0.9607479870319366, 0.5362499989569187)\n",
      "testing...best_score\n",
      " 2 test 14:18:30 loss:0.942068, acc:0.597990,score:0.637379\n",
      "-----------------------------------------\n",
      " 3 train (0.8343084529042244, 0.6550000086426735)\n",
      "testing...best_score\n",
      " 3 test 14:19:15 loss:0.856255, acc:0.618090,score:0.651989\n",
      "-----------------------------------------\n",
      " 4 train (0.6632710471749306, 0.7350000068545341)\n",
      "testing...best_score\n",
      " 4 test 14:19:59 loss:0.835032, acc:0.628141,score:0.657234\n",
      "-----------------------------------------\n",
      " 5 train (0.4505744092166424, 0.8487499952316284)\n",
      "testing...best_score\n",
      " 5 test 14:20:42 loss:0.890030, acc:0.638191,score:0.669848\n",
      "-----------------------------------------\n",
      " 6 train (0.30604706332087517, 0.8912500068545341)\n",
      " 6 test 14:21:26 loss:0.937116, acc:0.623116,score:0.652086\n",
      "-----------------------------------------\n",
      " 7 train (0.18073038570582867, 0.9374999925494194)\n",
      " 7 test 14:22:10 loss:1.156855, acc:0.577889,score:0.575970\n",
      "-----------------------------------------\n",
      " 8 train (0.14937359327450395, 0.9512500017881393)\n",
      "testing...best_score\n",
      " 8 test 14:22:54 loss:1.146300, acc:0.673367,score:0.709011\n",
      "-----------------------------------------\n",
      " 9 train (0.10154045606032014, 0.973750002682209)\n",
      " 9 test 14:23:44 loss:1.253761, acc:0.587940,score:0.639002\n",
      "-----------------------------------------\n",
      " 10 train (0.07652007136493921, 0.9775000065565109)\n",
      " 10 test 14:24:31 loss:1.247477, acc:0.648241,score:0.693934\n",
      "-----------------------------------------\n",
      " 11 train (0.03599471878260374, 0.9950000047683716)\n",
      " 11 test 14:25:15 loss:1.459485, acc:0.597990,score:0.649175\n",
      "-----------------------------------------\n",
      " 12 train (0.03298475639894605, 0.9875000044703484)\n",
      " 12 test 14:25:59 loss:1.501269, acc:0.638191,score:0.695000\n",
      "-----------------------------------------\n",
      " 13 train (0.02763238048646599, 0.9925000071525574)\n",
      " 13 test 14:26:43 loss:1.638543, acc:0.597990,score:0.653854\n",
      "-----------------------------------------\n",
      " 14 train (0.029286754841450602, 0.9925000071525574)\n",
      " 14 test 14:27:27 loss:1.699621, acc:0.603015,score:0.655847\n",
      "-----------------------------------------\n",
      " 15 train (0.024208785500377417, 0.9937500059604645)\n",
      " 15 test 14:28:12 loss:1.713031, acc:0.613065,score:0.664607\n",
      "-----------------------------------------\n",
      " 16 train (0.015102687961189076, 0.9950000047683716)\n",
      " 16 test 14:28:58 loss:1.778577, acc:0.608040,score:0.662884\n",
      "-----------------------------------------\n",
      " 17 train (0.015379493881482631, 0.9962500035762787)\n",
      " 17 test 14:29:45 loss:1.796403, acc:0.613065,score:0.660998\n",
      "-----------------------------------------\n",
      " 18 train (0.014307232428109273, 0.9962500035762787)\n",
      " 18 test 14:30:31 loss:1.926705, acc:0.613065,score:0.662380\n",
      "-----------------------------------------\n",
      " 19 train (0.008240266848588362, 0.9987500011920929)\n",
      " 19 test 14:31:16 loss:2.052305, acc:0.587940,score:0.646300\n",
      "-----------------------------------------\n",
      "best: 8 1.1463002721268927 0.6733668350694167 0.7090107057890562\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "sentence_input (InputLayer)     (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "target_input (InputLayer)       (None, 7)            0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_40 (Embedding)        multiple             1189800     sentence_input[0][0]             \n",
      "                                                                 target_input[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "target_weights (Dense)          (None, 7, 1)         301         embedding_40[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dot_57 (Dot)                    (None, 1, 300)       0           target_weights[0][0]             \n",
      "                                                                 embedding_40[1][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_100 (Dense)               (None, 1, 200)       60200       dot_57[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "bidirectional_29 (Bidirectional (None, 78, 200)      320800      embedding_40[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "reshape_53 (Reshape)            (None, 200)          0           dense_100[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "attention_layer_5 (AttentionLay (None, 78)           40000       bidirectional_29[0][0]           \n",
      "                                                                 reshape_53[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "attention_mul (Dot)             (None, 200)          0           attention_layer_5[0][0]          \n",
      "                                                                 bidirectional_29[0][0]           \n",
      "__________________________________________________________________________________________________\n",
      "dropout_39 (Dropout)            (None, 200)          0           attention_mul[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense_101 (Dense)               (None, 100)          20100       dropout_39[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "dense_102 (Dense)               (None, 3)            303         dense_101[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 1,631,504\n",
      "Trainable params: 1,631,504\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.052816927433014, 0.4749999940395355)\n",
      " 1 test 14:32:37 loss:1.018710, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (0.9809854701161385, 0.4912499971687794)\n",
      "testing...best_score\n",
      " 2 test 14:33:24 loss:0.978198, acc:0.537688,score:0.415623\n",
      "-----------------------------------------\n",
      " 3 train (0.8764882832765579, 0.6125000044703484)\n",
      "testing...best_score\n",
      " 3 test 14:34:08 loss:0.917986, acc:0.582915,score:0.573836\n",
      "-----------------------------------------\n",
      " 4 train (0.6962652206420898, 0.7337500005960464)\n",
      " 4 test 14:34:55 loss:0.927149, acc:0.592965,score:0.556505\n",
      "-----------------------------------------\n",
      " 5 train (0.5068699531257153, 0.8087500035762787)\n",
      "testing...best_score\n",
      " 5 test 14:35:39 loss:0.883145, acc:0.648241,score:0.646348\n",
      "-----------------------------------------\n",
      " 6 train (0.3414417617022991, 0.8849999904632568)\n",
      " 6 test 14:36:25 loss:0.938067, acc:0.628141,score:0.628665\n",
      "-----------------------------------------\n",
      " 7 train (0.2132008634507656, 0.9300000071525574)\n",
      " 7 test 14:37:13 loss:1.139475, acc:0.628141,score:0.601133\n",
      "-----------------------------------------\n",
      " 8 train (0.13298135064542294, 0.9575000032782555)\n",
      " 8 test 14:38:00 loss:1.207050, acc:0.658291,score:0.635878\n",
      "-----------------------------------------\n",
      " 9 train (0.08427130803465843, 0.9787500128149986)\n",
      " 9 test 14:38:45 loss:1.174999, acc:0.668342,score:0.641202\n",
      "-----------------------------------------\n",
      " 10 train (0.0616598236374557, 0.978750005364418)\n",
      " 10 test 14:39:32 loss:1.308066, acc:0.673367,score:0.639700\n",
      "-----------------------------------------\n",
      " 11 train (0.04374312004074454, 0.987500011920929)\n",
      " 11 test 14:40:21 loss:1.396088, acc:0.673367,score:0.646242\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 12 train (0.03639134741388261, 0.987500011920929)\n",
      " 12 test 14:41:07 loss:1.512052, acc:0.653266,score:0.615964\n",
      "-----------------------------------------\n",
      " 13 train (0.03952350525651127, 0.9887500032782555)\n",
      " 13 test 14:41:52 loss:1.446719, acc:0.663317,score:0.644330\n",
      "-----------------------------------------\n",
      " 14 train (0.03607746632769704, 0.9887500032782555)\n",
      " 14 test 14:42:37 loss:1.484442, acc:0.653266,score:0.641859\n",
      "-----------------------------------------\n",
      " 15 train (0.024624711018987, 0.9912500083446503)\n",
      " 15 test 14:43:22 loss:1.529924, acc:0.648241,score:0.628576\n",
      "-----------------------------------------\n",
      " 16 train (0.01826828153571114, 0.9962500035762787)\n",
      " 16 test 14:44:11 loss:1.560669, acc:0.653266,score:0.637386\n",
      "-----------------------------------------\n",
      " 17 train (0.032859443803317845, 0.987500011920929)\n",
      " 17 test 14:44:55 loss:1.574379, acc:0.658291,score:0.640617\n",
      "-----------------------------------------\n",
      " 18 train (0.021088970941491425, 0.9912500083446503)\n",
      " 18 test 14:45:42 loss:1.658235, acc:0.658291,score:0.629293\n",
      "-----------------------------------------\n",
      " 19 train (0.018471856863470748, 0.9900000095367432)\n",
      " 19 test 14:46:27 loss:1.645349, acc:0.648241,score:0.634685\n",
      "-----------------------------------------\n",
      "best: 5 0.8831447243091449 0.6482412061799112 0.646348394430236\n"
     ]
    }
   ],
   "source": [
    "# Improved ATAE: 5-fold stratified cross-validation of the TargetAttention model.\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "# random_state pins the fold assignment so CV results are reproducible\n",
    "# (consistent with the seeds set in the setup cell at the top of the notebook).\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)\n",
    "# Stratify on the argmax of the one-hot labels; derive the sample count from\n",
    "# the data instead of hard-coding 1000 so the cell works for any dataset size.\n",
    "stratify_labels = np.argmax(all_data2, axis=1)\n",
    "for train_index, valid_index in kfold.split(np.zeros(len(all_data2)), stratify_labels):\n",
    "    train_x_t = [all_data0[train_index], all_data1[train_index]]\n",
    "    valid_x_t = [all_data0[valid_index], all_data1[valid_index]]\n",
    "    train_y_t, valid_y_t = all_data2[train_index], all_data2[valid_index]\n",
    "    tat = TargetAttention(tan_config)\n",
    "    tat.build(embedding_matrix)\n",
    "    tat.fit(train_x_t, train_y_t, valid_x_t, valid_y_t)\n",
    "    del tat  # release the model before building the next fold's graph\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 117,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\r",
      " 1 train (2.313538142613002, 0.47714284913880484)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 12:09:38 loss:2.203155, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.0978198732648576, 0.47857142771993366)\n",
      " 2 test 12:10:08 loss:2.006918, acc:0.487562,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (1.8844433171408517, 0.5414285659790039)\n",
      "testing...best_score\n",
      " 3 test 12:10:37 loss:1.821320, acc:0.547264,score:0.517359\n",
      "-----------------------------------------\n",
      " 4 train (1.6569103683744157, 0.6528571418353489)\n",
      "testing...best_score\n",
      " 4 test 12:11:05 loss:1.631418, acc:0.597015,score:0.604330\n",
      "-----------------------------------------\n",
      " 5 train (1.3682267325265067, 0.7614285605294364)\n",
      "testing...best_score\n",
      " 5 test 12:11:34 loss:1.531863, acc:0.616915,score:0.623480\n",
      "-----------------------------------------\n",
      " 6 train (1.1553443329674857, 0.8028571435383388)\n",
      "testing...best_score\n",
      " 6 test 12:12:02 loss:1.514513, acc:0.626866,score:0.628290\n",
      "-----------------------------------------\n",
      " 7 train (0.9689315216881889, 0.8399999993188041)\n",
      "testing...best_score\n",
      " 7 test 12:12:30 loss:1.322230, acc:0.616915,score:0.651351\n",
      "-----------------------------------------\n",
      " 8 train (0.7572278465543475, 0.9085714306150164)\n",
      "testing...best_score\n",
      " 8 test 12:12:59 loss:1.309307, acc:0.646766,score:0.655577\n",
      "-----------------------------------------\n",
      " 9 train (0.6217794929231916, 0.9385714360645839)\n",
      " 9 test 12:13:27 loss:1.296744, acc:0.631841,score:0.647416\n",
      "-----------------------------------------\n",
      " 10 train (0.5097284018993378, 0.954285706792559)\n",
      " 10 test 12:13:55 loss:1.350012, acc:0.636816,score:0.647729\n",
      "-----------------------------------------\n",
      " 11 train (0.4278417229652405, 0.9671428629330227)\n",
      " 11 test 12:14:24 loss:1.355006, acc:0.616915,score:0.622679\n",
      "-----------------------------------------\n",
      " 12 train (0.3462657800742558, 0.9814285806247166)\n",
      "testing...best_score\n",
      " 12 test 12:14:52 loss:1.371707, acc:0.641791,score:0.674675\n",
      "-----------------------------------------\n",
      " 13 train (0.30622061661311556, 0.9785714404923576)\n",
      " 13 test 12:15:20 loss:1.436990, acc:0.621891,score:0.621668\n",
      "-----------------------------------------\n",
      " 14 train (0.25853388011455536, 0.9871428694043841)\n",
      " 14 test 12:15:49 loss:1.372499, acc:0.601990,score:0.624544\n",
      "-----------------------------------------\n",
      " 15 train (0.22837845981121063, 0.9842857207570758)\n",
      " 15 test 12:16:17 loss:1.377195, acc:0.621891,score:0.643495\n",
      "-----------------------------------------\n",
      " 16 train (0.19044649600982666, 0.9857142993382045)\n",
      " 16 test 12:16:45 loss:1.384127, acc:0.626866,score:0.626846\n",
      "-----------------------------------------\n",
      " 17 train (0.16300693154335022, 0.9900000010217939)\n",
      " 17 test 12:17:13 loss:1.325554, acc:0.646766,score:0.671134\n",
      "-----------------------------------------\n",
      " 18 train (0.1568830864770072, 0.9871428694043841)\n",
      " 18 test 12:17:41 loss:1.356758, acc:0.636816,score:0.657891\n",
      "-----------------------------------------\n",
      " 19 train (0.1313510856458119, 0.9900000010217939)\n",
      " 19 test 12:18:10 loss:1.386316, acc:0.631841,score:0.652758\n",
      "-----------------------------------------\n",
      "best: 12 1.371707391976124 0.6417910452209302 0.6746751766583086\n",
      " 1 train (2.3094731739589145, 0.48285714217594694)\n",
      " 1 test 12:19:02 loss:2.195164, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.0904436792646135, 0.48571428656578064)\n",
      " 2 test 12:19:30 loss:2.001735, acc:0.487562,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (1.893870438848223, 0.5328571455819267)\n",
      "testing...best_score\n",
      " 3 test 12:20:00 loss:1.813246, acc:0.537313,score:0.430835\n",
      "-----------------------------------------\n",
      " 4 train (1.642065473965236, 0.6257142850330898)\n",
      "testing...best_score\n",
      " 4 test 12:20:28 loss:1.621293, acc:0.616915,score:0.628466\n",
      "-----------------------------------------\n",
      " 5 train (1.3714373792920793, 0.764285717691694)\n",
      " 5 test 12:20:57 loss:1.589101, acc:0.616915,score:0.614448\n",
      "-----------------------------------------\n",
      " 6 train (1.1258389609200614, 0.8142857125827244)\n",
      "testing...best_score\n",
      " 6 test 12:21:25 loss:1.419526, acc:0.606965,score:0.628681\n",
      "-----------------------------------------\n",
      " 7 train (0.8942752054759434, 0.8800000037465777)\n",
      " 7 test 12:21:54 loss:1.403522, acc:0.621891,score:0.614110\n",
      "-----------------------------------------\n",
      " 8 train (0.7111743433134896, 0.9228571397917611)\n",
      "testing...best_score\n",
      " 8 test 12:22:23 loss:1.469152, acc:0.631841,score:0.671035\n",
      "-----------------------------------------\n",
      " 9 train (0.5945895101342883, 0.9371428659984044)\n",
      " 9 test 12:22:51 loss:1.506789, acc:0.636816,score:0.652400\n",
      "-----------------------------------------\n",
      " 10 train (0.484328636101314, 0.9600000040871757)\n",
      " 10 test 12:23:19 loss:1.435427, acc:0.631841,score:0.663576\n",
      "-----------------------------------------\n",
      " 11 train (0.3990071032728468, 0.9800000190734863)\n",
      " 11 test 12:23:48 loss:1.450502, acc:0.641791,score:0.660716\n",
      "-----------------------------------------\n",
      " 12 train (0.32943736229624065, 0.9842857207570758)\n",
      " 12 test 12:24:16 loss:1.493041, acc:0.636816,score:0.656190\n",
      "-----------------------------------------\n",
      " 13 train (0.28286380427224295, 0.984285729272025)\n",
      " 13 test 12:24:45 loss:1.599591, acc:0.592040,score:0.608768\n",
      "-----------------------------------------\n",
      " 14 train (0.2484105804136821, 0.984285729272025)\n",
      " 14 test 12:25:14 loss:1.574625, acc:0.646766,score:0.662353\n",
      "-----------------------------------------\n",
      " 15 train (0.21109803020954132, 0.9857142993382045)\n",
      " 15 test 12:25:43 loss:1.477790, acc:0.636816,score:0.645833\n",
      "-----------------------------------------\n",
      " 16 train (0.18354414190564836, 0.9900000095367432)\n",
      " 16 test 12:26:11 loss:1.438375, acc:0.621891,score:0.634090\n",
      "-----------------------------------------\n",
      " 17 train (0.15845661078180587, 0.9900000095367432)\n",
      " 17 test 12:26:40 loss:1.439671, acc:0.611940,score:0.626834\n",
      "-----------------------------------------\n",
      " 18 train (0.13424378633499146, 0.9914285796029227)\n",
      " 18 test 12:27:09 loss:1.414207, acc:0.606965,score:0.636940\n",
      "-----------------------------------------\n",
      " 19 train (0.11468047648668289, 0.9928571496691022)\n",
      " 19 test 12:27:38 loss:1.509181, acc:0.611940,score:0.641344\n",
      "-----------------------------------------\n",
      "best: 8 1.4691519375464217 0.6318407969095221 0.6710346710346711\n",
      " 1 train (2.2964846789836884, 0.4599999971687794)\n",
      " 1 test 12:28:37 loss:2.162252, acc:0.485000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.050140082836151, 0.48999999836087227)\n",
      "testing...best_score\n",
      " 2 test 12:29:11 loss:1.948294, acc:0.490000,score:0.347311\n",
      "-----------------------------------------\n",
      " 3 train (1.8126545697450638, 0.5249999947845936)\n",
      "testing...best_score\n",
      " 3 test 12:29:44 loss:1.735272, acc:0.560000,score:0.549614\n",
      "-----------------------------------------\n",
      " 4 train (1.5284731090068817, 0.646249994635582)\n",
      "testing...best_score\n",
      " 4 test 12:30:17 loss:1.566400, acc:0.605000,score:0.647727\n",
      "-----------------------------------------\n",
      " 5 train (1.2718441188335419, 0.7312499955296516)\n",
      "testing...best_score\n",
      " 5 test 12:30:50 loss:1.410872, acc:0.640000,score:0.662605\n",
      "-----------------------------------------\n",
      " 6 train (1.0250959321856499, 0.8162500038743019)\n",
      " 6 test 12:31:23 loss:1.367065, acc:0.610000,score:0.643893\n",
      "-----------------------------------------\n",
      " 7 train (0.8392379358410835, 0.864999994635582)\n",
      "testing...best_score\n",
      " 7 test 12:31:56 loss:1.294954, acc:0.635000,score:0.665073\n",
      "-----------------------------------------\n",
      " 8 train (0.6639350354671478, 0.9125000014901161)\n",
      " 8 test 12:32:29 loss:1.372427, acc:0.620000,score:0.652427\n",
      "-----------------------------------------\n",
      " 9 train (0.5162526965141296, 0.9450000077486038)\n",
      "testing...best_score\n",
      " 9 test 12:33:02 loss:1.419797, acc:0.635000,score:0.669280\n",
      "-----------------------------------------\n",
      " 10 train (0.40878475084900856, 0.958749994635582)\n",
      " 10 test 12:33:35 loss:1.442697, acc:0.640000,score:0.654703\n",
      "-----------------------------------------\n",
      " 11 train (0.33176666498184204, 0.9800000116229057)\n",
      " 11 test 12:34:08 loss:1.450734, acc:0.635000,score:0.649714\n",
      "-----------------------------------------\n",
      " 12 train (0.2813117876648903, 0.9812500104308128)\n",
      "testing...best_score\n",
      " 12 test 12:34:42 loss:1.471407, acc:0.665000,score:0.682360\n",
      "-----------------------------------------\n",
      " 13 train (0.2333733979612589, 0.9875000044703484)\n",
      " 13 test 12:35:15 loss:1.514414, acc:0.620000,score:0.663783\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 14 train (0.19282583333551884, 0.9862500131130219)\n",
      "testing...best_score\n",
      " 14 test 12:35:49 loss:1.489859, acc:0.650000,score:0.688113\n",
      "-----------------------------------------\n",
      " 15 train (0.15663293935358524, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 15 test 12:36:22 loss:1.527977, acc:0.665000,score:0.689549\n",
      "-----------------------------------------\n",
      " 16 train (0.13385386485606432, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 16 test 12:36:56 loss:1.506042, acc:0.670000,score:0.695438\n",
      "-----------------------------------------\n",
      " 17 train (0.11744499672204256, 0.9900000095367432)\n",
      " 17 test 12:37:29 loss:1.547482, acc:0.670000,score:0.687799\n",
      "-----------------------------------------\n",
      " 18 train (0.09811824932694435, 0.9937500059604645)\n",
      "testing...best_score\n",
      " 18 test 12:38:03 loss:1.490797, acc:0.665000,score:0.697276\n",
      "-----------------------------------------\n",
      " 19 train (0.08901452738791704, 0.9925000071525574)\n",
      " 19 test 12:38:36 loss:1.485651, acc:0.665000,score:0.690877\n",
      "-----------------------------------------\n",
      "best: 18 1.4907971048355102 0.665 0.6972756410256411\n",
      " 1 train (2.318012684583664, 0.4699999988079071)\n",
      " 1 test 12:39:35 loss:2.173136, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.0701073706150055, 0.48375000059604645)\n",
      " 2 test 12:40:09 loss:1.956367, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (1.8318161219358444, 0.5437500067055225)\n",
      "testing...best_score\n",
      " 3 test 12:40:45 loss:1.728490, acc:0.567839,score:0.561821\n",
      "-----------------------------------------\n",
      " 4 train (1.5477261245250702, 0.6687500104308128)\n",
      "testing...best_score\n",
      " 4 test 12:41:19 loss:1.538059, acc:0.643216,score:0.675456\n",
      "-----------------------------------------\n",
      " 5 train (1.2503172308206558, 0.7700000032782555)\n",
      " 5 test 12:41:54 loss:1.457921, acc:0.633166,score:0.603793\n",
      "-----------------------------------------\n",
      " 6 train (1.0020803660154343, 0.8325000107288361)\n",
      " 6 test 12:42:28 loss:1.492507, acc:0.608040,score:0.622018\n",
      "-----------------------------------------\n",
      " 7 train (0.7823779806494713, 0.8937499970197678)\n",
      " 7 test 12:43:02 loss:1.531404, acc:0.643216,score:0.625091\n",
      "-----------------------------------------\n",
      " 8 train (0.6073761135339737, 0.9387500062584877)\n",
      " 8 test 12:43:36 loss:1.555519, acc:0.582915,score:0.560318\n",
      "-----------------------------------------\n",
      " 9 train (0.4939700663089752, 0.9549999907612801)\n",
      " 9 test 12:44:11 loss:1.547917, acc:0.587940,score:0.622995\n",
      "-----------------------------------------\n",
      " 10 train (0.39836201071739197, 0.9750000089406967)\n",
      " 10 test 12:44:45 loss:1.485739, acc:0.597990,score:0.615118\n",
      "-----------------------------------------\n",
      " 11 train (0.31463348865509033, 0.9887500107288361)\n",
      " 11 test 12:45:19 loss:1.555859, acc:0.648241,score:0.658075\n",
      "-----------------------------------------\n",
      " 12 train (0.26414325274527073, 0.983750008046627)\n",
      " 12 test 12:45:53 loss:1.481753, acc:0.613065,score:0.634897\n",
      "-----------------------------------------\n",
      " 13 train (0.22152518667280674, 0.9912500083446503)\n",
      " 13 test 12:46:27 loss:1.417238, acc:0.618090,score:0.640291\n",
      "-----------------------------------------\n",
      " 14 train (0.19126460701227188, 0.9912500083446503)\n",
      " 14 test 12:47:02 loss:1.412054, acc:0.603015,score:0.625294\n",
      "-----------------------------------------\n",
      " 15 train (0.1654145698994398, 0.9925000071525574)\n",
      " 15 test 12:47:36 loss:1.553346, acc:0.638191,score:0.643300\n",
      "-----------------------------------------\n",
      " 16 train (0.1324432510882616, 0.9962500035762787)\n",
      " 16 test 12:48:10 loss:1.599540, acc:0.608040,score:0.640533\n",
      "-----------------------------------------\n",
      " 17 train (0.11471750680357218, 0.9937500059604645)\n",
      " 17 test 12:48:45 loss:1.483371, acc:0.628141,score:0.617108\n",
      "-----------------------------------------\n",
      " 18 train (0.09938761964440346, 0.9950000047683716)\n",
      " 18 test 12:49:19 loss:1.414424, acc:0.628141,score:0.652862\n",
      "-----------------------------------------\n",
      " 19 train (0.08835698943585157, 0.9925000071525574)\n",
      " 19 test 12:49:53 loss:1.472937, acc:0.633166,score:0.652897\n",
      "-----------------------------------------\n",
      "best: 4 1.5380586092196518 0.6432160810010517 0.6754563894523327\n",
      " 1 train (2.310989022254944, 0.47749999538064003)\n",
      " 1 test 12:50:57 loss:2.173970, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.0602899491786957, 0.48874998837709427)\n",
      "testing...best_score\n",
      " 2 test 12:51:35 loss:1.957099, acc:0.492462,score:0.348421\n",
      "-----------------------------------------\n",
      " 3 train (1.807300791144371, 0.5512499958276749)\n",
      "testing...best_score\n",
      " 3 test 12:52:12 loss:1.739256, acc:0.592965,score:0.626286\n",
      "-----------------------------------------\n",
      " 4 train (1.4863580912351608, 0.7087500020861626)\n",
      " 4 test 12:52:50 loss:1.755750, acc:0.582915,score:0.607394\n",
      "-----------------------------------------\n",
      " 5 train (1.201372742652893, 0.7962500005960464)\n",
      " 5 test 12:53:27 loss:1.685739, acc:0.623116,score:0.620948\n",
      "-----------------------------------------\n",
      " 6 train (0.9883205443620682, 0.8474999964237213)\n",
      " 6 test 12:54:05 loss:1.620493, acc:0.613065,score:0.589607\n",
      "-----------------------------------------\n",
      " 7 train (0.7939614206552505, 0.8937499970197678)\n",
      "testing...best_score\n",
      " 7 test 12:54:42 loss:1.705033, acc:0.582915,score:0.629100\n",
      "-----------------------------------------\n",
      " 8 train (0.6556350067257881, 0.9187499955296516)\n",
      " 8 test 12:55:20 loss:1.653971, acc:0.572864,score:0.623972\n",
      "-----------------------------------------\n",
      " 9 train (0.5043353587388992, 0.9562500044703484)\n",
      "testing...best_score\n",
      " 9 test 12:55:57 loss:1.525367, acc:0.608040,score:0.631034\n",
      "-----------------------------------------\n",
      " 10 train (0.4203072525560856, 0.9687500074505806)\n",
      "testing...best_score\n",
      " 10 test 12:56:35 loss:1.556008, acc:0.623116,score:0.654743\n",
      "-----------------------------------------\n",
      " 11 train (0.3400672897696495, 0.9800000116229057)\n",
      " 11 test 12:57:12 loss:1.707039, acc:0.608040,score:0.654401\n",
      "-----------------------------------------\n",
      " 12 train (0.3005206398665905, 0.9737499952316284)\n",
      " 12 test 12:57:50 loss:1.730583, acc:0.618090,score:0.632502\n",
      "-----------------------------------------\n",
      " 13 train (0.243629802018404, 0.9812500104308128)\n",
      "testing...best_score\n",
      " 13 test 12:58:27 loss:1.644151, acc:0.623116,score:0.676379\n",
      "-----------------------------------------\n",
      " 14 train (0.20957585982978344, 0.9825000092387199)\n",
      " 14 test 12:59:05 loss:1.641268, acc:0.633166,score:0.668478\n",
      "-----------------------------------------\n",
      " 15 train (0.18238226138055325, 0.9862500131130219)\n",
      " 15 test 12:59:43 loss:1.524486, acc:0.603015,score:0.666421\n",
      "-----------------------------------------\n",
      " 16 train (0.15145220793783665, 0.9900000095367432)\n",
      " 16 test 13:00:20 loss:1.537155, acc:0.628141,score:0.673134\n",
      "-----------------------------------------\n",
      " 17 train (0.1352428374812007, 0.9875000044703484)\n",
      " 17 test 13:00:58 loss:1.652660, acc:0.618090,score:0.652977\n",
      "-----------------------------------------\n",
      " 18 train (0.12145322374999523, 0.9887500032782555)\n",
      " 18 test 13:01:36 loss:1.710548, acc:0.587940,score:0.634740\n",
      "-----------------------------------------\n",
      " 19 train (0.10364317335188389, 0.9937500059604645)\n",
      " 19 test 13:02:13 loss:1.747777, acc:0.608040,score:0.631034\n",
      "-----------------------------------------\n",
      "best: 13 1.6441510694110812 0.6231155780392077 0.6763791763791763\n"
     ]
    }
   ],
   "source": [
    "# Improved TAN: 5-fold stratified cross-validation of the TargetAttention2 model.\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "# random_state pins the fold assignment so CV results are reproducible\n",
    "# (consistent with the seeds set in the setup cell at the top of the notebook).\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)\n",
    "# Stratify on the argmax of the one-hot labels; derive the sample count from\n",
    "# the data instead of hard-coding 1000 so the cell works for any dataset size.\n",
    "stratify_labels = np.argmax(all_data2, axis=1)\n",
    "for train_index, valid_index in kfold.split(np.zeros(len(all_data2)), stratify_labels):\n",
    "    train_x_t = [all_data0[train_index], all_data1[train_index]]\n",
    "    valid_x_t = [all_data0[valid_index], all_data1[valid_index]]\n",
    "    train_y_t, valid_y_t = all_data2[train_index], all_data2[valid_index]\n",
    "    tat = TargetAttention2(tan_config)\n",
    "    tat.build(embedding_matrix)\n",
    "    tat.fit(train_x_t, train_y_t, valid_x_t, valid_y_t)\n",
    "    del tat  # release the model before building the next fold's graph\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 104,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_1 (Embedding)      (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_1 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_1 (Dropout)          (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_1 (Dense)              (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      " 1 train (1.1768345832824707, 0.4186666637659073)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 16:20:35 loss:1.083338, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.049096699555715, 0.5306666612625122)\n",
      "testing...best_score\n",
      " 2 test 16:21:07 loss:1.054389, acc:0.492537,score:0.346876\n",
      "-----------------------------------------\n",
      " 3 train (0.9681066473325094, 0.5693333367506663)\n",
      "testing...best_score\n",
      " 3 test 16:21:38 loss:0.959442, acc:0.572139,score:0.581081\n",
      "-----------------------------------------\n",
      " 4 train (0.7992673794428508, 0.6893333355585735)\n",
      "testing...best_score\n",
      " 4 test 16:22:10 loss:1.012159, acc:0.572139,score:0.604905\n",
      "-----------------------------------------\n",
      " 5 train (0.6200139423211416, 0.7773333350817363)\n",
      "testing...best_score\n",
      " 5 test 16:22:41 loss:0.891089, acc:0.606965,score:0.630153\n",
      "-----------------------------------------\n",
      " 6 train (0.459913424650828, 0.8493333260218302)\n",
      "testing...best_score\n",
      " 6 test 16:23:13 loss:1.016285, acc:0.572139,score:0.644448\n",
      "-----------------------------------------\n",
      " 7 train (0.34074620803197225, 0.9159999926884969)\n",
      "testing...best_score\n",
      " 7 test 16:23:46 loss:0.971131, acc:0.631841,score:0.672897\n",
      "-----------------------------------------\n",
      " 8 train (0.2558181047439575, 0.9359999934832255)\n",
      " 8 test 16:24:19 loss:1.051646, acc:0.621891,score:0.668697\n",
      "-----------------------------------------\n",
      " 9 train (0.18675426095724107, 0.9613333304723104)\n",
      "testing...best_score\n",
      " 9 test 16:24:52 loss:1.233937, acc:0.636816,score:0.684424\n",
      "-----------------------------------------\n",
      " 10 train (0.1561758870879809, 0.972000002861023)\n",
      " 10 test 16:25:24 loss:1.158394, acc:0.621891,score:0.671874\n",
      "-----------------------------------------\n",
      " 11 train (0.13776300301154454, 0.9760000030199687)\n",
      "testing...best_score\n",
      " 11 test 16:25:56 loss:1.165608, acc:0.646766,score:0.704762\n",
      "-----------------------------------------\n",
      " 12 train (0.12425109644730886, 0.9773333350817363)\n",
      " 12 test 16:26:29 loss:1.235781, acc:0.651741,score:0.702280\n",
      "-----------------------------------------\n",
      " 13 train (0.12383469839890798, 0.9799999992052714)\n",
      " 13 test 16:27:03 loss:1.092352, acc:0.626866,score:0.680271\n",
      "-----------------------------------------\n",
      " 14 train (0.11963956753412883, 0.9733333349227905)\n",
      " 14 test 16:27:34 loss:1.217278, acc:0.611940,score:0.650305\n",
      "-----------------------------------------\n",
      " 15 train (0.09260751952727636, 0.9920000036557516)\n",
      " 15 test 16:28:09 loss:1.286555, acc:0.646766,score:0.677713\n",
      "-----------------------------------------\n",
      " 16 train (0.09095582614342372, 0.9853333353996276)\n",
      " 16 test 16:28:42 loss:1.217897, acc:0.641791,score:0.673589\n",
      "-----------------------------------------\n",
      " 17 train (0.07893829469879468, 0.9880000074704488)\n",
      " 17 test 16:29:13 loss:1.207775, acc:0.621891,score:0.673260\n",
      "-----------------------------------------\n",
      " 18 train (0.07506413410107295, 0.990666675567627)\n",
      " 18 test 16:29:45 loss:1.359329, acc:0.641791,score:0.675726\n",
      "-----------------------------------------\n",
      " 19 train (0.07317351773381234, 0.9880000034968058)\n",
      " 19 test 16:30:18 loss:1.215214, acc:0.621891,score:0.665714\n",
      "-----------------------------------------\n",
      "best: 11 1.1656084244524068 0.6467661700438504 0.7047619047619048\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_2 (Embedding)      (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_2 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_2 (Dropout)          (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_2 (Dense)              (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      " 1 train (1.1561513066291809, 0.4306666692097982)\n",
      " 1 test 16:30:58 loss:1.103290, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.0404017249743143, 0.5293333351612091)\n",
      "testing...best_score\n",
      " 2 test 16:31:30 loss:1.036227, acc:0.562189,score:0.377440\n",
      "-----------------------------------------\n",
      " 3 train (0.9185351371765137, 0.610666670401891)\n",
      "testing...best_score\n",
      " 3 test 16:32:02 loss:0.981148, acc:0.512438,score:0.551240\n",
      "-----------------------------------------\n",
      " 4 train (0.7886210759480794, 0.6746666709582011)\n",
      " 4 test 16:32:37 loss:0.926901, acc:0.552239,score:0.543621\n",
      "-----------------------------------------\n",
      " 5 train (0.6228500882784526, 0.7573333342870077)\n",
      "testing...best_score\n",
      " 5 test 16:33:10 loss:0.960843, acc:0.577114,score:0.555721\n",
      "-----------------------------------------\n",
      " 6 train (0.42507483959198, 0.8613333344459534)\n",
      " 6 test 16:33:42 loss:1.031075, acc:0.587065,score:0.549263\n",
      "-----------------------------------------\n",
      " 7 train (0.33203767041365306, 0.9119999965031942)\n",
      "testing...best_score\n",
      " 7 test 16:34:15 loss:1.092839, acc:0.567164,score:0.560124\n",
      "-----------------------------------------\n",
      " 8 train (0.25106531381607056, 0.9426666577657064)\n",
      "testing...best_score\n",
      " 8 test 16:34:47 loss:1.124965, acc:0.592040,score:0.593330\n",
      "-----------------------------------------\n",
      " 9 train (0.2105039785305659, 0.9466666618982951)\n",
      " 9 test 16:35:19 loss:1.204098, acc:0.552239,score:0.564912\n",
      "-----------------------------------------\n",
      " 10 train (0.1709131399790446, 0.9613333384195963)\n",
      " 10 test 16:35:52 loss:1.236447, acc:0.557214,score:0.568118\n",
      "-----------------------------------------\n",
      " 11 train (0.14914802014827727, 0.9693333347638448)\n",
      " 11 test 16:36:26 loss:1.209608, acc:0.582090,score:0.574446\n",
      "-----------------------------------------\n",
      " 12 train (0.13408407121896743, 0.9839999993642171)\n",
      "testing...best_score\n",
      " 12 test 16:36:59 loss:1.238227, acc:0.572139,score:0.594460\n",
      "-----------------------------------------\n",
      " 13 train (0.12266134669383368, 0.9840000033378601)\n",
      " 13 test 16:37:32 loss:1.210734, acc:0.587065,score:0.570737\n",
      "-----------------------------------------\n",
      " 14 train (0.12358845422665278, 0.981333339214325)\n",
      " 14 test 16:38:05 loss:1.167939, acc:0.601990,score:0.585540\n",
      "-----------------------------------------\n",
      " 15 train (0.09125018417835236, 0.9893333395322164)\n",
      "testing...best_score\n",
      " 15 test 16:38:38 loss:1.336746, acc:0.592040,score:0.604592\n",
      "-----------------------------------------\n",
      " 16 train (0.09551870971918106, 0.9893333395322164)\n",
      " 16 test 16:39:11 loss:1.344204, acc:0.557214,score:0.578418\n",
      "-----------------------------------------\n",
      " 17 train (0.08635937745372455, 0.984000007311503)\n",
      " 17 test 16:39:44 loss:1.365794, acc:0.572139,score:0.597419\n",
      "-----------------------------------------\n",
      " 18 train (0.08983780220150947, 0.9880000074704488)\n",
      " 18 test 16:40:17 loss:1.233360, acc:0.587065,score:0.595955\n",
      "-----------------------------------------\n",
      " 19 train (0.08213957076271375, 0.9826666712760925)\n",
      " 19 test 16:40:53 loss:1.565332, acc:0.567164,score:0.574776\n",
      "-----------------------------------------\n",
      "best: 15 1.3367462798730652 0.592039802181187 0.6045921378821862\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_3 (Embedding)      (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_3 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_3 (Dropout)          (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_3 (Dense)              (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 train (1.1586000844836235, 0.44249999802559614)\n",
      "testing...best_score\n",
      " 1 test 16:41:33 loss:1.137400, acc:0.425000,score:0.491692\n",
      "-----------------------------------------\n",
      " 2 train (1.0653697066009045, 0.5049999970942736)\n",
      " 2 test 16:42:07 loss:1.059843, acc:0.515000,score:0.398516\n",
      "-----------------------------------------\n",
      " 3 train (0.8971378915011883, 0.6200000029057264)\n",
      "testing...best_score\n",
      " 3 test 16:42:41 loss:1.004542, acc:0.555000,score:0.590250\n",
      "-----------------------------------------\n",
      " 4 train (0.7092165052890778, 0.7200000062584877)\n",
      " 4 test 16:43:15 loss:1.044920, acc:0.550000,score:0.555733\n",
      "-----------------------------------------\n",
      " 5 train (0.5041749496012926, 0.8324999995529652)\n",
      " 5 test 16:43:49 loss:1.176934, acc:0.490000,score:0.538133\n",
      "-----------------------------------------\n",
      " 6 train (0.34818204678595066, 0.9162499979138374)\n",
      " 6 test 16:44:23 loss:1.332826, acc:0.550000,score:0.589447\n",
      "-----------------------------------------\n",
      " 7 train (0.26777773071080446, 0.9287499934434891)\n",
      "testing...best_score\n",
      " 7 test 16:44:57 loss:1.364752, acc:0.560000,score:0.613636\n",
      "-----------------------------------------\n",
      " 8 train (0.18722200393676758, 0.9612499959766865)\n",
      " 8 test 16:45:32 loss:1.484270, acc:0.545000,score:0.583979\n",
      "-----------------------------------------\n",
      " 9 train (0.18801337899640203, 0.9662499986588955)\n",
      " 9 test 16:46:06 loss:1.388575, acc:0.550000,score:0.601956\n",
      "-----------------------------------------\n",
      " 10 train (0.15070859668776393, 0.9687499962747097)\n",
      "testing...best_score\n",
      " 10 test 16:46:40 loss:1.460241, acc:0.560000,score:0.630711\n",
      "-----------------------------------------\n",
      " 11 train (0.12941583152860403, 0.9800000004470348)\n",
      " 11 test 16:47:14 loss:1.561880, acc:0.550000,score:0.626242\n",
      "-----------------------------------------\n",
      " 12 train (0.10489248484373093, 0.9850000068545341)\n",
      " 12 test 16:47:48 loss:1.702666, acc:0.565000,score:0.623360\n",
      "-----------------------------------------\n",
      " 13 train (0.10565236816182733, 0.9812500067055225)\n",
      " 13 test 16:48:23 loss:1.640101, acc:0.555000,score:0.622306\n",
      "-----------------------------------------\n",
      " 14 train (0.09562274115160108, 0.9862500056624413)\n",
      " 14 test 16:48:59 loss:1.680570, acc:0.555000,score:0.612746\n",
      "-----------------------------------------\n",
      " 15 train (0.09160343720577657, 0.9887500032782555)\n",
      " 15 test 16:49:34 loss:1.714837, acc:0.515000,score:0.590206\n",
      "-----------------------------------------\n",
      " 16 train (0.0909968379419297, 0.9875000081956387)\n",
      " 16 test 16:50:09 loss:1.670833, acc:0.535000,score:0.595620\n",
      "-----------------------------------------\n",
      " 17 train (0.07940228609368205, 0.9875000044703484)\n",
      " 17 test 16:50:45 loss:1.604042, acc:0.565000,score:0.627928\n",
      "-----------------------------------------\n",
      " 18 train (0.08694910234771669, 0.986250001937151)\n",
      "testing...best_score\n",
      " 18 test 16:51:20 loss:1.566301, acc:0.560000,score:0.632727\n",
      "-----------------------------------------\n",
      " 19 train (0.06789482152089477, 0.9900000058114529)\n",
      " 19 test 16:51:54 loss:1.611514, acc:0.560000,score:0.628545\n",
      "-----------------------------------------\n",
      "best: 18 1.566300687789917 0.56 0.6327272727272727\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_4 (Embedding)      (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_4 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_4 (Dropout)          (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_4 (Dense)              (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      " 1 train (1.198700189590454, 0.4299999997019768)\n",
      " 1 test 16:52:36 loss:1.113972, acc:0.477387,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.0683965273201466, 0.4775000009685755)\n",
      "testing...best_score\n",
      " 2 test 16:53:11 loss:1.064224, acc:0.547739,score:0.451128\n",
      "-----------------------------------------\n",
      " 3 train (0.9478183351457119, 0.5687499977648258)\n",
      "testing...best_score\n",
      " 3 test 16:53:46 loss:1.007676, acc:0.517588,score:0.566092\n",
      "-----------------------------------------\n",
      " 4 train (0.7484200075268745, 0.7124999985098839)\n",
      " 4 test 16:54:24 loss:0.977778, acc:0.567839,score:0.550985\n",
      "-----------------------------------------\n",
      " 5 train (0.5539546217769384, 0.7937499992549419)\n",
      "testing...best_score\n",
      " 5 test 16:55:00 loss:0.995635, acc:0.577889,score:0.619304\n",
      "-----------------------------------------\n",
      " 6 train (0.38689290080219507, 0.8762499913573265)\n",
      " 6 test 16:55:35 loss:1.097904, acc:0.587940,score:0.601379\n",
      "-----------------------------------------\n",
      " 7 train (0.3059436855837703, 0.9250000007450581)\n",
      "testing...best_score\n",
      " 7 test 16:56:11 loss:1.059384, acc:0.603015,score:0.649606\n",
      "-----------------------------------------\n",
      " 8 train (0.23721089959144592, 0.9487500004470348)\n",
      " 8 test 16:56:48 loss:1.133560, acc:0.552764,score:0.572558\n",
      "-----------------------------------------\n",
      " 9 train (0.19372552866116166, 0.9637500084936619)\n",
      " 9 test 16:57:24 loss:1.185243, acc:0.582915,score:0.623522\n",
      "-----------------------------------------\n",
      " 10 train (0.1511413836851716, 0.9812500104308128)\n",
      " 10 test 16:57:59 loss:1.511600, acc:0.592965,score:0.635339\n",
      "-----------------------------------------\n",
      " 11 train (0.13543128175660968, 0.9762500040233135)\n",
      " 11 test 16:58:35 loss:1.625646, acc:0.587940,score:0.613567\n",
      "-----------------------------------------\n",
      " 12 train (0.12651260010898113, 0.9837500043213367)\n",
      "testing...best_score\n",
      " 12 test 16:59:10 loss:1.363491, acc:0.597990,score:0.658219\n",
      "-----------------------------------------\n",
      " 13 train (0.10903665632940829, 0.982499998062849)\n",
      " 13 test 16:59:45 loss:1.398275, acc:0.592965,score:0.651841\n",
      "-----------------------------------------\n",
      " 14 train (0.0896857634652406, 0.9875000007450581)\n",
      " 14 test 17:00:21 loss:1.456335, acc:0.582915,score:0.645076\n",
      "-----------------------------------------\n",
      " 15 train (0.10559712257236242, 0.9850000031292439)\n",
      " 15 test 17:00:59 loss:1.519482, acc:0.582915,score:0.642652\n",
      "-----------------------------------------\n",
      " 16 train (0.0951728941872716, 0.9887500032782555)\n",
      " 16 test 17:01:35 loss:1.403249, acc:0.587940,score:0.643152\n",
      "-----------------------------------------\n",
      " 17 train (0.08212542464025319, 0.9862500056624413)\n",
      " 17 test 17:02:12 loss:1.628935, acc:0.587940,score:0.648788\n",
      "-----------------------------------------\n",
      " 18 train (0.07607009215280414, 0.9900000058114529)\n",
      " 18 test 17:02:49 loss:1.471796, acc:0.592965,score:0.641778\n",
      "-----------------------------------------\n",
      " 19 train (0.07114515593275428, 0.9912500008940697)\n",
      " 19 test 17:03:25 loss:1.543733, acc:0.577889,score:0.627240\n",
      "-----------------------------------------\n",
      "best: 12 1.3634907935731975 0.5979899506473062 0.6582193617591848\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "sentence_input (InputLayer)  (None, 78)                0         \n",
      "_________________________________________________________________\n",
      "embedding_5 (Embedding)      (None, 78, 300)           1189800   \n",
      "_________________________________________________________________\n",
      "bidirectional_5 (Bidirection (None, 600)               1442400   \n",
      "_________________________________________________________________\n",
      "dropout_5 (Dropout)          (None, 600)               0         \n",
      "_________________________________________________________________\n",
      "dense_5 (Dense)              (None, 3)                 1803      \n",
      "=================================================================\n",
      "Total params: 2,634,003\n",
      "Trainable params: 2,634,003\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      " 1 train (1.243614099919796, 0.40125000197440386)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "testing...best_score\n",
      " 1 test 17:04:10 loss:1.094868, acc:0.492462,score:0.348421\n",
      "-----------------------------------------\n",
      " 2 train (1.0663175135850906, 0.49875000305473804)\n",
      "testing...best_score\n",
      " 2 test 17:04:46 loss:1.041270, acc:0.497487,score:0.369229\n",
      "-----------------------------------------\n",
      " 3 train (0.9693566747009754, 0.5549999997019768)\n",
      "testing...best_score\n",
      " 3 test 17:05:22 loss:0.962911, acc:0.562814,score:0.598907\n",
      "-----------------------------------------\n",
      " 4 train (0.8190486878156662, 0.702500008046627)\n",
      " 4 test 17:05:59 loss:0.934125, acc:0.613065,score:0.583240\n",
      "-----------------------------------------\n",
      " 5 train (0.6147503238171339, 0.7712499909102917)\n",
      " 5 test 17:06:35 loss:0.918699, acc:0.597990,score:0.596814\n",
      "-----------------------------------------\n",
      " 6 train (0.4030076153576374, 0.8924999907612801)\n",
      "testing...best_score\n",
      " 6 test 17:07:12 loss:0.942220, acc:0.638191,score:0.625472\n",
      "-----------------------------------------\n",
      " 7 train (0.3209801772609353, 0.9125000014901161)\n",
      "testing...best_score\n",
      " 7 test 17:07:49 loss:1.154882, acc:0.618090,score:0.627885\n",
      "-----------------------------------------\n",
      " 8 train (0.2714183898642659, 0.9287499934434891)\n",
      "testing...best_score\n",
      " 8 test 17:08:25 loss:1.099683, acc:0.643216,score:0.655157\n",
      "-----------------------------------------\n",
      " 9 train (0.1540883844718337, 0.978750005364418)\n",
      " 9 test 17:09:02 loss:1.232597, acc:0.603015,score:0.616336\n",
      "-----------------------------------------\n",
      " 10 train (0.11222621658816934, 0.9912500083446503)\n",
      " 10 test 17:09:39 loss:1.529388, acc:0.567839,score:0.538201\n",
      "-----------------------------------------\n",
      " 11 train (0.10189776541665196, 0.9937499985098839)\n",
      " 11 test 17:10:17 loss:1.291754, acc:0.592965,score:0.614218\n",
      "-----------------------------------------\n",
      " 12 train (0.09665952762588859, 0.9899999983608723)\n",
      " 12 test 17:10:55 loss:1.348156, acc:0.603015,score:0.597535\n",
      "-----------------------------------------\n",
      " 13 train (0.09161808504723012, 0.9900000058114529)\n",
      " 13 test 17:11:30 loss:1.255322, acc:0.638191,score:0.646585\n",
      "-----------------------------------------\n",
      " 14 train (0.08439727895893157, 0.9900000020861626)\n",
      " 14 test 17:12:06 loss:1.432408, acc:0.577889,score:0.579910\n",
      "-----------------------------------------\n",
      " 15 train (0.06650207936763763, 0.9950000047683716)\n",
      " 15 test 17:12:41 loss:1.430027, acc:0.613065,score:0.623356\n",
      "-----------------------------------------\n",
      " 16 train (0.06468714610673487, 0.9925000071525574)\n",
      " 16 test 17:13:17 loss:1.376622, acc:0.592965,score:0.614919\n",
      "-----------------------------------------\n",
      " 17 train (0.06335557089187205, 0.9950000047683716)\n",
      " 17 test 17:13:52 loss:1.463752, acc:0.603015,score:0.582026\n",
      "-----------------------------------------\n",
      " 18 train (0.06677346164360642, 0.9924999997019768)\n",
      " 18 test 17:14:28 loss:1.423626, acc:0.567839,score:0.526667\n",
      "-----------------------------------------\n",
      " 19 train (0.06830728030763566, 0.9900000020861626)\n",
      " 19 test 17:15:03 loss:1.472477, acc:0.638191,score:0.647778\n",
      "-----------------------------------------\n",
      "best: 8 1.099682656963866 0.6432160805517705 0.6551567469332121\n"
     ]
    }
   ],
   "source": [
    "# LSTM: 5-fold stratified cross-validation over the dataset\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "# one-hot labels -> class indices for stratification\n",
    "fold_labels = np.argmax(all_data2, axis=1)\n",
    "# random_state pins the fold assignment so results are reproducible\n",
    "# (the notebook seeds numpy/tf/random at the top; an unseeded shuffle here\n",
    "# would make every run produce different folds and different 'best' scores)\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)\n",
    "# use the real sample count instead of a hard-coded 1000\n",
    "for train_index, valid_index in kfold.split(np.zeros(len(fold_labels)), fold_labels):\n",
    "    train_x_t, valid_x_t = all_data0[train_index], all_data0[valid_index]\n",
    "    train_y_t, valid_y_t = all_data2[train_index], all_data2[valid_index]\n",
    "    tat = CommonLstm(lstm_config)\n",
    "    tat.build(embedding_matrix)\n",
    "    tat.fit(train_x_t, train_y_t, valid_x_t, valid_y_t)\n",
    "    # NOTE(review): `del` drops the Python reference but Keras keeps adding to the\n",
    "    # default graph across folds (embedding_2, embedding_3, ... in the output);\n",
    "    # K.clear_session() would free it but would also discard the seeded session\n",
    "    # configured above -- confirm memory is acceptable for 5 folds.\n",
    "    del tat"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:16: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.\n",
      "  app.launch_new_instance()\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_1 (InputLayer)            (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_11 (Embedding)        (None, 78, 300)      1189800     input_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_1 (Conv1D)               (None, 76, 250)      225250      embedding_11[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_2 (Conv1D)               (None, 75, 250)      300250      embedding_11[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_3 (Conv1D)               (None, 74, 250)      375250      embedding_11[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_1 (GlobalM (None, 250)          0           conv1d_1[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_2 (GlobalM (None, 250)          0           conv1d_2[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_3 (GlobalM (None, 250)          0           conv1d_3[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "merge_1 (Merge)                 (None, 750)          0           global_max_pooling1d_1[0][0]     \n",
      "                                                                 global_max_pooling1d_2[0][0]     \n",
      "                                                                 global_max_pooling1d_3[0][0]     \n",
      "__________________________________________________________________________________________________\n",
      "dense_21 (Dense)                (None, 250)          187750      merge_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_11 (Dropout)            (None, 250)          0           dense_21[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_22 (Dense)                (None, 3)            753         dropout_11[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 2,279,053\n",
      "Trainable params: 2,279,053\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.6779289722442627, 0.3866666634877523)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 19:33:35 loss:1.078510, acc:0.482587,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.0362884322802226, 0.518666668732961)\n",
      "testing...best_score\n",
      " 2 test 19:33:54 loss:0.999754, acc:0.517413,score:0.448834\n",
      "-----------------------------------------\n",
      " 3 train (0.8385404944419861, 0.6533333420753479)\n",
      "testing...best_score\n",
      " 3 test 19:34:14 loss:0.958398, acc:0.582090,score:0.634804\n",
      "-----------------------------------------\n",
      " 4 train (0.6787169138590495, 0.781333331267039)\n",
      "testing...best_score\n",
      " 4 test 19:34:34 loss:0.894798, acc:0.646766,score:0.702103\n",
      "-----------------------------------------\n",
      " 5 train (0.482963756720225, 0.8879999955495198)\n",
      " 5 test 19:34:53 loss:0.908905, acc:0.592040,score:0.602665\n",
      "-----------------------------------------\n",
      " 6 train (0.32878670195738474, 0.9480000019073487)\n",
      " 6 test 19:35:12 loss:0.913069, acc:0.606965,score:0.589407\n",
      "-----------------------------------------\n",
      " 7 train (0.21049123207728068, 0.9733333349227905)\n",
      " 7 test 19:35:31 loss:0.913033, acc:0.651741,score:0.684324\n",
      "-----------------------------------------\n",
      " 8 train (0.1383526106675466, 0.9880000034968058)\n",
      " 8 test 19:35:50 loss:0.937138, acc:0.616915,score:0.652142\n",
      "-----------------------------------------\n",
      " 9 train (0.1069834366440773, 0.9893333355585734)\n",
      " 9 test 19:36:09 loss:0.976539, acc:0.601990,score:0.635277\n",
      "-----------------------------------------\n",
      " 10 train (0.08238861834009488, 0.9933333357175191)\n",
      " 10 test 19:36:28 loss:1.004485, acc:0.651741,score:0.679627\n",
      "-----------------------------------------\n",
      " 11 train (0.0735783097644647, 0.9933333357175191)\n",
      " 11 test 19:36:47 loss:1.023963, acc:0.611940,score:0.645642\n",
      "-----------------------------------------\n",
      " 12 train (0.06188260739048322, 0.9920000036557516)\n",
      " 12 test 19:37:06 loss:1.195095, acc:0.656716,score:0.694676\n",
      "-----------------------------------------\n",
      " 13 train (0.061786904186010363, 0.9906666715939839)\n",
      " 13 test 19:37:26 loss:1.136618, acc:0.587065,score:0.628220\n",
      "-----------------------------------------\n",
      " 14 train (0.058737305303414665, 0.9946666677792867)\n",
      " 14 test 19:37:47 loss:1.200085, acc:0.636816,score:0.667584\n",
      "-----------------------------------------\n",
      " 15 train (0.058497193331519766, 0.9933333396911621)\n",
      " 15 test 19:38:06 loss:1.087804, acc:0.636816,score:0.665123\n",
      "-----------------------------------------\n",
      " 16 train (0.04827683555583159, 0.9933333396911621)\n",
      " 16 test 19:38:24 loss:1.175404, acc:0.641791,score:0.675926\n",
      "-----------------------------------------\n",
      " 17 train (0.06183788205186526, 0.990666667620341)\n",
      " 17 test 19:38:44 loss:1.105794, acc:0.646766,score:0.678985\n",
      "-----------------------------------------\n",
      " 18 train (0.061795372143387796, 0.9880000034968058)\n",
      " 18 test 19:39:02 loss:1.121179, acc:0.651741,score:0.681056\n",
      "-----------------------------------------\n",
      " 19 train (0.053371078024307884, 0.9920000076293946)\n",
      " 19 test 19:39:21 loss:1.100984, acc:0.611940,score:0.643556\n",
      "-----------------------------------------\n",
      "best: 4 0.8947978764031064 0.646766170340391 0.7021028037383178\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_12 (Embedding)        (None, 78, 300)      1189800     input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_4 (Conv1D)               (None, 76, 250)      225250      embedding_12[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_5 (Conv1D)               (None, 75, 250)      300250      embedding_12[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_6 (Conv1D)               (None, 74, 250)      375250      embedding_12[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_4 (GlobalM (None, 250)          0           conv1d_4[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_5 (GlobalM (None, 250)          0           conv1d_5[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_6 (GlobalM (None, 250)          0           conv1d_6[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "merge_2 (Merge)                 (None, 750)          0           global_max_pooling1d_4[0][0]     \n",
      "                                                                 global_max_pooling1d_5[0][0]     \n",
      "                                                                 global_max_pooling1d_6[0][0]     \n",
      "__________________________________________________________________________________________________\n",
      "dense_23 (Dense)                (None, 250)          187750      merge_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_12 (Dropout)            (None, 250)          0           dense_23[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_24 (Dense)                (None, 3)            753         dropout_12[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 2,279,053\n",
      "Trainable params: 2,279,053\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.8293472290039063, 0.39066666464010874)\n",
      " 1 test 19:39:47 loss:1.122159, acc:0.542289,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.0025502761205038, 0.554666668176651)\n",
      "testing...best_score\n",
      " 2 test 19:40:07 loss:0.997757, acc:0.567164,score:0.468708\n",
      "-----------------------------------------\n",
      " 3 train (0.8014427145322164, 0.6746666749318441)\n",
      "testing...best_score\n",
      " 3 test 19:40:26 loss:0.993647, acc:0.532338,score:0.492214\n",
      "-----------------------------------------\n",
      " 4 train (0.6463832219441732, 0.7919999996821085)\n",
      "testing...best_score\n",
      " 4 test 19:40:45 loss:0.916137, acc:0.597015,score:0.562529\n",
      "-----------------------------------------\n",
      " 5 train (0.4260010063648224, 0.9266666690508525)\n",
      "testing...best_score\n",
      " 5 test 19:41:05 loss:0.872432, acc:0.656716,score:0.673179\n",
      "-----------------------------------------\n",
      " 6 train (0.2704578161239624, 0.9706666668256124)\n",
      " 6 test 19:41:24 loss:0.848180, acc:0.671642,score:0.665563\n",
      "-----------------------------------------\n",
      " 7 train (0.16920628448327382, 0.9826666712760925)\n",
      "testing...best_score\n",
      " 7 test 19:41:44 loss:0.850089, acc:0.661692,score:0.688038\n",
      "-----------------------------------------\n",
      " 8 train (0.11748948643604915, 0.9919999996821086)\n",
      " 8 test 19:42:03 loss:0.913220, acc:0.651741,score:0.661616\n",
      "-----------------------------------------\n",
      " 9 train (0.08093499839305877, 0.9946666717529297)\n",
      "testing...best_score\n",
      " 9 test 19:42:23 loss:0.891720, acc:0.671642,score:0.695134\n",
      "-----------------------------------------\n",
      " 10 train (0.07182305331031481, 0.9920000076293946)\n",
      " 10 test 19:42:42 loss:0.943566, acc:0.646766,score:0.663903\n",
      "-----------------------------------------\n",
      " 11 train (0.07454453508059183, 0.990666675567627)\n",
      " 11 test 19:43:01 loss:0.927940, acc:0.656716,score:0.657254\n",
      "-----------------------------------------\n",
      " 12 train (0.05929812143246333, 0.9919999996821086)\n",
      " 12 test 19:43:21 loss:0.944736, acc:0.651741,score:0.665738\n",
      "-----------------------------------------\n",
      " 13 train (0.06470650434494019, 0.9946666717529297)\n",
      " 13 test 19:43:41 loss:0.979989, acc:0.651741,score:0.687166\n",
      "-----------------------------------------\n",
      " 14 train (0.05591172228256861, 0.9959999998410543)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 14 test 19:44:01 loss:0.988400, acc:0.636816,score:0.622696\n",
      "-----------------------------------------\n",
      " 15 train (0.061407181123892464, 0.9933333396911621)\n",
      " 15 test 19:44:20 loss:1.098345, acc:0.646766,score:0.677982\n",
      "-----------------------------------------\n",
      " 16 train (0.05263351301352183, 0.9959999998410543)\n",
      " 16 test 19:44:39 loss:0.971502, acc:0.656716,score:0.647243\n",
      "-----------------------------------------\n",
      " 17 train (0.05284634704391162, 0.9946666677792867)\n",
      " 17 test 19:44:59 loss:0.998340, acc:0.661692,score:0.680552\n",
      "-----------------------------------------\n",
      " 18 train (0.049059272309144336, 0.9960000038146972)\n",
      " 18 test 19:45:18 loss:0.989567, acc:0.676617,score:0.693851\n",
      "-----------------------------------------\n",
      " 19 train (0.0498540988812844, 0.9933333396911621)\n",
      " 19 test 19:45:37 loss:0.960159, acc:0.661692,score:0.677245\n",
      "-----------------------------------------\n",
      "best: 9 0.8917204114928174 0.6716417919343977 0.6951339683731406\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_3 (InputLayer)            (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_13 (Embedding)        (None, 78, 300)      1189800     input_3[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_7 (Conv1D)               (None, 76, 250)      225250      embedding_13[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_8 (Conv1D)               (None, 75, 250)      300250      embedding_13[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_9 (Conv1D)               (None, 74, 250)      375250      embedding_13[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_7 (GlobalM (None, 250)          0           conv1d_7[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_8 (GlobalM (None, 250)          0           conv1d_8[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_9 (GlobalM (None, 250)          0           conv1d_9[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "merge_3 (Merge)                 (None, 750)          0           global_max_pooling1d_7[0][0]     \n",
      "                                                                 global_max_pooling1d_8[0][0]     \n",
      "                                                                 global_max_pooling1d_9[0][0]     \n",
      "__________________________________________________________________________________________________\n",
      "dense_25 (Dense)                (None, 250)          187750      merge_3[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_13 (Dropout)            (None, 250)          0           dense_25[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_26 (Dense)                (None, 3)            753         dropout_13[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 2,279,053\n",
      "Trainable params: 2,279,053\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.5361507534980774, 0.39249999821186066)\n",
      " 1 test 19:46:07 loss:1.152558, acc:0.485000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.021907713264227, 0.537499999627471)\n",
      "testing...best_score\n",
      " 2 test 19:46:28 loss:1.045851, acc:0.485000,score:0.444842\n",
      "-----------------------------------------\n",
      " 3 train (0.7784400954842567, 0.7000000029802322)\n",
      "testing...best_score\n",
      " 3 test 19:46:48 loss:1.024134, acc:0.500000,score:0.493208\n",
      "-----------------------------------------\n",
      " 4 train (0.5677815042436123, 0.8375000022351742)\n",
      "testing...best_score\n",
      " 4 test 19:47:08 loss:1.014910, acc:0.550000,score:0.528606\n",
      "-----------------------------------------\n",
      " 5 train (0.3655731175094843, 0.9237499907612801)\n",
      " 5 test 19:47:28 loss:1.021985, acc:0.550000,score:0.527306\n",
      "-----------------------------------------\n",
      " 6 train (0.188308110460639, 0.9887500032782555)\n",
      "testing...best_score\n",
      " 6 test 19:47:49 loss:1.061445, acc:0.570000,score:0.556671\n",
      "-----------------------------------------\n",
      " 7 train (0.1154926442541182, 0.9962500035762787)\n",
      " 7 test 19:48:09 loss:1.096749, acc:0.555000,score:0.547826\n",
      "-----------------------------------------\n",
      " 8 train (0.08169239014387131, 0.9962500035762787)\n",
      "testing...best_score\n",
      " 8 test 19:48:29 loss:1.157218, acc:0.575000,score:0.579128\n",
      "-----------------------------------------\n",
      " 9 train (0.060962528456002474, 0.9975000023841858)\n",
      " 9 test 19:48:49 loss:1.189714, acc:0.560000,score:0.562775\n",
      "-----------------------------------------\n",
      " 10 train (0.0593601749278605, 0.9962499998509884)\n",
      "testing...best_score\n",
      " 10 test 19:49:10 loss:1.245882, acc:0.575000,score:0.589084\n",
      "-----------------------------------------\n",
      " 11 train (0.051864928333088756, 0.9974999986588955)\n",
      " 11 test 19:49:33 loss:1.259532, acc:0.565000,score:0.568039\n",
      "-----------------------------------------\n",
      " 12 train (0.04806671035476029, 0.9950000047683716)\n",
      "testing...best_score\n",
      " 12 test 19:49:53 loss:1.293797, acc:0.585000,score:0.594841\n",
      "-----------------------------------------\n",
      " 13 train (0.0467785750515759, 0.9962499998509884)\n",
      " 13 test 19:50:13 loss:1.324966, acc:0.565000,score:0.579206\n",
      "-----------------------------------------\n",
      " 14 train (0.04506402602419257, 0.9975000023841858)\n",
      " 14 test 19:50:33 loss:1.348733, acc:0.570000,score:0.574719\n",
      "-----------------------------------------\n",
      " 15 train (0.04586293431930244, 0.9950000010430813)\n",
      " 15 test 19:50:53 loss:1.354822, acc:0.560000,score:0.574500\n",
      "-----------------------------------------\n",
      " 16 train (0.04978562123142183, 0.9950000010430813)\n",
      " 16 test 19:51:13 loss:1.375883, acc:0.560000,score:0.584962\n",
      "-----------------------------------------\n",
      " 17 train (0.03939411183819175, 0.9975000023841858)\n",
      " 17 test 19:51:34 loss:1.386058, acc:0.555000,score:0.566560\n",
      "-----------------------------------------\n",
      " 18 train (0.03854234085883945, 0.9975000023841858)\n",
      " 18 test 19:51:54 loss:1.430244, acc:0.570000,score:0.573883\n",
      "-----------------------------------------\n",
      " 19 train (0.041302391211502254, 0.9950000047683716)\n",
      " 19 test 19:52:15 loss:1.405053, acc:0.565000,score:0.573416\n",
      "-----------------------------------------\n",
      "best: 12 1.2937972736358643 0.585 0.594840520787958\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_4 (InputLayer)            (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_14 (Embedding)        (None, 78, 300)      1189800     input_4[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_10 (Conv1D)              (None, 76, 250)      225250      embedding_14[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_11 (Conv1D)              (None, 75, 250)      300250      embedding_14[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_12 (Conv1D)              (None, 74, 250)      375250      embedding_14[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_10 (Global (None, 250)          0           conv1d_10[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_11 (Global (None, 250)          0           conv1d_11[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_12 (Global (None, 250)          0           conv1d_12[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "merge_4 (Merge)                 (None, 750)          0           global_max_pooling1d_10[0][0]    \n",
      "                                                                 global_max_pooling1d_11[0][0]    \n",
      "                                                                 global_max_pooling1d_12[0][0]    \n",
      "__________________________________________________________________________________________________\n",
      "dense_27 (Dense)                (None, 250)          187750      merge_4[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_14 (Dropout)            (None, 250)          0           dense_27[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_28 (Dense)                (None, 3)            753         dropout_14[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 2,279,053\n",
      "Trainable params: 2,279,053\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 train (1.5687978342175484, 0.39750000089406967)\n",
      " 1 test 19:52:42 loss:1.110347, acc:0.361809,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.000175017863512, 0.5724999979138374)\n",
      "testing...best_score\n",
      " 2 test 19:53:03 loss:0.989598, acc:0.582915,score:0.607975\n",
      "-----------------------------------------\n",
      " 3 train (0.7799031175673008, 0.6837500110268593)\n",
      " 3 test 19:53:23 loss:0.927488, acc:0.542714,score:0.545098\n",
      "-----------------------------------------\n",
      " 4 train (0.5542448442429304, 0.8399999961256981)\n",
      " 4 test 19:53:42 loss:0.860907, acc:0.608040,score:0.587832\n",
      "-----------------------------------------\n",
      " 5 train (0.34132230654358864, 0.9500000067055225)\n",
      " 5 test 19:54:03 loss:0.843704, acc:0.597990,score:0.603866\n",
      "-----------------------------------------\n",
      " 6 train (0.20907195657491684, 0.978750005364418)\n",
      " 6 test 19:54:23 loss:0.870648, acc:0.547739,score:0.559621\n",
      "-----------------------------------------\n",
      " 7 train (0.1258728001266718, 0.9912500008940697)\n",
      " 7 test 19:54:43 loss:0.906833, acc:0.547739,score:0.567376\n",
      "-----------------------------------------\n",
      " 8 train (0.08921271096915007, 0.9925000071525574)\n",
      " 8 test 19:55:05 loss:0.909946, acc:0.603015,score:0.602152\n",
      "-----------------------------------------\n",
      " 9 train (0.07407778920605779, 0.9925000034272671)\n",
      " 9 test 19:55:26 loss:1.010291, acc:0.613065,score:0.586140\n",
      "-----------------------------------------\n",
      " 10 train (0.06311739864759147, 0.9950000047683716)\n",
      " 10 test 19:55:47 loss:0.964349, acc:0.562814,score:0.577503\n",
      "-----------------------------------------\n",
      " 11 train (0.06336115184240043, 0.9925000071525574)\n",
      " 11 test 19:56:08 loss:1.010217, acc:0.592965,score:0.595957\n",
      "-----------------------------------------\n",
      " 12 train (0.060830286238342524, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 12 test 19:56:29 loss:0.992738, acc:0.603015,score:0.611111\n",
      "-----------------------------------------\n",
      " 13 train (0.05867904075421393, 0.9912500083446503)\n",
      "testing...best_score\n",
      " 13 test 19:56:50 loss:1.071732, acc:0.633166,score:0.612829\n",
      "-----------------------------------------\n",
      " 14 train (0.05345306801609695, 0.9925000071525574)\n",
      " 14 test 19:57:12 loss:1.092337, acc:0.597990,score:0.588658\n",
      "-----------------------------------------\n",
      " 15 train (0.053751397179439664, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 15 test 19:57:35 loss:1.118441, acc:0.577889,score:0.619625\n",
      "-----------------------------------------\n",
      " 16 train (0.06159618007950485, 0.9925000034272671)\n",
      " 16 test 19:57:56 loss:1.211905, acc:0.582915,score:0.506427\n",
      "-----------------------------------------\n",
      " 17 train (0.04765823599882424, 0.9937500022351742)\n",
      " 17 test 19:58:19 loss:1.035397, acc:0.597990,score:0.609820\n",
      "-----------------------------------------\n",
      " 18 train (0.04683099139947444, 0.9925000071525574)\n",
      " 18 test 19:58:41 loss:1.046807, acc:0.587940,score:0.609941\n",
      "-----------------------------------------\n",
      " 19 train (0.04759704740718007, 0.9937500022351742)\n",
      " 19 test 19:59:03 loss:1.137769, acc:0.603015,score:0.600478\n",
      "-----------------------------------------\n",
      "best: 15 1.1184411665902065 0.5778894481347434 0.619624613922547\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_5 (InputLayer)            (None, 78)           0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding_15 (Embedding)        (None, 78, 300)      1189800     input_5[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_13 (Conv1D)              (None, 76, 250)      225250      embedding_15[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_14 (Conv1D)              (None, 75, 250)      300250      embedding_15[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "conv1d_15 (Conv1D)              (None, 74, 250)      375250      embedding_15[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_13 (Global (None, 250)          0           conv1d_13[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_14 (Global (None, 250)          0           conv1d_14[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "global_max_pooling1d_15 (Global (None, 250)          0           conv1d_15[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "merge_5 (Merge)                 (None, 750)          0           global_max_pooling1d_13[0][0]    \n",
      "                                                                 global_max_pooling1d_14[0][0]    \n",
      "                                                                 global_max_pooling1d_15[0][0]    \n",
      "__________________________________________________________________________________________________\n",
      "dense_29 (Dense)                (None, 250)          187750      merge_5[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_15 (Dropout)            (None, 250)          0           dense_29[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_30 (Dense)                (None, 3)            753         dropout_15[0][0]                 \n",
      "==================================================================================================\n",
      "Total params: 2,279,053\n",
      "Trainable params: 2,279,053\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      " 1 train (1.5246552228927612, 0.39749999437481165)\n",
      " 1 test 19:59:35 loss:1.117095, acc:0.487437,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.052049744874239, 0.5112500060349703)\n",
      "testing...best_score\n",
      " 2 test 19:59:59 loss:1.035026, acc:0.562814,score:0.374586\n",
      "-----------------------------------------\n",
      " 3 train (0.9034847803413868, 0.6362500004470348)\n",
      "testing...best_score\n",
      " 3 test 20:00:19 loss:0.952597, acc:0.542714,score:0.463095\n",
      "-----------------------------------------\n",
      " 4 train (0.700962021946907, 0.7599999904632568)\n",
      "testing...best_score\n",
      " 4 test 20:00:42 loss:0.897541, acc:0.587940,score:0.568414\n",
      "-----------------------------------------\n",
      " 5 train (0.4864519126713276, 0.8837499991059303)\n",
      " 5 test 20:01:02 loss:0.860394, acc:0.597990,score:0.519886\n",
      "-----------------------------------------\n",
      " 6 train (0.30942194256931543, 0.9487500041723251)\n",
      "testing...best_score\n",
      " 6 test 20:01:23 loss:0.846489, acc:0.603015,score:0.634521\n",
      "-----------------------------------------\n",
      " 7 train (0.18438557256013155, 0.9825000092387199)\n",
      "testing...best_score\n",
      " 7 test 20:01:44 loss:0.851448, acc:0.638191,score:0.649849\n",
      "-----------------------------------------\n",
      " 8 train (0.12235281802713871, 0.9900000095367432)\n",
      " 8 test 20:02:05 loss:0.900490, acc:0.643216,score:0.647114\n",
      "-----------------------------------------\n",
      " 9 train (0.089248115895316, 0.9900000058114529)\n",
      " 9 test 20:02:25 loss:0.967017, acc:0.618090,score:0.608998\n",
      "-----------------------------------------\n",
      " 10 train (0.08077215822413564, 0.9925000034272671)\n",
      " 10 test 20:02:45 loss:1.094095, acc:0.608040,score:0.568421\n",
      "-----------------------------------------\n",
      " 11 train (0.06642612325958908, 0.9925000071525574)\n",
      " 11 test 20:03:05 loss:0.978030, acc:0.623116,score:0.634542\n",
      "-----------------------------------------\n",
      " 12 train (0.05644285958260298, 0.9937500059604645)\n",
      " 12 test 20:03:27 loss:0.999192, acc:0.633166,score:0.642512\n",
      "-----------------------------------------\n",
      " 13 train (0.05619052844122052, 0.9925000034272671)\n",
      " 13 test 20:03:50 loss:1.015928, acc:0.603015,score:0.630441\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 14 train (0.05841208645142615, 0.9937500059604645)\n",
      " 14 test 20:04:11 loss:1.085007, acc:0.623116,score:0.630383\n",
      "-----------------------------------------\n",
      " 15 train (0.048500685836188495, 0.9925000071525574)\n",
      "testing...best_score\n",
      " 15 test 20:04:31 loss:1.118452, acc:0.613065,score:0.654071\n",
      "-----------------------------------------\n",
      " 16 train (0.05879240750800818, 0.9937500022351742)\n",
      " 16 test 20:04:52 loss:1.196854, acc:0.597990,score:0.542324\n",
      "-----------------------------------------\n",
      " 17 train (0.0421857995679602, 0.9962500035762787)\n",
      " 17 test 20:05:12 loss:1.103922, acc:0.623116,score:0.640498\n",
      "-----------------------------------------\n",
      " 18 train (0.05388204916380346, 0.9937500059604645)\n",
      " 18 test 20:05:32 loss:1.233961, acc:0.597990,score:0.621004\n",
      "-----------------------------------------\n",
      " 19 train (0.053673712187446654, 0.9950000047683716)\n",
      " 19 test 20:05:54 loss:1.262059, acc:0.572864,score:0.525677\n",
      "-----------------------------------------\n",
      "best: 15 1.1184517810093098 0.6130653272322075 0.6540706605222735\n"
     ]
    }
   ],
   "source": [
    "# cnn\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True)\n",
    "for train_index, valid_index in kfold.split(np.zeros(1000), np.argmax(all_data2,axis=1)):\n",
    "    train_x_t,valid_x_t = all_data0[train_index],all_data0[valid_index]\n",
    "    train_y_t,valid_y_t = all_data2[train_index],all_data2[valid_index]\n",
    "    tat = CommonCnn(cnn_config)\n",
    "    tat.build(embedding_matrix)\n",
    "    tat.fit(train_x_t,train_y_t,valid_x_t,valid_y_t)\n",
    "    del tat"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## visualization"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# NOTE(review): `tat` is deleted (`del tat`) at the end of the training loop\n",
     "# above, so this cell relies on stale kernel state from an earlier run and will\n",
     "# raise NameError on Restart & Run All -- keep a reference to the chosen model\n",
     "# outside the loop instead.\n",
     "tt = tat.model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Sub-model exposing the output of the layer named 'att' (presumably attention\n",
     "# weights -- NOTE(review): the CNN summarized above has no 'att' layer, so `tt`\n",
     "# must come from a different model run; confirm which model this visualizes).\n",
     "intermediate_layer_model = Model(inputs=tt.input,\n",
     "#                                  outputs=model.get_layer('target_softmax').output\n",
     "                                 outputs=tt.get_layer('att').output\n",
     "                                )\n",
     "# Two-input predict; assumes test_x_t[0] / test_x_t[1] are the two model inputs\n",
     "att_vis = intermediate_layer_model.predict([test_x_t[0],test_x_t[1]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Sub-model exposing the 'target_softmax' layer output -- NOTE(review): the\n",
     "# layer name suggests a softmax over target tokens; verify against the model\n",
     "# definition, which is not visible in this notebook section.\n",
     "intermediate_layer_model = Model(inputs=tt.input,\n",
     "                                 outputs=tt.get_layer('target_softmax').output\n",
     "                                )\n",
     "target_weights = intermediate_layer_model.predict([test_x_t[0],test_x_t[1]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 81,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0.03659217, 0.07666643, 0.0718499 , 0.09163786, 0.61788225,\n",
       "       0.05251552, 0.0528558 ], dtype=float32)"
      ]
     },
     "execution_count": 81,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Inspect the first test sample's target_softmax weights\n",
     "# (output below: 7 values that appear to sum to ~1, consistent with a softmax)\n",
     "target_weights[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([   0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,\n",
       "          0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,\n",
       "          0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,\n",
       "          0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,\n",
       "          0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,\n",
       "          0,    0,    0,    0,    0,    0,    0,    0,  190,   97,   14,\n",
       "         26,    3,    8,  631,   26,    3, 1124,  866,  700,    4,   33,\n",
       "         25])"
      ]
     },
     "execution_count": 70,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Token-id sequence for test sample 91; zero left-padding with the actual\n",
     "# token ids at the tail (see output below)\n",
     "test_x_t[0][91]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.        , 0.        ,\n",
       "       0.        , 0.        , 0.        , 0.02135521, 0.02135507,\n",
       "       0.02135556, 0.02135508, 0.02136223, 0.02284208, 0.15379892,\n",
       "       0.02135508, 0.02136223, 0.1577938 , 0.15760756, 0.1577913 ,\n",
       "       0.02151693, 0.02135512, 0.15779379], dtype=float32)"
      ]
     },
     "execution_count": 71,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Attention weights for sample 91; the non-zero entries align with the\n",
     "# non-padding token positions shown in the previous cell's output\n",
     "att_vis[91]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['because you are from xinjiang . a man from xinjiang accuses airport officers of ethnic discrimination',\n",
       " 'when will people realise that the xinjiang ban on fasting isnt strictly islamphobic but more about ethnic discrimination of the uyghur',\n",
       " 'i was saddened to hear about the terrorist attack that took place on a market in xinjiang , china , on the morning of 22 may , killing more than 30 people . my thoughts and condolences are with the victims and their families , hague said in a statement .',\n",
       " 'discrimination and repression accompany chinese colonization of the indigenous uigher homeland in xinjiang .',\n",
       " 'some uighurs protest what they say is cultural assimilation .',\n",
       " 'nyt on employment discrimination in xinjiang . uighurs in china say bias is growing',\n",
       " 'now , back to your question . chinese government is a representative of han people . their presence in the illegally occupied territories is unlawful . so , any ruling carried out by the ethnic majority against the ethnic minority in their own land is by default illegal.·',\n",
       " 'poorly integrated , poor , and ignorant muslims are going to be easy to recruit by terrorist organizations regardless of a ban on certain items of clothing and beards . this is true in suburbs of paris , as it is in western xinjiang .',\n",
       " 'china imprisons brothers of u.s. reporter to suppress xinjiang coverage',\n",
       " 'xinjiang government decide to play good uyghur , bad uyghur picture game in another short sighted propaganda piece !',\n",
       " 'humanrightslaw china : allow independent investigations into xinjiang violence',\n",
       " 'as i have noted before , the uyghur muslims of china are some of the worst persecuted people in our planet because of their ethnicity and religion .',\n",
       " 'the law contains a firm indication that the country is poised to give a big push to its international counter terrorism cooperation ; for countering the emerging grave threats to the global stability and prosperity from terrorist forces like the islamic state , the indication should be welcome to the outside world .',\n",
       " 'hundreds of people have been killed in xinjiang in the past two years , mostly because of violence between the muslim uighur people , who call the region home , and the ethnic majority han chinese .',\n",
       " 'USER_MENTION USER_MENTION have you been to china ? have you been to xinjiang ? beyond 30000 uyghurs spontaneously help the police arrest terrorists .',\n",
       " 'chinese discrimination policies cause ethnic violence urumqi xinjiang china',\n",
       " 'want to pray to allah in china ? go right ahead . want to have a mosque ? sure , but pay for it yourself because the chinese government does not subsidize religious institutions or give them special tax exemptions . want to wear a burqa ? not a problem in terms of the police though there will obviously be strange looks from others .',\n",
       " 'USER_MENTION i worked in xinjiang a decade ago ; the tensions were high back then . many tales of discrimination , oppression by han .',\n",
       " 'there is a lot of inter ethnic hatred',\n",
       " 'USER_MENTION that really make beijing pissed off really bad . hence , the conflict that is currently happening in xinjiang .',\n",
       " 'china says extremism losing grip in restless xinjiang via dawn_com',\n",
       " 'one han chinese entrepreneur in the city to scout out opportunities said he felt safe because they are so many police on the streets .',\n",
       " 'china bans beards , veils from buses in xinjiang city',\n",
       " 'china has created a 100 million yuan 14.5 million fund for anti terrorism awards in a violence wracked part of the country , state media reported , as the government steps up a campaign targeting unrest .',\n",
       " 'this is a despicable and outrageous act of violence against innocent civilians , and the united states resolutely opposes all forms of terrorism , white house press secretary jay carney said in a statement .',\n",
       " 'xinjiang went from being a place of benevolent minstrels amp ; fruit to a place of violence and danger . nice interview',\n",
       " 'wooing islamists with a beer festival . interesting look at terrorism in chinas xinjiang region via USER_MENTION',\n",
       " 'pakistan has condemned incidents of violence in chinese city of kashgar , the capital of xinjiang province',\n",
       " 'in recent weeks , china has tightened its crackdown on uighurs in the region , but many foreign experts say beijing ’ heavy handed policies have consistently failed to curb unrest in the past and are unlikely to be any more successful now .',\n",
       " 'in both the cases of xinjiang and tibet , the government has launched strike –hard campaigns to suppress the alleged terrorist tendencies .',\n",
       " 'authorities typically respond to violence or unrest in xinjiang with lockdowns , raids on homes , and restrictions on phone and internet communications .',\n",
       " 'china steps up controls in unruly xinjiang as ramazan approaches via dawn_com',\n",
       " 'overseas cri audiences condemn xinjiang riot',\n",
       " 'the uyghur ethnic and cultural identity is not suppressed . only the religious aspect is regulated and this is the same across the country whether you are han , hui , tibetan or uyghur .',\n",
       " 'like all of these religious based clothing bans , whether it be in france or in china , it ’ just a counterproductive measure that will not address the root causes of terrorism and end up doing more harm than good . well integrated , prosperous , and well educated muslims are not going to turn to terrorists .',\n",
       " 'such re education involves forcibly detaining people for months without charges and inculcating them in political doctrine considered acceptable by the chinese state .',\n",
       " 'but chinese government restrictions on religion and society in xinjiang , and alleged state violence against civilians there , have prompted serious human rights concerns , while tight information control has made chinese claims of local links to international terrorist organizations difficult to verify',\n",
       " 'the state department also criticised chinese restrictions on religious expression in xinjiang , like banning veils for women . many chinese government policies may have exacerbated ethnic tension in xinjiang and could contribute to increased violent extremism , it said .',\n",
       " 'xinjiang protests : han chinese demand better security after syringe attacks in urumqi',\n",
       " 'china ’ xinjiang bans religion in government buildings iina beijing china',\n",
       " 'he noted that the city in northern xinjiang has become more prosperous and vibrant there are a lot more shops and malls , and people have better jobs .',\n",
       " 'chinese embassy denies reports about restricting fasting in xinjiang nation',\n",
       " 'china is right to worry about the dangers of islamic extremism in its western provinces .',\n",
       " 'the nsc has made national security synonymous with state or regime security , while the new legislation provides legal basis for security services to identify and suppress individuals or groups deemed to be terrorists , and requires internet providers and tech companies to provide assistance , including encryption keys , during counter terrorism operations .',\n",
       " 'this is a despicable and outrageous act of violence against innocent civilians , and the united states resolutely opposes all forms of terrorism , white house press secretary jay carney said in a statement .',\n",
       " 'afghanistan turkistan islamic party tip muft abuzar azam criticizes china policy in xinjiang . terrormonitor',\n",
       " 'the real intention of the chinese government is to eliminate the uyghurs as a distinct ethnic group , rebiya kadeer , a longtime exiled uyghur leader who recently stepped down as president of the world uyghur congress , told cnn in an interview in washington last year .',\n",
       " 'they are discouraged and in some cases forbidden from displaying any outward sign of their islamic identity , such as growing beard for adult men , and wearing hijab for women',\n",
       " 'rights groups say that party restrictions on the religion and culture of the uighurs create simmering resentment of the state and of policies that encourage the majority han people to move to the region in vast numbers .',\n",
       " 'china releases photos of raid on alleged terrorists in xinjiang , opposition condemns repression :',\n",
       " 'thousands of uyghur muslims have fled their homes in xinjiang',\n",
       " 'lom harshni chauhan retweeted ft chinabetween isis video n chinese troops show of strength , not a reassuring place 4 innocent bystander in xinjiang , whether uighyr or han . lom harshni chauhan added ,',\n",
       " 'USER_MENTION featured in USER_MENTION on beijings latest anti terror measure in xinjiang : bad pop music',\n",
       " 'if you are on the internet and you mention anything about independence , you will get sent in for questioning . i think this is a bad policy and it should be a simple remove the post instead of questioning and possible arrest .',\n",
       " 'USER_MENTION USER_MENTION there are no difference between fishballrevolution rioters and xinjiang terrorists .',\n",
       " 'strongly condemn horrific terrorist attack vs innocents in xinjiang china today . thoughts are with 31 killed , 90+ injured amp ; their families .',\n",
       " 'bad ideas 6 in a series of many . china to ban beards in parts of xinjiang to ease potential hazards to society',\n",
       " 'china is quick to argue that the best way to fight terrorism elsewhere in the world is to address root causes ; it is not exempt from that same diagnosis .',\n",
       " 'more violence in china ’ xinjiang after deadly attack',\n",
       " 'xinjiang : ‘ islam only becomes a threat when muslims are repressed and treated as third class citizens. ’ a rashid',\n",
       " 'isesco condemns persecution of uighur muslims in china xinjiang',\n",
       " 'USER_MENTION nine jailed in xinjiang for promoting ethnic hatred , discrimination',\n",
       " 'xinjiang has generally been quiet this year , with no major reported attacks or other violent incidents .',\n",
       " 'xinjiang ’ security state is evolving rapidly . bad news for the uyghur minority .',\n",
       " 'china says most people in tibet and xinjiang have a good life there is no issue of discrimination cescr',\n",
       " 'the youth culture is increasingly inclusive , tolerant and vibrant .',\n",
       " 'ethnic discrimination in passport applications across china will help build a very disharmonious society xinjiang',\n",
       " 'china faces a serious threat from violent extremism , particularly in its western region of xinjiang',\n",
       " 'leibold said china was using terrorism legislation to target some of the country ethnic minorities .',\n",
       " 'even the pro independence people i ’ ve talked to all tell me that their living standards have definitely increased like in tibet .',\n",
       " 'pakistan strongly condemns recent acts of terrorism in xinjiang , china',\n",
       " 'new post : whitewashing beijing ’ repression in xinjiang won ’ t do the trick oped',\n",
       " 'new post : beijing xinjiang problem : uighur terrorism is escalating despite china crackdown',\n",
       " 'but many uighurs—especially those who do not speak mandarin or who lack skills—feel they miss out on benefits that instead flow to han migrants .',\n",
       " 'during a visit to xinjiang last month , president xi jinping promised greater integration and warned terrorists would be isolated like rats scurrying across a street . but attacks like today serve to escalate mistrust between uighurs and the majority han population making it even harder to resolve the underlying problems that feed the violence .',\n",
       " 'this is a despicable and outrageous act of violence against innocent civilians , and the united states resolutely opposes all forms of terrorism , white house spokesman jay carney said in a statement .',\n",
       " 'wearing the burka and face veil isnt compulsory in islam and its unecessary because you ’ re not living in a desert , where clothes like this is more suitable to keep the sand of your face , nose , ears , eyes and clothes .',\n",
       " 'senior chinese officials have increasingly described the security challenges in xinjiang as an important front in the global fight against terrorism . zhang has previously said some people from the region have gone to fight with islamic state .',\n",
       " 'any protest or sign of disapproval of the apartheid like treatment of the uyghurs against the racist han chinese settlers has been treated by the colonial chinese government as an act of terrorism .',\n",
       " 'an iron fist in xinjiang is fuelling an insurrection . china leadership must switch tactics',\n",
       " 'western countries have long been reluctant to share intelligence with china or otherwise cooperate , saying china has provided little evidence to prove etim existence and citing worries about possible human rights abuses in xinjiang .',\n",
       " 'amnesty international has said uyghurs face widespread discrimination in housing , education and employment as well as curtailed religious freedom in their homeland . other critics have linked the rise of violence in xinjiang to beijing ’ repressive reign there a claim the government vehemently denies .',\n",
       " 'china needs stability in pakistan to prevent it from becoming a safe haven for terrorists originating in xinjiang .',\n",
       " 'no , not at all . china has a large muslim population and substantial mosques in most towns , including beijing , xian and several in shanghai that i have visited .',\n",
       " 'just like any place in the world , life in xinjiang is both wonderful and frustrating .',\n",
       " 'at the height of violence in china western xinjiang region as recently as 2014 , mr alamjan , an ethnic uighur working as a hotel chef in ining city , felt his colleagues from other ethnic groups were wary of him .',\n",
       " 'xi calls for ‘ great wall of iron ’ to safeguard restive xinjiang via dawn_com',\n",
       " 'the south african government sends it deepest condolences to the government and the people of the people republic of china , and in particular to the families of the deceased , and wishes the injured speedy recoveries , monyela said .',\n",
       " 'china confiscates passports of xinjiang people , many of whom r muslims who regularly face widespread discrimination',\n",
       " 'according to an associated press investigation , chinese government documents describe the detention centers as free , completely closed off , militarized areas where training sessions that can last three months to two years are conducted . there , uyghurs learn mandarin , law , ethnic unity , de radicalization , [ and ] patriotism .',\n",
       " 'china targets muslim parents w/ religion rules in xinjiang in ongoing state discrimination against muslim populace',\n",
       " 'more heart breaking news of repression of uighurs in china xinjiang region',\n",
       " 'beijing is pouring billions of dollars into a city which it designated as a special economic zone back in 2010 one of only half a dozen such zones in the country .',\n",
       " 'in some villages , you can ’ t see men on the streets anymore only women and children all the men have been sent to the camps .',\n",
       " 'more importantly , xinjiang has become the focal point for terrorism in china after 9/11 despite the cpc ’ repeated security crackdowns in the region .',\n",
       " 'it should be banned and i am a muslim too by the way , i converted to islam .',\n",
       " 'the draft law , which could require technology firms to install backdoors in products or to hand over sensitive information such as encryption keys to the government , has also been criticised by some western business groups .',\n",
       " 'moreover , sweeping powers have been given to counter terrorism agencies under the law',\n",
       " 'this will only hasten the desire to secede xinjiang from china as uighurs are usually very protective of their religious freedom .',\n",
       " 'british foreign secretary william hague said : i was saddened to hear about the terrorist attack that took place on a market in xinjiang , china , on the morning of 22 may , killing more than 30 people . my thoughts and condolences are with the victims and their families .',\n",
       " 'china defends xinjiang crackdown after turkish criticism',\n",
       " 'a spokesman told the bbc the violent incidents were a direct result of beijing policies in the region .',\n",
       " 'seem xinjiang is being calm . there are more things to do to solve the anti han emotion of bottom uigurs . let them know no discrimination',\n",
       " 'but the nets appear to be also catching many innocent people , residents complain . you should arrest the bad guys , said one uighur professional in urumqi , not just anyone who looks suspicious .',\n",
       " 'many uighurs also say that their traditions and culture are under threat .',\n",
       " 'the chinese government is after their natural resources , and has been treating the resources rich xinjiang region more like a colony settling millions of han chinese from outside , threatening the demographic makeup of the restive region .',\n",
       " 'humanrights news china , tibet and xinjiang : political repression at a high mark : hrw',\n",
       " 'there is no clear cut guideline , rather interpretations as to what should be covered or not . what is clear in islam is that the decision lies with the individual , and not the state .',\n",
       " 'i believe that kashgar could one day catch up and even surpass the development we see in other chinese cities , says zhang yunjian .']"
      ]
     },
     "execution_count": 69,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# xinjiang.iloc[891:,:]  # dead line: its value was discarded (only a cell's last expression is displayed)\n",
     "xinjiang.WORDS.tolist()[891:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xinjiang.LABEL.tolist()[802]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 88,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<font style=\"background-color: rgba(255,0,0,0.06285826861858368)\">a</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05877641588449478)\">call</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.059839341789484024)\">to</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05935276299715042)\">stop</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.058766357600688934)\">ethnic</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.43422678112983704)\">discrimination</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05876621976494789)\">in</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.058785922825336456)\">xinjiang</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05876648426055908)\">but</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05876622349023819)\">the</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05908791720867157)\">official</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.058807726949453354)\">reaction</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.058766212314367294)\">will</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.058766212314367294)\">be</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05896018445491791)\">similar</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.059839341789484024)\">to</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.0658288523554802)\">tibets</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05876621976494789)\">more</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.4342268109321594)\">repression</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05899867042899132)\">tighter</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.05904694274067879)\">control</font>\n"
     ]
    }
   ],
   "source": [
     "# Render the last 21 attention weights of sample 13 as an HTML heat-map over the tokens of\n",
     "# document 813 (tokens '.' and ':' dropped); weights are doubled so the rgba alpha is visible.\n",
     "for i in zip(att_vis[13][-21:],[i for i in xinjiang.WORDS.tolist()[813].split(' ') if i not in ['.',':']]):\n",
     "    print('<font style=\"background-color: rgba(255,0,0,{})\">{}</font>'.format(i[0]*2,i[1]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 89,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<font style=\"background-color: rgba(255,0,0,0.04271042346954346)\">because</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.042710140347480774)\">you</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04271111637353897)\">are</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04271015152335167)\">from</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04272446408867836)\">xinjiang</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04568416625261307)\">a</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.3075978457927704)\">man</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04271015152335167)\">from</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04272446408867836)\">xinjiang</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.3155876100063324)\">accuses</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.3152151107788086)\">airport</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.31558260321617126)\">officers</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.043033864349126816)\">of</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.04271024465560913)\">ethnic</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.31558758020401)\">discrimination</font>\n"
     ]
    }
   ],
   "source": [
     "# Same heat-map rendering for sample 91: last 15 attention weights over the tokens of\n",
     "# document 891 (tokens '.' and ':' dropped); weights doubled for a visible rgba alpha.\n",
     "for i in zip(att_vis[91][-15:],[i for i in xinjiang.WORDS.tolist()[891].split(' ') if i not in ['.',':']]):\n",
     "    print('<font style=\"background-color: rgba(255,0,0,{})\">{}</font>'.format(i[0]*2,i[1]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 93,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<font style=\"background-color: rgba(255,0,0,0.14636866748332977)\">the</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.30666571855545044)\">chinese</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.28739961981773376)\">goverment</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.3665514588356018)\">anti</font>\n",
      "<font style=\"background-color: rgba(255,0,0,2.471529006958008)\">terrorist</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.21006207168102264)\">in</font>\n",
      "<font style=\"background-color: rgba(255,0,0,0.21142320334911346)\">xinjiang</font>\n"
     ]
    }
   ],
   "source": [
     "# NOTE: keep the typo 'goverment' as-is — this string is the literal target phrase the model\n",
     "# scored, and the rendered output above echoes it. Weights are scaled by 4 for a visible alpha.\n",
     "for i in zip(target_weights[0],'the chinese goverment anti terrorist in xinjiang'.split()):\n",
     "    print('<font style=\"background-color: rgba(255,0,0,{})\">{}</font>'.format(i[0]*4,i[1]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Run the trained model ('tt' — presumably built in an earlier cell; confirm before re-running)\n",
     "# on the two test inputs (text and target sequences).\n",
     "predict_label = tt.predict([test_x_t[0],test_x_t[1]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'batch_size': 100,\n",
       " 'dropout_rate': 0.2,\n",
       " 'embed_trainable': True,\n",
       " 'epochs': 20,\n",
       " 'lr': 0.001,\n",
       " 'lstm_output_size': 100,\n",
       " 'n_stop': 20,\n",
       " 'optimizer': 'adam',\n",
       " 'target_nums': 7,\n",
       " 'time_steps': 78}"
      ]
     },
     "execution_count": 107,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tan_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 112,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "##############\n",
      "0.001 100 100\n",
      " 1 train (2.3093396027882895, 0.4749999940395355)\n",
      "testing..."
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\program\\Lib\\site-packages\\sklearn\\metrics\\classification.py:1113: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples.\n",
      "  'precision', 'predicted', average, warn_for)\n",
      "D:\\program\\Lib\\site-packages\\ipykernel_launcher.py:88: RuntimeWarning: invalid value encountered in double_scalars\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 1 test 20:11:14 loss:2.249568, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.119758407274882, 0.49833332498868305)\n",
      " 2 test 20:11:27 loss:2.074841, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (1.9313640594482422, 0.5633333375056585)\n",
      "testing...best_score\n",
      " 3 test 20:11:41 loss:1.909807, acc:0.530000,score:0.526943\n",
      "-----------------------------------------\n",
      " 4 train (1.7257487177848816, 0.646666685740153)\n",
      "testing...best_score\n",
      " 4 test 20:11:54 loss:1.791893, acc:0.565000,score:0.578741\n",
      "-----------------------------------------\n",
      " 5 train (1.4858714540799458, 0.698333332935969)\n",
      "testing...best_score\n",
      " 5 test 20:12:07 loss:1.692672, acc:0.590000,score:0.625641\n",
      "-----------------------------------------\n",
      " 6 train (1.2523666421572368, 0.7966666718324026)\n",
      " 6 test 20:12:20 loss:1.612926, acc:0.610000,score:0.586892\n",
      "-----------------------------------------\n",
      " 7 train (1.0246010422706604, 0.8633333345254263)\n",
      " 7 test 20:12:33 loss:1.589104, acc:0.580000,score:0.586929\n",
      "-----------------------------------------\n",
      " 8 train (0.8720507820447286, 0.8850000003973643)\n",
      " 8 test 20:12:47 loss:1.608018, acc:0.610000,score:0.599964\n",
      "-----------------------------------------\n",
      " 9 train (0.7324225703875223, 0.9116666615009308)\n",
      "testing...best_score\n",
      " 9 test 20:13:00 loss:1.450419, acc:0.605000,score:0.627933\n",
      "-----------------------------------------\n",
      " 10 train (0.5819884538650513, 0.9633333384990692)\n",
      "testing...best_score\n",
      " 10 test 20:13:13 loss:1.601773, acc:0.630000,score:0.637745\n",
      "-----------------------------------------\n",
      " 11 train (0.4840235610802968, 0.9666666686534882)\n",
      " 11 test 20:13:27 loss:1.658568, acc:0.600000,score:0.627269\n",
      "-----------------------------------------\n",
      " 12 train (0.4103843569755554, 0.9799999992052714)\n",
      " 12 test 20:13:41 loss:1.719042, acc:0.590000,score:0.611135\n",
      "-----------------------------------------\n",
      " 13 train (0.3678506364425023, 0.9783333440621694)\n",
      " 13 test 20:13:55 loss:1.868927, acc:0.615000,score:0.622482\n",
      "-----------------------------------------\n",
      " 14 train (0.3183591266473134, 0.9816666742165884)\n",
      " 14 test 20:14:09 loss:1.698302, acc:0.585000,score:0.602285\n",
      "-----------------------------------------\n",
      " 15 train (0.26872556904951733, 0.9883333444595337)\n",
      " 15 test 20:14:25 loss:1.591827, acc:0.590000,score:0.606154\n",
      "-----------------------------------------\n",
      " 16 train (0.2383352369070053, 0.9900000095367432)\n",
      " 16 test 20:14:39 loss:1.626614, acc:0.565000,score:0.578151\n",
      "-----------------------------------------\n",
      " 17 train (0.20154846956332526, 0.9950000047683716)\n",
      " 17 test 20:14:53 loss:1.653671, acc:0.570000,score:0.581293\n",
      "-----------------------------------------\n",
      " 18 train (0.19328132520119348, 0.9866666793823242)\n",
      " 18 test 20:15:06 loss:1.633848, acc:0.575000,score:0.580745\n",
      "-----------------------------------------\n",
      " 19 train (0.15522735814253488, 0.996666669845581)\n",
      " 19 test 20:15:20 loss:1.582469, acc:0.560000,score:0.579623\n",
      "-----------------------------------------\n",
      " 20 train (0.14965187013149261, 0.9933333396911621)\n",
      " 20 test 20:15:34 loss:1.546910, acc:0.595000,score:0.605670\n",
      "-----------------------------------------\n",
      " 21 train (0.12854382395744324, 0.9883333444595337)\n",
      " 21 test 20:15:49 loss:1.606222, acc:0.585000,score:0.616399\n",
      "-----------------------------------------\n",
      " 22 train (0.10876356189449628, 0.9933333396911621)\n",
      " 22 test 20:16:02 loss:1.582011, acc:0.565000,score:0.597826\n",
      "-----------------------------------------\n",
      " 23 train (0.09600396951039632, 0.9983333349227905)\n",
      " 23 test 20:16:17 loss:1.577234, acc:0.585000,score:0.620496\n",
      "-----------------------------------------\n",
      " 24 train (0.101759718110164, 0.9900000095367432)\n",
      " 24 test 20:16:31 loss:1.523551, acc:0.585000,score:0.629751\n",
      "-----------------------------------------\n",
      " 25 train (0.09893417606751125, 0.9883333444595337)\n",
      " 25 test 20:16:45 loss:1.417305, acc:0.575000,score:0.598358\n",
      "-----------------------------------------\n",
      " 26 train (0.08018432557582855, 0.9950000047683716)\n",
      " 26 test 20:16:59 loss:1.430269, acc:0.580000,score:0.606796\n",
      "-----------------------------------------\n",
      " 27 train (0.06321681042512257, 0.996666669845581)\n",
      " 27 test 20:17:12 loss:1.572725, acc:0.585000,score:0.599350\n",
      "-----------------------------------------\n",
      " 28 train (0.06108187511563301, 0.9933333396911621)\n",
      " 28 test 20:17:25 loss:1.624312, acc:0.580000,score:0.596807\n",
      "-----------------------------------------\n",
      " 29 train (0.05661202408373356, 0.996666669845581)\n",
      " 29 test 20:17:38 loss:1.641296, acc:0.575000,score:0.589558\n",
      "-----------------------------------------\n",
      "best: 10 1.601772882938385 0.63 0.6377450980392158\n",
      "(1.474624342918396, 0.615, 0.6398190045248868)\n",
      "##############\n",
      "0.001 100 50\n",
      " 1 train (2.239058574040731, 0.4700000000496705)\n",
      " 1 test 20:18:10 loss:2.090730, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.9135171473026276, 0.5083333353201548)\n",
      "testing...best_score\n",
      " 2 test 20:18:26 loss:1.823138, acc:0.505000,score:0.446931\n",
      "-----------------------------------------\n",
      " 3 train (1.5737416744232178, 0.6566666811704636)\n",
      "testing...best_score\n",
      " 3 test 20:18:41 loss:1.600517, acc:0.560000,score:0.568997\n",
      "-----------------------------------------\n",
      " 4 train (1.1853288610776265, 0.7933333317438761)\n",
      "testing...best_score\n",
      " 4 test 20:18:56 loss:1.498723, acc:0.580000,score:0.608196\n",
      "-----------------------------------------\n",
      " 5 train (0.8282933284838995, 0.8800000001986822)\n",
      "testing...best_score\n",
      " 5 test 20:19:11 loss:1.421741, acc:0.630000,score:0.623070\n",
      "-----------------------------------------\n",
      " 6 train (0.5782837172349294, 0.9433333327372869)\n",
      " 6 test 20:19:28 loss:1.667381, acc:0.580000,score:0.558576\n",
      "-----------------------------------------\n",
      " 7 train (0.44487390170494717, 0.9550000031789144)\n",
      "testing...best_score\n",
      " 7 test 20:19:43 loss:1.475240, acc:0.615000,score:0.639346\n",
      "-----------------------------------------\n",
      " 8 train (0.3213192398349444, 0.9766666690508524)\n",
      " 8 test 20:19:58 loss:1.578209, acc:0.580000,score:0.598125\n",
      "-----------------------------------------\n",
      " 9 train (0.28151898210247356, 0.9699999938408533)\n",
      " 9 test 20:20:12 loss:1.330454, acc:0.595000,score:0.593140\n",
      "-----------------------------------------\n",
      " 10 train (0.22449778517087302, 0.981666679183642)\n",
      " 10 test 20:20:26 loss:1.304374, acc:0.595000,score:0.610179\n",
      "-----------------------------------------\n",
      " 11 train (0.16192699844638506, 0.9950000047683716)\n",
      "testing...best_score\n",
      " 11 test 20:20:41 loss:1.316904, acc:0.610000,score:0.648213\n",
      "-----------------------------------------\n",
      " 12 train (0.1413314752280712, 0.9883333394924799)\n",
      " 12 test 20:20:55 loss:1.401878, acc:0.615000,score:0.637205\n",
      "-----------------------------------------\n",
      " 13 train (0.11061971882979076, 0.9916666696468989)\n",
      " 13 test 20:21:09 loss:1.310880, acc:0.620000,score:0.642033\n",
      "-----------------------------------------\n",
      " 14 train (0.09469033715625604, 0.9900000045696894)\n",
      " 14 test 20:21:24 loss:1.448434, acc:0.610000,score:0.633241\n",
      "-----------------------------------------\n",
      " 15 train (0.08509643748402596, 0.9883333345254263)\n",
      " 15 test 20:21:39 loss:1.347472, acc:0.595000,score:0.625530\n",
      "-----------------------------------------\n",
      " 16 train (0.07435429748147726, 0.9916666696468989)\n",
      " 16 test 20:21:54 loss:1.333413, acc:0.605000,score:0.598625\n",
      "-----------------------------------------\n",
      " 17 train (0.0550590076794227, 0.9950000047683716)\n",
      " 17 test 20:22:08 loss:1.443672, acc:0.610000,score:0.610752\n",
      "-----------------------------------------\n",
      " 18 train (0.058079748414456844, 0.9916666746139526)\n",
      " 18 test 20:22:23 loss:1.431889, acc:0.590000,score:0.605317\n",
      "-----------------------------------------\n",
      " 19 train (0.0468064791833361, 0.9916666696468989)\n",
      " 19 test 20:22:38 loss:1.486491, acc:0.590000,score:0.610120\n",
      "-----------------------------------------\n",
      " 20 train (0.03832715765262643, 0.9983333349227905)\n",
      " 20 test 20:22:52 loss:1.410156, acc:0.615000,score:0.630823\n",
      "-----------------------------------------\n",
      " 21 train (0.04206526714066664, 0.9949999998013178)\n",
      "testing...best_score\n",
      " 21 test 20:23:08 loss:1.438808, acc:0.625000,score:0.648510\n",
      "-----------------------------------------\n",
      " 22 train (0.03736482855553428, 0.9950000047683716)\n",
      "testing...best_score\n",
      " 22 test 20:23:22 loss:1.295893, acc:0.635000,score:0.654662\n",
      "-----------------------------------------\n",
      " 23 train (0.0326650341351827, 0.9966666648785273)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 23 test 20:23:38 loss:1.310612, acc:0.615000,score:0.629286\n",
      "-----------------------------------------\n",
      " 24 train (0.03131490697463354, 0.996666669845581)\n",
      " 24 test 20:23:52 loss:1.414569, acc:0.615000,score:0.611111\n",
      "-----------------------------------------\n",
      " 25 train (0.03056277831395467, 0.9950000047683716)\n",
      " 25 test 20:24:07 loss:1.436579, acc:0.615000,score:0.614929\n",
      "-----------------------------------------\n",
      " 26 train (0.034154750096301235, 0.9933333396911621)\n",
      " 26 test 20:24:23 loss:1.458475, acc:0.620000,score:0.636003\n",
      "-----------------------------------------\n",
      " 27 train (0.030184600812693436, 0.9933333396911621)\n",
      " 27 test 20:24:37 loss:1.425194, acc:0.615000,score:0.643184\n",
      "-----------------------------------------\n",
      " 28 train (0.02503474134330948, 0.996666669845581)\n",
      " 28 test 20:24:52 loss:1.414527, acc:0.620000,score:0.638435\n",
      "-----------------------------------------\n",
      " 29 train (0.030205679514134925, 0.9916666746139526)\n",
      " 29 test 20:25:06 loss:1.477920, acc:0.615000,score:0.622774\n",
      "-----------------------------------------\n",
      "best: 22 1.2958933734893798 0.635 0.6546621998883305\n",
      "(1.333871192932129, 0.595, 0.6222448979591837)\n",
      "##############\n",
      "0.001 100 20\n",
      " 1 train (2.0725274125734967, 0.49000000158945717)\n",
      "testing...best_score\n",
      " 1 test 20:25:43 loss:1.789578, acc:0.500000,score:0.429097\n",
      "-----------------------------------------\n",
      " 2 train (1.4650858203570047, 0.6016666710376739)\n",
      "testing...best_score\n",
      " 2 test 20:26:01 loss:1.412347, acc:0.585000,score:0.541238\n",
      "-----------------------------------------\n",
      " 3 train (0.9348303496837616, 0.7866666654745738)\n",
      "testing...best_score\n",
      " 3 test 20:26:20 loss:1.260490, acc:0.590000,score:0.611139\n",
      "-----------------------------------------\n",
      " 4 train (0.5633505940437317, 0.8799999972184499)\n",
      "testing...best_score\n",
      " 4 test 20:26:38 loss:1.350581, acc:0.605000,score:0.625519\n",
      "-----------------------------------------\n",
      " 5 train (0.3121741379300753, 0.951666659116745)\n",
      " 5 test 20:26:56 loss:1.202418, acc:0.585000,score:0.607843\n",
      "-----------------------------------------\n",
      " 6 train (0.17306924412647884, 0.9866666634877522)\n",
      " 6 test 20:27:14 loss:1.427970, acc:0.595000,score:0.605981\n",
      "-----------------------------------------\n",
      " 7 train (0.15669302095969517, 0.9749999940395355)\n",
      " 7 test 20:27:33 loss:1.437540, acc:0.590000,score:0.619062\n",
      "-----------------------------------------\n",
      " 8 train (0.11984815349181493, 0.9816666622956594)\n",
      " 8 test 20:27:52 loss:1.274489, acc:0.580000,score:0.589296\n",
      "-----------------------------------------\n",
      " 9 train (0.079849615568916, 0.9933333317438762)\n",
      " 9 test 20:28:10 loss:1.473499, acc:0.575000,score:0.609545\n",
      "-----------------------------------------\n",
      " 10 train (0.06316556669771671, 0.9949999988079071)\n",
      " 10 test 20:28:29 loss:1.405699, acc:0.610000,score:0.604040\n",
      "-----------------------------------------\n",
      " 11 train (0.0677478164434433, 0.9866666634877522)\n",
      " 11 test 20:28:47 loss:1.494248, acc:0.565000,score:0.599040\n",
      "-----------------------------------------\n",
      " 12 train (0.06367234388987224, 0.9899999976158143)\n",
      " 12 test 20:29:06 loss:1.275943, acc:0.590000,score:0.606952\n",
      "-----------------------------------------\n",
      " 13 train (0.051223070236543815, 0.9933333317438762)\n",
      "testing...best_score\n",
      " 13 test 20:29:25 loss:1.508085, acc:0.615000,score:0.641827\n",
      "-----------------------------------------\n",
      " 14 train (0.03911923406024774, 0.9966666658719381)\n",
      " 14 test 20:29:45 loss:1.520774, acc:0.560000,score:0.587369\n",
      "-----------------------------------------\n",
      " 15 train (0.03313968696941932, 0.998333332935969)\n",
      " 15 test 20:30:05 loss:1.487609, acc:0.550000,score:0.588772\n",
      "-----------------------------------------\n",
      " 16 train (0.033836879394948484, 0.9949999988079071)\n",
      " 16 test 20:30:25 loss:1.545487, acc:0.575000,score:0.595769\n",
      "-----------------------------------------\n",
      " 17 train (0.033361549178759255, 0.9949999988079071)\n",
      " 17 test 20:30:45 loss:1.482842, acc:0.590000,score:0.608294\n",
      "-----------------------------------------\n",
      " 18 train (0.03006596298267444, 0.9949999988079071)\n",
      " 18 test 20:31:04 loss:1.562373, acc:0.575000,score:0.592300\n",
      "-----------------------------------------\n",
      " 19 train (0.03441413963834445, 0.9916666646798452)\n",
      " 19 test 20:31:24 loss:1.563434, acc:0.570000,score:0.580964\n",
      "-----------------------------------------\n",
      " 20 train (0.026932845326761403, 0.9949999988079071)\n",
      " 20 test 20:31:42 loss:1.571067, acc:0.585000,score:0.612380\n",
      "-----------------------------------------\n",
      " 21 train (0.02521871943026781, 0.9949999988079071)\n",
      " 21 test 20:32:01 loss:1.563584, acc:0.585000,score:0.611485\n",
      "-----------------------------------------\n",
      " 22 train (0.030991164874285458, 0.9966666658719381)\n",
      " 22 test 20:32:20 loss:1.586617, acc:0.615000,score:0.633030\n",
      "-----------------------------------------\n",
      " 23 train (0.02707940001661579, 0.9949999988079071)\n",
      " 23 test 20:32:38 loss:1.500791, acc:0.575000,score:0.599713\n",
      "-----------------------------------------\n",
      " 24 train (0.025430615867177646, 0.9949999988079071)\n",
      " 24 test 20:32:58 loss:1.515421, acc:0.590000,score:0.605861\n",
      "-----------------------------------------\n",
      " 25 train (0.022256226651370524, 0.9949999988079071)\n",
      " 25 test 20:33:17 loss:1.521848, acc:0.575000,score:0.593210\n",
      "-----------------------------------------\n",
      " 26 train (0.021140839842458566, 0.9949999988079071)\n",
      " 26 test 20:33:36 loss:1.725189, acc:0.590000,score:0.611752\n",
      "-----------------------------------------\n",
      " 27 train (0.020245689867685237, 0.9966666658719381)\n",
      " 27 test 20:33:54 loss:1.552069, acc:0.595000,score:0.611844\n",
      "-----------------------------------------\n",
      " 28 train (0.026681730772058168, 0.9949999988079071)\n",
      " 28 test 20:34:13 loss:1.654281, acc:0.580000,score:0.622062\n",
      "-----------------------------------------\n",
      " 29 train (0.026898431529601415, 0.9966666658719381)\n",
      " 29 test 20:34:32 loss:1.552999, acc:0.575000,score:0.586740\n",
      "-----------------------------------------\n",
      "best: 13 1.5080850148200988 0.615 0.6418269230769231\n",
      "(1.3896463799476624, 0.625, 0.6445865692096848)\n",
      "##############\n",
      "0.001 300 100\n",
      " 1 train (2.6420522928237915, 0.4683333287636439)\n",
      " 1 test 20:35:32 loss:2.508206, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.2950666348139444, 0.5216666658719381)\n",
      "testing...best_score\n",
      " 2 test 20:36:18 loss:2.175592, acc:0.550000,score:0.573013\n",
      "-----------------------------------------\n",
      " 3 train (1.9557171662648518, 0.6400000055631002)\n",
      "testing...best_score\n",
      " 3 test 20:37:02 loss:1.925834, acc:0.585000,score:0.625000\n",
      "-----------------------------------------\n",
      " 4 train (1.6060328880945842, 0.7449999948342642)\n",
      " 4 test 20:37:50 loss:1.709368, acc:0.600000,score:0.581081\n",
      "-----------------------------------------\n",
      " 5 train (1.299531360467275, 0.7949999868869781)\n",
      " 5 test 20:38:38 loss:1.633303, acc:0.605000,score:0.588560\n",
      "-----------------------------------------\n",
      " 6 train (1.0189224183559418, 0.8599999944368998)\n",
      "testing...best_score\n",
      " 6 test 20:39:24 loss:1.541888, acc:0.620000,score:0.655470\n",
      "-----------------------------------------\n",
      " 7 train (0.8444644709428152, 0.8783333400885264)\n",
      " 7 test 20:40:11 loss:1.407273, acc:0.615000,score:0.631908\n",
      "-----------------------------------------\n",
      " 8 train (0.6404930849870046, 0.9350000023841858)\n",
      " 8 test 20:40:57 loss:1.326618, acc:0.620000,score:0.630105\n",
      "-----------------------------------------\n",
      " 9 train (0.518383209904035, 0.9516666730244955)\n",
      " 9 test 20:41:43 loss:1.494959, acc:0.595000,score:0.589234\n",
      "-----------------------------------------\n",
      " 10 train (0.4053393950064977, 0.9633333384990692)\n",
      " 10 test 20:42:30 loss:1.404024, acc:0.580000,score:0.617462\n",
      "-----------------------------------------\n",
      " 11 train (0.32034212350845337, 0.9783333440621694)\n",
      " 11 test 20:43:17 loss:1.582792, acc:0.620000,score:0.622896\n",
      "-----------------------------------------\n",
      " 12 train (0.28063595791657764, 0.975000003973643)\n",
      " 12 test 20:44:04 loss:1.290224, acc:0.605000,score:0.638462\n",
      "-----------------------------------------\n",
      " 13 train (0.21864456683397293, 0.9933333396911621)\n",
      "testing...best_score\n",
      " 13 test 20:44:51 loss:1.512066, acc:0.640000,score:0.657661\n",
      "-----------------------------------------\n",
      " 14 train (0.20416788756847382, 0.9833333392937978)\n",
      " 14 test 20:45:43 loss:1.784905, acc:0.585000,score:0.554103\n",
      "-----------------------------------------\n",
      " 15 train (0.1887543722987175, 0.9800000091393789)\n",
      " 15 test 20:46:35 loss:1.347747, acc:0.585000,score:0.606816\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 16 train (0.14827105899651846, 0.9850000043710073)\n",
      " 16 test 20:47:27 loss:1.245748, acc:0.605000,score:0.625585\n",
      "-----------------------------------------\n",
      " 17 train (0.1118389976521333, 0.9916666746139526)\n",
      " 17 test 20:48:20 loss:1.380686, acc:0.595000,score:0.587996\n",
      "-----------------------------------------\n",
      " 18 train (0.09279349322120349, 0.9916666746139526)\n",
      " 18 test 20:49:11 loss:1.528432, acc:0.590000,score:0.595641\n",
      "-----------------------------------------\n",
      " 19 train (0.07920158406098683, 0.9916666746139526)\n",
      " 19 test 20:49:58 loss:1.669957, acc:0.580000,score:0.590243\n",
      "-----------------------------------------\n",
      " 20 train (0.06909410779674847, 0.9916666746139526)\n",
      " 20 test 20:50:46 loss:1.776532, acc:0.605000,score:0.620306\n",
      "-----------------------------------------\n",
      " 21 train (0.07728259886304538, 0.9883333444595337)\n",
      " 21 test 20:51:37 loss:1.826502, acc:0.585000,score:0.606405\n",
      "-----------------------------------------\n",
      " 22 train (0.052596223230163254, 0.9950000047683716)\n",
      " 22 test 20:52:25 loss:1.517145, acc:0.590000,score:0.605495\n",
      "-----------------------------------------\n",
      " 23 train (0.04953337088227272, 0.9950000047683716)\n",
      " 23 test 20:53:14 loss:1.408740, acc:0.590000,score:0.597312\n",
      "-----------------------------------------\n",
      " 24 train (0.05153460924824079, 0.9950000047683716)\n",
      " 24 test 20:54:09 loss:1.651879, acc:0.580000,score:0.592136\n",
      "-----------------------------------------\n",
      " 25 train (0.047479730720321335, 0.9900000095367432)\n",
      " 25 test 20:55:01 loss:1.591029, acc:0.595000,score:0.590294\n",
      "-----------------------------------------\n",
      " 26 train (0.0526940148944656, 0.9900000095367432)\n",
      " 26 test 20:55:57 loss:1.462722, acc:0.595000,score:0.605608\n",
      "-----------------------------------------\n",
      " 27 train (0.045937858521938324, 0.9933333396911621)\n",
      " 27 test 20:56:47 loss:1.517469, acc:0.565000,score:0.593137\n",
      "-----------------------------------------\n",
      " 28 train (0.045934130748113, 0.9916666746139526)\n",
      " 28 test 20:57:40 loss:1.397393, acc:0.595000,score:0.605910\n",
      "-----------------------------------------\n",
      " 29 train (0.04905284754931927, 0.9883333444595337)\n",
      " 29 test 20:58:29 loss:1.514422, acc:0.585000,score:0.591787\n",
      "-----------------------------------------\n",
      "best: 13 1.5120664739608765 0.64 0.6576608602915037\n",
      "(1.621918067932129, 0.585, 0.6062826062826063)\n",
      "##############\n",
      "0.001 300 50\n",
      " 1 train (2.511105398337046, 0.49166666716337204)\n",
      "testing...best_score\n",
      " 1 test 20:59:54 loss:2.218882, acc:0.480000,score:0.392172\n",
      "-----------------------------------------\n",
      " 2 train (1.905080149571101, 0.5899999986092249)\n",
      "testing...best_score\n",
      " 2 test 21:00:54 loss:1.750596, acc:0.580000,score:0.616087\n",
      "-----------------------------------------\n",
      " 3 train (1.3726878960927327, 0.7516666601101557)\n",
      "testing...best_score\n",
      " 3 test 21:01:54 loss:1.485089, acc:0.610000,score:0.631164\n",
      "-----------------------------------------\n",
      " 4 train (0.9630952527125677, 0.824999988079071)\n",
      " 4 test 21:02:56 loss:1.515154, acc:0.600000,score:0.612568\n",
      "-----------------------------------------\n",
      " 5 train (0.6923569043477377, 0.8683333297570547)\n",
      " 5 test 21:04:02 loss:1.271796, acc:0.595000,score:0.628866\n",
      "-----------------------------------------\n",
      " 6 train (0.4789028863112132, 0.9183333317438761)\n",
      " 6 test 21:05:04 loss:1.377145, acc:0.570000,score:0.595622\n",
      "-----------------------------------------\n",
      " 7 train (0.3076356103022893, 0.9683333237965902)\n",
      " 7 test 21:06:03 loss:1.601144, acc:0.570000,score:0.579308\n",
      "-----------------------------------------\n",
      " 8 train (0.26894692331552505, 0.9583333283662796)\n",
      " 8 test 21:07:06 loss:1.366422, acc:0.565000,score:0.608018\n",
      "-----------------------------------------\n",
      " 9 train (0.2257501818239689, 0.9583333283662796)\n",
      " 9 test 21:08:08 loss:1.118215, acc:0.585000,score:0.622915\n",
      "-----------------------------------------\n",
      " 10 train (0.1595922907193502, 0.9850000043710073)\n",
      "testing...best_score\n",
      " 10 test 21:09:11 loss:1.256702, acc:0.630000,score:0.637887\n",
      "-----------------------------------------\n",
      " 11 train (0.12515944056212902, 0.9883333394924799)\n",
      " 11 test 21:10:09 loss:1.560845, acc:0.600000,score:0.630733\n",
      "-----------------------------------------\n",
      " 12 train (0.1284304844836394, 0.9816666742165884)\n",
      " 12 test 21:11:06 loss:1.425258, acc:0.560000,score:0.581434\n",
      "-----------------------------------------\n",
      " 13 train (0.10286909217635791, 0.9783333341280619)\n",
      " 13 test 21:12:04 loss:1.297957, acc:0.595000,score:0.606883\n",
      "-----------------------------------------\n",
      " 14 train (0.07852961712827285, 0.9883333394924799)\n",
      " 14 test 21:13:03 loss:1.304877, acc:0.595000,score:0.625078\n",
      "-----------------------------------------\n",
      " 15 train (0.06862506022055943, 0.9900000045696894)\n",
      " 15 test 21:14:04 loss:1.297808, acc:0.595000,score:0.595122\n",
      "-----------------------------------------\n",
      " 16 train (0.06065998940418164, 0.9950000047683716)\n",
      " 16 test 21:15:03 loss:1.274833, acc:0.590000,score:0.605114\n",
      "-----------------------------------------\n",
      " 17 train (0.04322283466657003, 0.9950000047683716)\n",
      " 17 test 21:16:02 loss:1.387410, acc:0.605000,score:0.624901\n",
      "-----------------------------------------\n",
      " 18 train (0.035570369102060795, 0.996666669845581)\n",
      " 18 test 21:17:05 loss:1.453390, acc:0.625000,score:0.630588\n",
      "-----------------------------------------\n",
      " 19 train (0.043825064630558096, 0.9950000047683716)\n",
      " 19 test 21:18:03 loss:1.305753, acc:0.585000,score:0.610566\n",
      "-----------------------------------------\n",
      " 20 train (0.0499661210924387, 0.9916666646798452)\n",
      " 20 test 21:19:01 loss:1.388595, acc:0.585000,score:0.611995\n",
      "-----------------------------------------\n",
      " 21 train (0.036245687088618674, 0.9983333349227905)\n",
      " 21 test 21:19:58 loss:1.370549, acc:0.575000,score:0.584663\n",
      "-----------------------------------------\n",
      " 22 train (0.037000253330916166, 0.996666669845581)\n",
      " 22 test 21:20:59 loss:1.450276, acc:0.585000,score:0.599282\n",
      "-----------------------------------------\n",
      " 23 train (0.03269268417110046, 0.9933333347241083)\n",
      " 23 test 21:21:57 loss:1.633948, acc:0.600000,score:0.614593\n",
      "-----------------------------------------\n",
      " 24 train (0.044983836356550455, 0.9933333396911621)\n",
      " 24 test 21:22:54 loss:1.476650, acc:0.605000,score:0.609000\n",
      "-----------------------------------------\n",
      " 25 train (0.05005720630288124, 0.9900000045696894)\n",
      " 25 test 21:23:55 loss:1.239830, acc:0.610000,score:0.632397\n",
      "-----------------------------------------\n",
      " 26 train (0.035111592461665474, 0.996666669845581)\n",
      " 26 test 21:24:53 loss:1.393623, acc:0.600000,score:0.621449\n",
      "-----------------------------------------\n",
      " 27 train (0.030185069423168898, 0.9950000047683716)\n",
      " 27 test 21:25:49 loss:1.543294, acc:0.600000,score:0.625359\n",
      "-----------------------------------------\n",
      " 28 train (0.029752267990261316, 0.9950000047683716)\n",
      " 28 test 21:26:47 loss:1.633821, acc:0.585000,score:0.602527\n",
      "-----------------------------------------\n",
      " 29 train (0.027779059795041878, 0.9950000047683716)\n",
      " 29 test 21:27:44 loss:1.518274, acc:0.585000,score:0.616567\n",
      "-----------------------------------------\n",
      "best: 10 1.256702311038971 0.63 0.6378871937124364\n",
      "(1.2408080053329469, 0.59, 0.6275257108287962)\n",
      "##############\n",
      "0.001 300 20\n",
      " 1 train (2.189209274450938, 0.4949999988079071)\n",
      "testing...best_score\n",
      " 1 test 21:29:14 loss:1.691081, acc:0.550000,score:0.600387\n",
      "-----------------------------------------\n",
      " 2 train (1.3106483896573384, 0.6783333351214726)\n",
      " 2 test 21:30:09 loss:1.327051, acc:0.520000,score:0.438007\n",
      "-----------------------------------------\n",
      " 3 train (0.8026360392570495, 0.7749999980131785)\n",
      " 3 test 21:31:05 loss:1.100771, acc:0.585000,score:0.577530\n",
      "-----------------------------------------\n",
      " 4 train (0.47105976144472755, 0.8716666658719381)\n",
      "testing...best_score\n",
      " 4 test 21:32:00 loss:0.992729, acc:0.580000,score:0.612387\n",
      "-----------------------------------------\n",
      " 5 train (0.310048379500707, 0.9283333261807759)\n",
      " 5 test 21:32:54 loss:1.046516, acc:0.585000,score:0.559966\n",
      "-----------------------------------------\n",
      " 6 train (0.22678410063187282, 0.9466666599114736)\n",
      "testing...best_score\n",
      " 6 test 21:33:51 loss:1.122046, acc:0.600000,score:0.617909\n",
      "-----------------------------------------\n",
      " 7 train (0.1452941946685314, 0.9816666622956594)\n",
      " 7 test 21:34:50 loss:1.327371, acc:0.590000,score:0.579609\n",
      "-----------------------------------------\n",
      " 8 train (0.10965449959039689, 0.9833333293596903)\n",
      "testing...best_score\n",
      " 8 test 21:35:50 loss:1.242648, acc:0.565000,score:0.622147\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 9 train (0.09517499580979347, 0.9883333305517833)\n",
      " 9 test 21:36:50 loss:1.194608, acc:0.600000,score:0.575507\n",
      "-----------------------------------------\n",
      " 10 train (0.06296211952964464, 0.9899999976158143)\n",
      " 10 test 21:37:51 loss:1.343481, acc:0.610000,score:0.611111\n",
      "-----------------------------------------\n",
      " 11 train (0.08222623330851396, 0.9833333293596903)\n",
      " 11 test 21:38:47 loss:1.237517, acc:0.565000,score:0.612150\n",
      "-----------------------------------------\n",
      " 12 train (0.09110502041876316, 0.9799999952316284)\n",
      " 12 test 21:39:43 loss:1.423451, acc:0.595000,score:0.619733\n",
      "-----------------------------------------\n",
      " 13 train (0.097837433218956, 0.9766666611035665)\n",
      " 13 test 21:40:40 loss:1.428979, acc:0.570000,score:0.558965\n",
      "-----------------------------------------\n",
      " 14 train (0.07379399028917154, 0.9883333305517833)\n",
      "testing...best_score\n",
      " 14 test 21:41:39 loss:1.108711, acc:0.630000,score:0.638020\n",
      "-----------------------------------------\n",
      " 15 train (0.053128722868859765, 0.9916666646798452)\n",
      " 15 test 21:42:33 loss:1.275739, acc:0.610000,score:0.637618\n",
      "-----------------------------------------\n",
      " 16 train (0.040411147040625414, 0.9933333317438762)\n",
      " 16 test 21:43:27 loss:1.358317, acc:0.620000,score:0.619417\n",
      "-----------------------------------------\n",
      " 17 train (0.03688272746900718, 0.9949999988079071)\n",
      " 17 test 21:44:23 loss:1.359189, acc:0.605000,score:0.626734\n",
      "-----------------------------------------\n",
      " 18 train (0.038123117511471113, 0.9933333317438762)\n",
      " 18 test 21:45:18 loss:1.415052, acc:0.575000,score:0.593978\n",
      "-----------------------------------------\n",
      " 19 train (0.04464002003272374, 0.9883333305517833)\n",
      "testing...best_score\n",
      " 19 test 21:46:15 loss:1.527730, acc:0.605000,score:0.643938\n",
      "-----------------------------------------\n",
      " 20 train (0.040755495180686316, 0.9949999988079071)\n",
      " 20 test 21:47:12 loss:1.382464, acc:0.590000,score:0.622105\n",
      "-----------------------------------------\n",
      " 21 train (0.034227758459746835, 0.9933333317438762)\n",
      " 21 test 21:48:10 loss:1.325858, acc:0.590000,score:0.631335\n",
      "-----------------------------------------\n",
      " 22 train (0.02719307681545615, 0.9949999988079071)\n",
      " 22 test 21:49:08 loss:1.413181, acc:0.600000,score:0.635867\n",
      "-----------------------------------------\n",
      " 23 train (0.02349190249418219, 0.9949999988079071)\n",
      " 23 test 21:50:04 loss:1.564251, acc:0.595000,score:0.622438\n",
      "-----------------------------------------\n",
      " 24 train (0.028891009216507276, 0.9933333317438762)\n",
      " 24 test 21:50:59 loss:1.755558, acc:0.600000,score:0.631509\n",
      "-----------------------------------------\n",
      " 25 train (0.029409168070803085, 0.9933333317438762)\n",
      " 25 test 21:51:56 loss:1.602104, acc:0.605000,score:0.628518\n",
      "-----------------------------------------\n",
      " 26 train (0.03780516224602858, 0.9933333317438762)\n",
      " 26 test 21:52:53 loss:1.811975, acc:0.605000,score:0.631012\n",
      "-----------------------------------------\n",
      " 27 train (0.0916193624958396, 0.9749999980131785)\n",
      " 27 test 21:53:52 loss:1.788913, acc:0.590000,score:0.581415\n",
      "-----------------------------------------\n",
      " 28 train (0.06987547141810259, 0.9883333305517833)\n",
      " 28 test 21:54:55 loss:1.484856, acc:0.570000,score:0.605957\n",
      "-----------------------------------------\n",
      " 29 train (0.05891380806763967, 0.9916666646798452)\n",
      " 29 test 21:55:52 loss:1.717071, acc:0.580000,score:0.578095\n",
      "-----------------------------------------\n",
      "best: 19 1.5277295327186584 0.605 0.6439379787035306\n",
      "(1.3738480186462403, 0.595, 0.6269627279936558)\n",
      "##############\n",
      "0.0005 100 100\n",
      " 1 train (2.3644384145736694, 0.451666663090388)\n",
      " 1 test 21:56:45 loss:2.310342, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.2477699518203735, 0.4966666599114736)\n",
      " 2 test 21:57:05 loss:2.247212, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (2.156667947769165, 0.49666666487852734)\n",
      " 3 test 21:57:24 loss:2.164021, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 4 train (2.061777949333191, 0.49666666487852734)\n",
      " 4 test 21:57:43 loss:2.068779, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 5 train (1.970408817132314, 0.5116666605075201)\n",
      " 5 test 21:58:02 loss:1.988948, acc:0.470000,score:nan\n",
      "-----------------------------------------\n",
      " 6 train (1.8652058442433674, 0.5566666622956594)\n",
      "testing...best_score\n",
      " 6 test 21:58:23 loss:1.920466, acc:0.485000,score:0.343174\n",
      "-----------------------------------------\n",
      " 7 train (1.7438044548034668, 0.5966666539510092)\n",
      "testing...best_score\n",
      " 7 test 21:58:42 loss:1.840596, acc:0.540000,score:0.444189\n",
      "-----------------------------------------\n",
      " 8 train (1.6114811301231384, 0.7433333396911621)\n",
      "testing...best_score\n",
      " 8 test 21:59:01 loss:1.762866, acc:0.575000,score:0.576482\n",
      "-----------------------------------------\n",
      " 9 train (1.4496139486630757, 0.7983333269755045)\n",
      "testing...best_score\n",
      " 9 test 21:59:20 loss:1.740864, acc:0.620000,score:0.649025\n",
      "-----------------------------------------\n",
      " 10 train (1.3260835607846577, 0.8166666626930237)\n",
      "testing...best_score\n",
      " 10 test 21:59:39 loss:1.655022, acc:0.620000,score:0.650000\n",
      "-----------------------------------------\n",
      " 11 train (1.1779856483141582, 0.8616666694482168)\n",
      " 11 test 21:59:58 loss:1.594400, acc:0.605000,score:0.642215\n",
      "-----------------------------------------\n",
      " 12 train (1.0591310063997905, 0.8833333353201548)\n",
      " 12 test 22:00:17 loss:1.576241, acc:0.600000,score:0.607491\n",
      "-----------------------------------------\n",
      " 13 train (0.9441368182500204, 0.8949999908606211)\n",
      " 13 test 22:00:37 loss:1.558836, acc:0.585000,score:0.607314\n",
      "-----------------------------------------\n",
      " 14 train (0.8413919607798258, 0.92999999721845)\n",
      " 14 test 22:00:56 loss:1.576546, acc:0.600000,score:0.590093\n",
      "-----------------------------------------\n",
      " 15 train (0.7554027636845907, 0.948333332935969)\n",
      " 15 test 22:01:15 loss:1.652199, acc:0.590000,score:0.599246\n",
      "-----------------------------------------\n",
      " 16 train (0.6730620960394541, 0.9666666785875956)\n",
      " 16 test 22:01:34 loss:1.598693, acc:0.625000,score:0.639725\n",
      "-----------------------------------------\n",
      " 17 train (0.6181342601776123, 0.9783333440621694)\n",
      " 17 test 22:01:53 loss:1.608559, acc:0.610000,score:0.623736\n",
      "-----------------------------------------\n",
      " 18 train (0.5763602058092753, 0.9800000091393789)\n",
      " 18 test 22:02:12 loss:1.865855, acc:0.595000,score:0.579385\n",
      "-----------------------------------------\n",
      " 19 train (0.5444664160410563, 0.973333348830541)\n",
      " 19 test 22:02:32 loss:1.639831, acc:0.605000,score:0.613280\n",
      "-----------------------------------------\n",
      " 20 train (0.4971866508324941, 0.9833333492279053)\n",
      " 20 test 22:02:51 loss:1.602353, acc:0.610000,score:0.626301\n",
      "-----------------------------------------\n",
      " 21 train (0.46095674733320874, 0.9850000043710073)\n",
      " 21 test 22:03:10 loss:1.662548, acc:0.590000,score:0.583520\n",
      "-----------------------------------------\n",
      " 22 train (0.4334359069665273, 0.9866666793823242)\n",
      " 22 test 22:03:29 loss:1.711154, acc:0.625000,score:0.610550\n",
      "-----------------------------------------\n",
      " 23 train (0.42214536666870117, 0.9866666793823242)\n",
      " 23 test 22:03:48 loss:1.538399, acc:0.625000,score:0.637311\n",
      "-----------------------------------------\n",
      " 24 train (0.3837201048930486, 0.9866666793823242)\n",
      " 24 test 22:04:07 loss:1.604666, acc:0.610000,score:0.624987\n",
      "-----------------------------------------\n",
      " 25 train (0.3628394504388173, 0.9850000043710073)\n",
      " 25 test 22:04:27 loss:1.551092, acc:0.610000,score:0.627626\n",
      "-----------------------------------------\n",
      " 26 train (0.3243215282758077, 0.9916666746139526)\n",
      " 26 test 22:04:46 loss:1.535252, acc:0.620000,score:0.635749\n",
      "-----------------------------------------\n",
      " 27 train (0.30786368250846863, 0.9883333444595337)\n",
      " 27 test 22:05:05 loss:1.536720, acc:0.590000,score:0.613911\n",
      "-----------------------------------------\n",
      " 28 train (0.28555194040139514, 0.9933333396911621)\n",
      " 28 test 22:05:24 loss:1.608936, acc:0.600000,score:0.625496\n",
      "-----------------------------------------\n",
      " 29 train (0.26945842305819195, 0.9933333396911621)\n",
      " 29 test 22:05:43 loss:1.551172, acc:0.585000,score:0.603087\n",
      "-----------------------------------------\n",
      "best: 10 1.655022177696228 0.62 0.6499999999999999\n",
      "(1.6575434875488282, 0.605, 0.628052805280528)\n",
      "##############\n",
      "0.0005 100 50\n",
      " 1 train (2.3213422497113547, 0.46000000337759656)\n",
      " 1 test 22:06:25 loss:2.237329, acc:0.460000,score:nan\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 2 train (2.1225929856300354, 0.49666666736205417)\n",
      " 2 test 22:06:42 loss:2.081225, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (1.9560473561286926, 0.4999999975164731)\n",
      "testing...best_score\n",
      " 3 test 22:07:00 loss:1.939843, acc:0.465000,score:0.335382\n",
      "-----------------------------------------\n",
      " 4 train (1.7751931150754292, 0.5683333327372869)\n",
      "testing...best_score\n",
      " 4 test 22:07:18 loss:1.801934, acc:0.510000,score:0.502380\n",
      "-----------------------------------------\n",
      " 5 train (1.5496783256530762, 0.6733333369096121)\n",
      "testing...best_score\n",
      " 5 test 22:07:35 loss:1.707791, acc:0.590000,score:0.635576\n",
      "-----------------------------------------\n",
      " 6 train (1.348873883485794, 0.7366666595141093)\n",
      "testing...best_score\n",
      " 6 test 22:07:53 loss:1.598702, acc:0.605000,score:0.643854\n",
      "-----------------------------------------\n",
      " 7 train (1.1434964487950008, 0.8266666531562805)\n",
      " 7 test 22:08:10 loss:1.515098, acc:0.615000,score:0.642935\n",
      "-----------------------------------------\n",
      " 8 train (0.9601550201574961, 0.8666666646798452)\n",
      " 8 test 22:08:28 loss:1.521029, acc:0.590000,score:0.586370\n",
      "-----------------------------------------\n",
      " 9 train (0.8037365823984146, 0.8966666609048843)\n",
      " 9 test 22:08:45 loss:1.563651, acc:0.575000,score:0.599515\n",
      "-----------------------------------------\n",
      " 10 train (0.6829733153184255, 0.9266666620969772)\n",
      " 10 test 22:09:02 loss:1.566181, acc:0.575000,score:0.565657\n",
      "-----------------------------------------\n",
      " 11 train (0.585219810406367, 0.9516666680574417)\n",
      " 11 test 22:09:20 loss:1.662773, acc:0.590000,score:0.607412\n",
      "-----------------------------------------\n",
      " 12 train (0.49567827334006626, 0.9699999988079071)\n",
      " 12 test 22:09:37 loss:1.488333, acc:0.590000,score:0.632827\n",
      "-----------------------------------------\n",
      " 13 train (0.4386724929014842, 0.9716666688521703)\n",
      " 13 test 22:09:55 loss:1.570296, acc:0.590000,score:0.596880\n",
      "-----------------------------------------\n",
      " 14 train (0.3935222377379735, 0.9733333339293798)\n",
      " 14 test 22:10:12 loss:1.639232, acc:0.620000,score:0.640492\n",
      "-----------------------------------------\n",
      " 15 train (0.3354544962445895, 0.9850000043710073)\n",
      " 15 test 22:10:29 loss:1.497240, acc:0.590000,score:0.613262\n",
      "-----------------------------------------\n",
      " 16 train (0.3113505889972051, 0.9883333345254263)\n",
      " 16 test 22:10:47 loss:1.602714, acc:0.595000,score:0.619965\n",
      "-----------------------------------------\n",
      " 17 train (0.2510688540836175, 0.9933333396911621)\n",
      " 17 test 22:11:04 loss:1.407662, acc:0.615000,score:0.622572\n",
      "-----------------------------------------\n",
      " 18 train (0.23621201142668724, 0.9900000045696894)\n",
      " 18 test 22:11:22 loss:1.442156, acc:0.595000,score:0.633540\n",
      "-----------------------------------------\n",
      " 19 train (0.20979309578736624, 0.9900000045696894)\n",
      " 19 test 22:11:39 loss:1.590220, acc:0.585000,score:0.574430\n",
      "-----------------------------------------\n",
      " 20 train (0.2061242274940014, 0.9850000043710073)\n",
      " 20 test 22:11:56 loss:1.416090, acc:0.580000,score:0.621817\n",
      "-----------------------------------------\n",
      " 21 train (0.18571661288539568, 0.9900000095367432)\n",
      " 21 test 22:12:14 loss:1.520671, acc:0.595000,score:0.613882\n",
      "-----------------------------------------\n",
      " 22 train (0.15553597112496695, 0.9900000045696894)\n",
      " 22 test 22:12:31 loss:1.422148, acc:0.585000,score:0.611585\n",
      "-----------------------------------------\n",
      " 23 train (0.1423258427530527, 0.9883333345254263)\n",
      " 23 test 22:12:49 loss:1.463487, acc:0.580000,score:0.595413\n",
      "-----------------------------------------\n",
      " 24 train (0.12225178939600785, 0.996666669845581)\n",
      " 24 test 22:13:06 loss:1.513606, acc:0.590000,score:0.618572\n",
      "-----------------------------------------\n",
      " 25 train (0.11135489990313847, 0.9950000047683716)\n",
      " 25 test 22:13:24 loss:1.418275, acc:0.585000,score:0.606450\n",
      "-----------------------------------------\n",
      " 26 train (0.09938137792050838, 0.996666669845581)\n",
      " 26 test 22:13:41 loss:1.439728, acc:0.580000,score:0.610306\n",
      "-----------------------------------------\n",
      " 27 train (0.09353119693696499, 0.996666669845581)\n",
      " 27 test 22:13:59 loss:1.489448, acc:0.590000,score:0.614576\n",
      "-----------------------------------------\n",
      "early stop\n",
      "best: 6 1.5987023544311523 0.605 0.6438538205980067\n",
      "(1.5543445110321046, 0.615, 0.6261843238587426)\n",
      "##############\n",
      "0.0005 100 20\n",
      " 1 train (2.2073801517486573, 0.49666666487852734)\n",
      " 1 test 22:14:44 loss:2.067711, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (1.8560517231623332, 0.525000003973643)\n",
      "testing...best_score\n",
      " 2 test 22:15:08 loss:1.780966, acc:0.470000,score:0.370518\n",
      "-----------------------------------------\n",
      " 3 train (1.470586359500885, 0.6616666634877523)\n",
      "testing...best_score\n",
      " 3 test 22:15:30 loss:1.569068, acc:0.575000,score:0.616153\n",
      "-----------------------------------------\n",
      " 4 train (1.12391370733579, 0.7850000003973643)\n",
      " 4 test 22:15:53 loss:1.510388, acc:0.590000,score:0.578744\n",
      "-----------------------------------------\n",
      " 5 train (0.836526381969452, 0.8516666650772095)\n",
      " 5 test 22:16:16 loss:1.396627, acc:0.610000,score:0.610053\n",
      "-----------------------------------------\n",
      " 6 train (0.6143482635418575, 0.8983333309491476)\n",
      " 6 test 22:16:38 loss:1.476549, acc:0.555000,score:0.542238\n",
      "-----------------------------------------\n",
      " 7 train (0.45906763672828677, 0.9433333257834117)\n",
      " 7 test 22:17:01 loss:1.508192, acc:0.590000,score:0.589035\n",
      "-----------------------------------------\n",
      " 8 train (0.3596272508303324, 0.9633333285649618)\n",
      "testing...best_score\n",
      " 8 test 22:17:23 loss:1.416738, acc:0.620000,score:0.635619\n",
      "-----------------------------------------\n",
      " 9 train (0.2869878500699997, 0.9666666607062022)\n",
      " 9 test 22:17:46 loss:1.377992, acc:0.600000,score:0.622481\n",
      "-----------------------------------------\n",
      " 10 train (0.20917712152004242, 0.9816666622956594)\n",
      " 10 test 22:18:09 loss:1.428410, acc:0.605000,score:0.616551\n",
      "-----------------------------------------\n",
      " 11 train (0.15788518885771433, 0.9916666646798452)\n",
      " 11 test 22:18:31 loss:1.549324, acc:0.605000,score:0.632582\n",
      "-----------------------------------------\n",
      " 12 train (0.1467595470448335, 0.9866666634877522)\n",
      " 12 test 22:18:54 loss:1.435747, acc:0.605000,score:0.593295\n",
      "-----------------------------------------\n",
      " 13 train (0.10930665110548338, 0.9933333317438762)\n",
      " 13 test 22:19:16 loss:1.386248, acc:0.615000,score:0.621149\n",
      "-----------------------------------------\n",
      " 14 train (0.09058811453481515, 0.9916666646798452)\n",
      " 14 test 22:19:39 loss:1.441163, acc:0.590000,score:0.614117\n",
      "-----------------------------------------\n",
      " 15 train (0.08850443934400877, 0.9933333317438762)\n",
      " 15 test 22:20:02 loss:1.418246, acc:0.590000,score:0.606914\n",
      "-----------------------------------------\n",
      " 16 train (0.07438716764251391, 0.9933333317438762)\n",
      " 16 test 22:20:24 loss:1.431994, acc:0.620000,score:0.635000\n",
      "-----------------------------------------\n",
      " 17 train (0.06393599150081476, 0.9933333317438762)\n",
      " 17 test 22:20:47 loss:1.512102, acc:0.595000,score:0.589205\n",
      "-----------------------------------------\n",
      " 18 train (0.057921882097919784, 0.9949999988079071)\n",
      " 18 test 22:21:09 loss:1.418928, acc:0.605000,score:0.600785\n",
      "-----------------------------------------\n",
      " 19 train (0.059914072851339976, 0.9916666646798452)\n",
      " 19 test 22:21:32 loss:1.364952, acc:0.585000,score:0.610364\n",
      "-----------------------------------------\n",
      " 20 train (0.05568266157060862, 0.9949999988079071)\n",
      " 20 test 22:21:54 loss:1.511363, acc:0.600000,score:0.610929\n",
      "-----------------------------------------\n",
      " 21 train (0.06525360997766257, 0.9899999976158143)\n",
      " 21 test 22:22:17 loss:1.355262, acc:0.580000,score:0.600842\n",
      "-----------------------------------------\n",
      " 22 train (0.05325869973748922, 0.9966666658719381)\n",
      " 22 test 22:22:40 loss:1.539732, acc:0.580000,score:0.579395\n",
      "-----------------------------------------\n",
      " 23 train (0.046346011199057104, 0.9933333317438762)\n",
      " 23 test 22:23:02 loss:1.423812, acc:0.610000,score:0.631064\n",
      "-----------------------------------------\n",
      " 24 train (0.05158751662820578, 0.9899999976158143)\n",
      " 24 test 22:23:25 loss:1.588584, acc:0.585000,score:0.603141\n",
      "-----------------------------------------\n",
      " 25 train (0.046131075173616407, 0.9916666646798452)\n",
      " 25 test 22:23:47 loss:1.557616, acc:0.580000,score:0.598485\n",
      "-----------------------------------------\n",
      " 26 train (0.03595140526692073, 0.9966666658719381)\n",
      " 26 test 22:24:10 loss:1.574277, acc:0.555000,score:0.584741\n",
      "-----------------------------------------\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 27 train (0.04184057613213857, 0.9899999976158143)\n",
      " 27 test 22:24:33 loss:1.693084, acc:0.590000,score:0.605128\n",
      "-----------------------------------------\n",
      " 28 train (0.039521762169897556, 0.9916666646798452)\n",
      "testing...best_score\n",
      " 28 test 22:24:55 loss:1.665564, acc:0.615000,score:0.649319\n",
      "-----------------------------------------\n",
      " 29 train (0.04247487634420395, 0.9933333317438762)\n",
      " 29 test 22:25:18 loss:1.661860, acc:0.590000,score:0.584099\n",
      "-----------------------------------------\n",
      "best: 28 1.665564045906067 0.615 0.6493192276673467\n",
      "(1.37210223197937, 0.615, 0.6584628005947804)\n",
      "##############\n",
      "0.0005 300 100\n",
      " 1 train (2.7013197342554727, 0.4699999988079071)\n",
      " 1 test 22:26:35 loss:2.627369, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.508941928545634, 0.49666666487852734)\n",
      " 2 test 22:27:29 loss:2.451636, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 3 train (2.32863716284434, 0.5083333303531011)\n",
      "testing...best_score\n",
      " 3 test 22:28:25 loss:2.298835, acc:0.470000,score:0.354977\n",
      "-----------------------------------------\n",
      " 4 train (2.1391119162241616, 0.5433333317438761)\n",
      "testing...best_score\n",
      " 4 test 22:29:21 loss:2.146158, acc:0.530000,score:0.513935\n",
      "-----------------------------------------\n",
      " 5 train (1.9347015023231506, 0.6883333325386047)\n",
      "testing...best_score\n",
      " 5 test 22:30:18 loss:2.063556, acc:0.610000,score:0.638211\n",
      "-----------------------------------------\n",
      " 6 train (1.7097076574961345, 0.7483333349227905)\n",
      " 6 test 22:31:15 loss:1.853531, acc:0.595000,score:0.628319\n",
      "-----------------------------------------\n",
      " 7 train (1.5316594044367473, 0.7883333365122477)\n",
      " 7 test 22:32:13 loss:1.774683, acc:0.620000,score:0.621515\n",
      "-----------------------------------------\n",
      " 8 train (1.3519575198491414, 0.8283333480358124)\n",
      " 8 test 22:33:11 loss:1.830989, acc:0.620000,score:0.603553\n",
      "-----------------------------------------\n",
      " 9 train (1.2196768522262573, 0.8600000043710073)\n",
      " 9 test 22:34:10 loss:1.674746, acc:0.595000,score:0.637924\n",
      "-----------------------------------------\n",
      " 10 train (1.0430193146069844, 0.8899999956289927)\n",
      " 10 test 22:35:09 loss:1.658503, acc:0.620000,score:0.634225\n",
      "-----------------------------------------\n",
      " 11 train (0.9199690421422323, 0.9116666714350382)\n",
      "testing...best_score\n",
      " 11 test 22:36:08 loss:1.619548, acc:0.625000,score:0.641407\n",
      "-----------------------------------------\n",
      " 12 train (0.8003734250863394, 0.9333333373069763)\n",
      "testing...best_score\n",
      " 12 test 22:37:07 loss:1.715515, acc:0.645000,score:0.649068\n",
      "-----------------------------------------\n",
      " 13 train (0.6992928385734558, 0.9549999932448069)\n",
      " 13 test 22:38:07 loss:1.774131, acc:0.620000,score:0.622106\n",
      "-----------------------------------------\n",
      " 14 train (0.6218000650405884, 0.9700000087420145)\n",
      " 14 test 22:39:07 loss:1.749708, acc:0.610000,score:0.642883\n",
      "-----------------------------------------\n",
      " 15 train (0.5646575192610422, 0.9633333384990692)\n",
      " 15 test 22:40:07 loss:1.976464, acc:0.600000,score:0.606441\n",
      "-----------------------------------------\n",
      " 16 train (0.5100066661834717, 0.9666666587193807)\n",
      "testing...best_score\n",
      " 16 test 22:41:07 loss:1.721858, acc:0.620000,score:0.653944\n",
      "-----------------------------------------\n",
      " 17 train (0.4598675072193146, 0.9816666742165884)\n",
      " 17 test 22:42:07 loss:1.650040, acc:0.595000,score:0.604056\n",
      "-----------------------------------------\n",
      " 18 train (0.41428271929423016, 0.9716666837533315)\n",
      " 18 test 22:43:08 loss:1.782594, acc:0.610000,score:0.627217\n",
      "-----------------------------------------\n",
      " 19 train (0.3616102586189906, 0.9850000143051147)\n",
      " 19 test 22:44:08 loss:1.752748, acc:0.610000,score:0.618391\n",
      "-----------------------------------------\n",
      " 20 train (0.3300870557626088, 0.9833333392937978)\n",
      " 20 test 22:45:09 loss:1.699711, acc:0.620000,score:0.634484\n",
      "-----------------------------------------\n",
      " 21 train (0.2930952161550522, 0.9866666793823242)\n",
      " 21 test 22:46:10 loss:1.622011, acc:0.625000,score:0.626481\n",
      "-----------------------------------------\n",
      " 22 train (0.27577945838371914, 0.9883333444595337)\n",
      " 22 test 22:47:12 loss:1.560435, acc:0.620000,score:0.634532\n",
      "-----------------------------------------\n",
      " 23 train (0.2398418908317884, 0.9916666746139526)\n",
      " 23 test 22:48:14 loss:1.526679, acc:0.595000,score:0.607078\n",
      "-----------------------------------------\n",
      " 24 train (0.21073596676190695, 0.9916666746139526)\n",
      " 24 test 22:49:16 loss:1.483987, acc:0.600000,score:0.606798\n",
      "-----------------------------------------\n",
      " 25 train (0.20559796194235483, 0.9916666746139526)\n",
      " 25 test 22:50:18 loss:1.605105, acc:0.610000,score:0.632997\n",
      "-----------------------------------------\n",
      " 26 train (0.18238483120997748, 0.9900000095367432)\n",
      " 26 test 22:51:19 loss:1.679743, acc:0.620000,score:0.620215\n",
      "-----------------------------------------\n",
      " 27 train (0.1631112297375997, 0.9883333444595337)\n",
      " 27 test 22:52:21 loss:1.618310, acc:0.615000,score:0.616694\n",
      "-----------------------------------------\n",
      " 28 train (0.14917085071404776, 0.9900000095367432)\n",
      " 28 test 22:53:23 loss:1.594455, acc:0.635000,score:0.645218\n",
      "-----------------------------------------\n",
      " 29 train (0.1364263780415058, 0.9899999996026357)\n",
      " 29 test 22:54:25 loss:1.583776, acc:0.620000,score:0.634117\n",
      "-----------------------------------------\n",
      "best: 16 1.7218577527999879 0.62 0.6539437719640766\n",
      "(1.7953904819488526, 0.595, 0.6428099425608702)\n",
      "##############\n",
      "0.0005 300 50\n",
      " 1 train (2.62799604733785, 0.49166665971279144)\n",
      " 1 test 22:56:09 loss:2.479989, acc:0.460000,score:nan\n",
      "-----------------------------------------\n",
      " 2 train (2.2916287183761597, 0.5066666627923647)\n",
      "testing...best_score\n",
      " 2 test 22:57:21 loss:2.195616, acc:0.480000,score:0.392172\n",
      "-----------------------------------------\n",
      " 3 train (1.9596859316031139, 0.6283333425720533)\n",
      "testing...best_score\n",
      " 3 test 22:58:32 loss:1.921736, acc:0.575000,score:0.615529\n",
      "-----------------------------------------\n",
      " 4 train (1.6571853856245677, 0.7333333343267441)\n",
      " 4 test 22:59:43 loss:1.711809, acc:0.580000,score:0.592924\n",
      "-----------------------------------------\n",
      " 5 train (1.3397763470808666, 0.7783333162466685)\n",
      "testing...best_score\n",
      " 5 test 23:00:54 loss:1.591468, acc:0.620000,score:0.631364\n",
      "-----------------------------------------\n",
      " 6 train (1.108549326658249, 0.8316666632890701)\n",
      " 6 test 23:02:05 loss:1.531158, acc:0.590000,score:0.620268\n",
      "-----------------------------------------\n",
      " 7 train (0.8859962373971939, 0.8983333259820938)\n",
      "testing...best_score\n",
      " 7 test 23:03:16 loss:1.428677, acc:0.640000,score:0.640886\n",
      "-----------------------------------------\n",
      " 8 train (0.6966259628534317, 0.9316666622956594)\n",
      " 8 test 23:04:27 loss:1.575056, acc:0.605000,score:0.614055\n",
      "-----------------------------------------\n",
      " 9 train (0.5604868084192276, 0.94500000278155)\n",
      " 9 test 23:05:37 loss:1.683933, acc:0.590000,score:0.560809\n",
      "-----------------------------------------\n",
      " 10 train (0.4574138770500819, 0.9666666636864344)\n",
      " 10 test 23:06:48 loss:1.488573, acc:0.565000,score:0.594728\n",
      "-----------------------------------------\n",
      " 11 train (0.40466173986593884, 0.9600000033775965)\n",
      " 11 test 23:07:59 loss:1.468630, acc:0.600000,score:0.602679\n",
      "-----------------------------------------\n",
      " 12 train (0.3631432230273883, 0.9566666632890701)\n",
      " 12 test 23:09:09 loss:1.470236, acc:0.605000,score:0.623490\n",
      "-----------------------------------------\n",
      " 13 train (0.2900458288689454, 0.975000003973643)\n",
      " 13 test 23:10:20 loss:1.548925, acc:0.585000,score:0.602657\n",
      "-----------------------------------------\n",
      " 14 train (0.25354933614532155, 0.9833333392937978)\n",
      " 14 test 23:11:31 loss:1.504492, acc:0.570000,score:0.579420\n",
      "-----------------------------------------\n",
      " 15 train (0.20593382542332014, 0.9916666696468989)\n",
      " 15 test 23:12:42 loss:1.453292, acc:0.570000,score:0.610492\n",
      "-----------------------------------------\n",
      " 16 train (0.1633019850899776, 0.9866666694482168)\n",
      " 16 test 23:13:53 loss:1.531367, acc:0.595000,score:0.596559\n",
      "-----------------------------------------\n",
      " 17 train (0.14215822207430998, 0.9883333394924799)\n",
      " 17 test 23:15:04 loss:1.576319, acc:0.575000,score:0.601166\n",
      "-----------------------------------------\n",
      " 18 train (0.12028191300729911, 0.9916666746139526)\n",
      " 18 test 23:16:15 loss:1.592903, acc:0.570000,score:0.578344\n",
      "-----------------------------------------\n",
      " 19 train (0.1009846565624078, 0.9933333347241083)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " 19 test 23:17:25 loss:1.672245, acc:0.575000,score:0.602721\n",
      "-----------------------------------------\n",
      " 20 train (0.09028305982549985, 0.9950000047683716)\n",
      " 20 test 23:18:36 loss:1.642045, acc:0.570000,score:0.594216\n",
      "-----------------------------------------\n",
      " 21 train (0.09490241234501202, 0.9850000043710073)\n",
      " 21 test 23:19:47 loss:1.636522, acc:0.545000,score:0.586171\n",
      "-----------------------------------------\n",
      " 22 train (0.08617648265014093, 0.9900000095367432)\n",
      " 22 test 23:20:58 loss:1.559408, acc:0.560000,score:0.578358\n",
      "-----------------------------------------\n",
      " 23 train (0.09364790034790833, 0.9800000041723251)\n",
      " 23 test 23:22:09 loss:1.570071, acc:0.575000,score:0.607283\n",
      "-----------------------------------------\n",
      " 24 train (0.07048575486987829, 0.9933333396911621)\n",
      " 24 test 23:23:19 loss:1.451773, acc:0.595000,score:0.604382\n",
      "-----------------------------------------\n",
      " 25 train (0.10650528129190207, 0.9816666642824808)\n",
      "testing...best_score\n",
      " 25 test 23:24:30 loss:1.426225, acc:0.605000,score:0.646011\n",
      "-----------------------------------------\n",
      " 26 train (0.06601044939210017, 0.9916666696468989)\n",
      " 26 test 23:25:41 loss:1.504187, acc:0.595000,score:0.613995\n",
      "-----------------------------------------\n",
      " 27 train (0.0724665243178606, 0.9883333345254263)\n",
      " 27 test 23:26:52 loss:1.558290, acc:0.580000,score:0.595178\n",
      "-----------------------------------------\n",
      " 28 train (0.06885771732777357, 0.9900000045696894)\n",
      " 28 test 23:28:03 loss:1.541531, acc:0.580000,score:0.603041\n",
      "-----------------------------------------\n",
      " 29 train (0.05895123817026615, 0.9866666694482168)\n",
      " 29 test 23:29:14 loss:1.504297, acc:0.590000,score:0.616392\n",
      "-----------------------------------------\n",
      "best: 25 1.4262250757217407 0.605 0.646011396011396\n",
      "(1.4674957036972045, 0.6, 0.6387066541705717)\n",
      "##############\n",
      "0.0005 300 20\n",
      " 1 train (2.4434488296508787, 0.4900000035762787)\n",
      "testing...best_score\n",
      " 1 test 23:30:50 loss:2.143647, acc:0.555000,score:0.543723\n",
      "-----------------------------------------\n",
      " 2 train (1.8161318937937418, 0.6316666722297668)\n",
      "testing...best_score\n",
      " 2 test 23:31:53 loss:1.676338, acc:0.575000,score:0.607536\n",
      "-----------------------------------------\n",
      " 3 train (1.3026567379633585, 0.7650000035762787)\n",
      "testing...best_score\n",
      " 3 test 23:32:55 loss:1.366507, acc:0.605000,score:0.640506\n",
      "-----------------------------------------\n",
      " 4 train (0.9029784142971039, 0.8316666662693024)\n",
      " 4 test 23:33:57 loss:1.377989, acc:0.575000,score:0.528567\n",
      "-----------------------------------------\n",
      " 5 train (0.667766864101092, 0.8799999992052714)\n",
      " 5 test 23:35:00 loss:1.210490, acc:0.585000,score:0.604007\n",
      "-----------------------------------------\n",
      " 6 train (0.4750136464834213, 0.9183333277702331)\n",
      " 6 test 23:36:02 loss:1.378851, acc:0.585000,score:0.639453\n",
      "-----------------------------------------\n",
      " 7 train (0.3500050947070122, 0.9349999904632569)\n",
      "testing...best_score\n",
      " 7 test 23:37:05 loss:1.137018, acc:0.625000,score:0.654039\n",
      "-----------------------------------------\n",
      " 8 train (0.24817654490470886, 0.9683333277702332)\n",
      " 8 test 23:38:07 loss:1.308135, acc:0.605000,score:0.617011\n",
      "-----------------------------------------\n",
      " 9 train (0.23107775698105495, 0.9599999904632568)\n",
      " 9 test 23:39:10 loss:1.143767, acc:0.630000,score:0.635182\n",
      "-----------------------------------------\n",
      " 10 train (0.1497832367817561, 0.9816666622956594)\n",
      " 10 test 23:40:12 loss:1.154262, acc:0.625000,score:0.651619\n",
      "-----------------------------------------\n",
      " 11 train (0.09678488026062647, 0.9933333317438762)\n",
      " 11 test 23:41:15 loss:1.486075, acc:0.600000,score:0.606273\n",
      "-----------------------------------------\n",
      " 12 train (0.1342948320011298, 0.9783333281675974)\n",
      " 12 test 23:42:17 loss:1.241422, acc:0.590000,score:0.620344\n",
      "-----------------------------------------\n",
      " 13 train (0.11250734279553096, 0.9816666622956594)\n",
      " 13 test 23:43:19 loss:1.104344, acc:0.590000,score:0.607652\n",
      "-----------------------------------------\n",
      " 14 train (0.07839597910642623, 0.9899999976158143)\n",
      " 14 test 23:44:22 loss:1.303728, acc:0.595000,score:0.605020\n",
      "-----------------------------------------\n",
      " 15 train (0.07653734882672628, 0.9883333305517833)\n",
      " 15 test 23:45:24 loss:1.377530, acc:0.630000,score:0.625160\n",
      "-----------------------------------------\n",
      " 16 train (0.0598408247033755, 0.9966666658719381)\n",
      " 16 test 23:46:26 loss:1.326614, acc:0.610000,score:0.629582\n",
      "-----------------------------------------\n",
      " 17 train (0.05111117648581664, 0.9966666658719381)\n",
      " 17 test 23:47:29 loss:1.365388, acc:0.610000,score:0.621887\n",
      "-----------------------------------------\n",
      " 18 train (0.0570532791937391, 0.9899999976158143)\n",
      " 18 test 23:48:31 loss:1.282760, acc:0.600000,score:0.623892\n",
      "-----------------------------------------\n",
      " 19 train (0.04432917206237714, 0.9949999988079071)\n",
      " 19 test 23:49:34 loss:1.492128, acc:0.595000,score:0.625729\n",
      "-----------------------------------------\n",
      " 20 train (0.04553695004433393, 0.9916666646798452)\n",
      " 20 test 23:50:37 loss:1.562176, acc:0.605000,score:0.628744\n",
      "-----------------------------------------\n",
      " 21 train (0.08646094786624113, 0.9783333301544189)\n",
      " 21 test 23:51:39 loss:1.434961, acc:0.615000,score:0.652967\n",
      "-----------------------------------------\n",
      " 22 train (0.10439888158192237, 0.9816666642824808)\n",
      " 22 test 23:52:41 loss:1.655972, acc:0.580000,score:0.605200\n",
      "-----------------------------------------\n",
      " 23 train (0.08730644608537357, 0.9833333293596903)\n",
      " 23 test 23:53:44 loss:1.323199, acc:0.605000,score:0.614778\n",
      "-----------------------------------------\n",
      " 24 train (0.04515931246181329, 0.9966666658719381)\n",
      " 24 test 23:54:46 loss:1.360278, acc:0.625000,score:0.653053\n",
      "-----------------------------------------\n",
      " 25 train (0.0376281521593531, 0.9949999988079071)\n",
      " 25 test 23:55:49 loss:1.432118, acc:0.595000,score:0.628167\n",
      "-----------------------------------------\n",
      " 26 train (0.054691669158637525, 0.9849999964237213)\n",
      " 26 test 23:56:51 loss:1.545406, acc:0.585000,score:0.601395\n",
      "-----------------------------------------\n",
      " 27 train (0.04664989256610473, 0.9899999976158143)\n",
      " 27 test 23:57:54 loss:1.590958, acc:0.565000,score:0.614199\n",
      "-----------------------------------------\n",
      " 28 train (0.06921876557171344, 0.9883333305517833)\n",
      " 28 test 23:58:56 loss:1.521757, acc:0.565000,score:0.598039\n",
      "-----------------------------------------\n",
      "early stop\n",
      "best: 7 1.1370176815986632 0.625 0.6540386405959031\n",
      "(1.2047200107574463, 0.595, 0.6356088046809696)\n"
     ]
    }
   ],
   "source": [
    "# Hyperparameter tuning with Adam (original note: 调参 adam 0.001,100,50)\n",
    "# Grid search over learning rate x LSTM output size x batch size; each combo\n",
    "# trains for up to 30 epochs and reports test-set metrics for the best epoch.\n",
    "tan_config['optimizer'] = 'Adam'\n",
    "tan_config['epochs'] = 30\n",
    "tan_config['dropout'] = 0.5\n",
    "for lr in [0.001,0.0005]:\n",
    "    for lstm_out in [100,300]:\n",
    "        for batch_s in [100,50,20]:\n",
    "            # Separator + the combo being evaluated, so runs are easy to find in the log\n",
    "            print('##############')\n",
    "            print(lr,lstm_out,batch_s)\n",
    "            tan_config['batch_size'] = batch_s\n",
    "            tan_config['lr'] = lr\n",
    "            tan_config['lstm_output_size'] = lstm_out\n",
    "            # Build a fresh model for every hyperparameter combination\n",
    "            tat = TargetAttention2(tan_config)\n",
    "            tat.build(embedding_matrix)\n",
    "            # NOTE(review): retrain is hard-coded to '1', so fit() always runs;\n",
    "            # change it to anything else to skip training and only evaluate\n",
    "            # previously saved weights.\n",
    "            retrain = '1'\n",
    "            if retrain =='1':\n",
    "                tat.fit(train_x_t,train_y_t,valid_x_t,valid_y_t)\n",
    "            # Reload the best checkpoint before evaluating -- presumably fit()\n",
    "            # saves the best-scoring epoch to 'best.model'; TODO confirm, and note\n",
    "            # that the path is shared, so each combo overwrites the previous one.\n",
    "            tat.model.load_weights('best.model')\n",
    "            print(tat.evaluate(test_x_t,test_y_t))\n",
    "            # NOTE(review): del frees the Python object but not the underlying\n",
    "            # TF graph; K.clear_session() between combos would avoid graph growth.\n",
    "            del tat"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
