{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/lawbda/env/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  from ._conv import register_converters as _register_converters\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import pathlib\n",
    "import pickle\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "gpu_id='1'\n",
    "def init_env(gpu=None):\n",
    "    '''\n",
    "    Restrict CUDA to a single GPU.\n",
    "    `gpu` defaults to the module-level gpu_id, so existing init_env()\n",
    "    calls keep their behaviour; pass a string such as '0' to select a\n",
    "    different device.\n",
    "    '''\n",
    "    if gpu is None:\n",
    "        gpu = gpu_id\n",
    "    # PCI_BUS_ID makes CUDA device numbering match nvidia-smi output\n",
    "    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n",
    "    os.environ['CUDA_VISIBLE_DEVICES'] = gpu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pin GPU visibility before any TensorFlow session is created\n",
    "init_env()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.feature_extraction.text import TfidfVectorizer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import KFold"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.metrics import accuracy_score"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 构建一个给词编码的对象"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "class WordsNameNumber:\n",
    "    '''\n",
    "    Build a word-frequency dictionary from a corpus, sort the words by\n",
    "    descending frequency and assign each word an integer index so that\n",
    "    sentences can be encoded as index sequences for a CNN.\n",
    "    An optional stop-word list removes words before counting, and an\n",
    "    optional tokenizer replaces the default space split.\n",
    "    NOTE(review): indices actually start at 0 and transform() also maps\n",
    "    out-of-vocabulary words to 0, so index 0 is ambiguous (OOV vs. the\n",
    "    most frequent word).  Fixing this would change the embedding-table\n",
    "    sizes used downstream, so the behaviour is documented, not changed.\n",
    "    '''\n",
    "    def __init__(self, data=None, stopwords=None, tokenizer=None):\n",
    "        '''\n",
    "        data      : list of str sentences (words separated by spaces),\n",
    "                    same convention as sklearn feature extraction.\n",
    "        stopwords : list of str, one stop word per element.\n",
    "        tokenizer : callable str -> list of str; defaults to splitting\n",
    "                    on a single space (see __check).\n",
    "        '''\n",
    "        self.data = data\n",
    "        self.stopwords = stopwords\n",
    "        self.tokenizer = tokenizer\n",
    "\n",
    "        self.wordsdict = {}  # word -> raw frequency\n",
    "\n",
    "        # Vocabulary state, filled in by set_vocab()\n",
    "        self.vocab = []\n",
    "        self.vocab_size = 0\n",
    "        self.word2index = {}\n",
    "\n",
    "    def set_vocab(self, vocab=None, wordsdict=None, ratio=1.0, max_num=None):\n",
    "        '''\n",
    "        Unified vocabulary construction.\n",
    "        * wordsdict given: sort its words by descending frequency and keep\n",
    "          either max_num words, or (original heuristic, preserved as-is)\n",
    "          the words whose frequency stays above an increasing square\n",
    "          threshold until the kept fraction drops below `ratio`.\n",
    "        * wordsdict is None but vocab given: use vocab directly.\n",
    "        Always rebuilds self.word2index from the resulting self.vocab.\n",
    "        '''\n",
    "        if wordsdict is None:\n",
    "            if vocab is not None:\n",
    "                self.vocab = vocab\n",
    "                # Bug fix: keep vocab_size in sync when a vocabulary is\n",
    "                # supplied directly (it used to stay 0).\n",
    "                self.vocab_size = len(vocab)\n",
    "        else:\n",
    "            # Bug fix: sort the dictionary that was passed in (the old\n",
    "            # code sorted self.wordsdict, silently ignoring the argument).\n",
    "            self.vocab = sorted(wordsdict, key=lambda x: wordsdict[x], reverse=True)\n",
    "            if max_num is not None:\n",
    "                # Explicit cap: skip the ratio heuristic entirely, but\n",
    "                # never report more words than actually exist.\n",
    "                num_words = min(max_num, len(self.vocab))\n",
    "            else:\n",
    "                num_words = 0\n",
    "                # Heuristic: raise the frequency threshold through the\n",
    "                # squares 1, 4, 9, ..., 1024 until fewer than `ratio` of\n",
    "                # all words survive, then keep that many words.\n",
    "                for num in range(1, 33):\n",
    "                    num_words = sum(1 for w in wordsdict if wordsdict[w] >= num * num)\n",
    "                    ratio_i = float(num_words) / len(wordsdict)\n",
    "                    if ratio_i < ratio:\n",
    "                        break\n",
    "\n",
    "            self.vocab = self.vocab[:num_words]\n",
    "            self.vocab_size = num_words\n",
    "\n",
    "        # vocab is ordered by frequency, so the indices are too.\n",
    "        for index, w in enumerate(self.vocab):\n",
    "            self.word2index[w] = index\n",
    "\n",
    "    def fit(self, data=None, ratio=1.0, max_num=None):\n",
    "        '''\n",
    "        Count word frequencies over the corpus (skipping stop words) and\n",
    "        build the vocabulary / index via set_vocab().\n",
    "        Bug fixes: `ratio` is now forwarded to set_vocab (it used to be\n",
    "        hard-coded to 1.0) and a leftover debug print was removed.\n",
    "        '''\n",
    "        if data is not None:\n",
    "            self.data = data\n",
    "        if self.data is None:\n",
    "            print('No data')\n",
    "            return {}\n",
    "        self.__check()\n",
    "        for line in self.data:\n",
    "            words = self.tokenizer(line)\n",
    "            if len(words) == 0:\n",
    "                continue\n",
    "            for word in words:\n",
    "                if word in self.stopwords:\n",
    "                    continue\n",
    "                if word not in self.wordsdict:\n",
    "                    self.wordsdict[word] = 1\n",
    "                else:\n",
    "                    self.wordsdict[word] += 1\n",
    "\n",
    "        self.set_vocab(wordsdict=self.wordsdict, ratio=ratio, max_num=max_num)\n",
    "\n",
    "    def transform(self, data, padding=False):\n",
    "        '''\n",
    "        Tokenize each sentence and map every word to its vocabulary\n",
    "        index; out-of-vocabulary words map to 0.  Returns a numpy array\n",
    "        with one row per sentence.  With padding=True every row is\n",
    "        right-padded with 0 up to the longest sentence, and that length\n",
    "        is stored in self.sentence_length.\n",
    "        '''\n",
    "        if data is None:\n",
    "            print('No data')\n",
    "            return np.array([])\n",
    "        if type(data) != list:\n",
    "            data = [data]\n",
    "        self.__check()\n",
    "        numMatrix = []\n",
    "        maxLen = 0\n",
    "\n",
    "        for line in data:\n",
    "            words = self.tokenizer(line)\n",
    "            maxLen = len(words) if maxLen < len(words) else maxLen\n",
    "            # Encode through word2index so a custom vocabulary installed\n",
    "            # via set_vocab() is honoured as well.\n",
    "            numMatrix.append([self.word2index.get(w, 0) for w in words])\n",
    "\n",
    "        if padding:\n",
    "            for num, _ in enumerate(numMatrix):\n",
    "                numMatrix[num] += (maxLen - len(numMatrix[num])) * [0]\n",
    "        self.sentence_length = maxLen\n",
    "        return np.array(numMatrix)\n",
    "\n",
    "    def __check(self):\n",
    "        '''\n",
    "        Make sure tokenizer and stopwords are usable: default the\n",
    "        tokenizer to a space split, warm up a user-supplied tokenizer\n",
    "        once, and default stopwords to an empty list.\n",
    "        '''\n",
    "        if self.tokenizer is None:\n",
    "            self.tokenizer = lambda x: x.split(' ')\n",
    "        else:\n",
    "            # One warm-up call so lazily-initialised tokenizers pay their\n",
    "            # start-up cost here instead of inside the counting loop.\n",
    "            self.tokenizer('我爱北京天安门，天安门上太阳升')\n",
    "        if self.stopwords is None:\n",
    "            self.stopwords = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# # Old (deprecated) implementation of the word-numbering class — kept commented out for reference only; consider deleting this cell.\n",
    "# class WordsNameNumber:\n",
    "#     '''\n",
    "#     输入一个篇章 处理 得到词频字典\n",
    "#     对字典进行排序 按照词频由大到小\n",
    "#     按照排序结果给词进行编码 由 1 开始（便于在cnn中使用）\n",
    "#     可以设置停用词词典 若有词典则在进行排序时就删除这些词\n",
    "#     '''\n",
    "#     def __init__(self,data=None,stopwords=None,tokenizer=None):\n",
    "#         '''\n",
    "#         输入有三个\n",
    "#         data 表示输入的篇章 和sklearn中的feature_extraction保持一致\n",
    "#         输入一个list list中为string句子 词之间用空格分开\n",
    "#         stopwords 为一个list 一个元素代表一个停用词\n",
    "#         tokenizer表示分词器 默认用空格进行分词 可以传入一个函数 之后会用于进行分词\n",
    "#         '''\n",
    "#         self.data = data\n",
    "#         self.stopwords = stopwords\n",
    "#         self.tokenizer = tokenizer\n",
    "        \n",
    "#         self.wordsdict = {}\n",
    "#         self.numdict = {}\n",
    "        \n",
    "#         # 词表相关\n",
    "#         self.vocab = []\n",
    "#         self.vocab_size = 0\n",
    "        \n",
    "#     def fit(self,data=None):\n",
    "#         '''\n",
    "#         构建词典\n",
    "#         之后根据stopwords进行修正\n",
    "#         之后按照词频排序\n",
    "#         之后输出编码词典\n",
    "#         '''\n",
    "#         if not None == data:\n",
    "#             self.data = data\n",
    "#         if None == self.data:\n",
    "#             print('No data')\n",
    "#             return {}\n",
    "#         self.__check()\n",
    "#         for line in self.data:\n",
    "#             words = self.tokenizer(line)\n",
    "#             if len(words) == 0:\n",
    "#                 continue\n",
    "#             for word in words:\n",
    "#                 if word in self.stopwords:\n",
    "#                     continue\n",
    "#                 if not word in self.wordsdict.keys():\n",
    "#                     self.wordsdict[word] = 1\n",
    "#                 else:\n",
    "#                     self.wordsdict[word] += 1\n",
    "#         wordsList = sorted(self.wordsdict,key=lambda x:self.wordsdict[x],reverse=True)\n",
    "#         self.vocab = wordsList\n",
    "#         self.vocab_size = len(wordsList)\n",
    "        \n",
    "#         for num,key in enumerate(wordsList,start=1):\n",
    "#             self.numdict[key] = num\n",
    "#         print('Complete!')\n",
    "        \n",
    "#     def transform(self,data,padding=False):\n",
    "#         '''\n",
    "#         输入一个篇章 使用tokenizer进行分词之后\n",
    "#         按照已经fit得到的编号字典对篇章进行编号\n",
    "#         输出一个词编号的矩阵 格式为numpy.array\n",
    "#         每一行表示一个篇章中的句子的编码形式\n",
    "#         当padding为True时 输出需要保持所有的行长度一致\n",
    "#         此处需要注意 当transform输入的词存在集外词的时候 默认将其编号为0\n",
    "#         '''\n",
    "#         if None == data:\n",
    "#             print('No data')\n",
    "#             return np.array([])\n",
    "#         if not type(data) == list:\n",
    "#             data = [data]\n",
    "#         self.__check()\n",
    "#         numMatrix = []\n",
    "#         maxLen = 0\n",
    "#         for line in data:\n",
    "#             words = self.tokenizer(line)\n",
    "#             maxLen = len(words) if maxLen < len(words) else maxLen\n",
    "#             numMatrix.append([0 if i not in self.numdict.keys() else self.numdict[i] for i in words])\n",
    "#         if True == padding:\n",
    "#             for num,_ in enumerate(numMatrix):\n",
    "#                 numMatrix[num] += (maxLen - len(numMatrix[num]))*[0]\n",
    "#         self.sentence_length = maxLen\n",
    "#         return np.array(numMatrix)\n",
    "    \n",
    "#     def __check(self):\n",
    "#         '''\n",
    "#         用于检查目前将要执行的操作是否条件完备\n",
    "#         一些程序运行共有条件的check机制\n",
    "#         主要是类中的tokenizer是否正确\n",
    "#         '''\n",
    "#         if None == self.tokenizer:\n",
    "#             self.tokenizer = lambda x:x.split(' ')\n",
    "#         if None == self.stopwords:\n",
    "#             self.stopwords = []"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 语料准备"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the raw corpus; the context manager closes the file handle\n",
    "# (the original inline open().readlines() left it open).\n",
    "with open('./data/DoubanZH.txt', 'r', encoding='utf-8') as f:\n",
    "    content = [line.strip() for line in f]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "label = []\n",
    "sentence = []\n",
    "# Each line is '<score>,<review text>'.  Split on the FIRST comma only,\n",
    "# so a review that itself contains commas no longer raises a ValueError\n",
    "# during tuple unpacking (bug fix: maxsplit=1).\n",
    "for line in content:\n",
    "    l, s = line.split(',', 1)\n",
    "    # label 0 when the score field is '10', else 1\n",
    "    label.append(0 if l == '10' else 1)\n",
    "    sentence.append(s)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Assemble the corpus into a two-column DataFrame\n",
    "df = pd.DataFrame(data=list(zip(sentence, label)), columns=['sentence', 'label'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# Plain-Python list of all review sentences\n",
    "document = df['sentence'].tolist()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 词编号 为cnn结构进行准备"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Word-to-index encoder for the corpus\n",
    "wnn = WordsNameNumber()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "None\n"
     ]
    }
   ],
   "source": [
    "# Build the vocabulary from the corpus.  The 'None' in this cell's\n",
    "# recorded output comes from a debug print of max_num inside set_vocab.\n",
    "wnn.fit(document)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# Encode every sentence as a row of word indices, right-padded with 0\n",
    "# up to the longest sentence in the corpus\n",
    "numbering = wnn.transform(document,padding=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(274012, 192)"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# (num_sentences, max_sentence_length)\n",
    "numbering.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 定义一个取样本batch的对象"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "class batcher:\n",
    "    '''\n",
    "    Draw random batches of a given size from a dataset.\n",
    "    NOTE(review): seeding uses the GLOBAL numpy RNG, so constructing a\n",
    "    batcher reseeds np.random for the whole process.\n",
    "    '''\n",
    "    def __init__(self, seed=17):\n",
    "        '''Seed the (global) numpy random number generator.'''\n",
    "        np.random.seed(seed)\n",
    "\n",
    "    def __randint(self, maxlen, batch_size):\n",
    "        '''\n",
    "        Return batch_size distinct random indices in [0, maxlen).\n",
    "        Bug fix: the old rejection-sampling while-loop spun forever when\n",
    "        batch_size > maxlen; np.random.choice(replace=False) raises a\n",
    "        ValueError instead and runs in O(maxlen).\n",
    "        '''\n",
    "        return np.random.choice(maxlen, size=batch_size, replace=False)\n",
    "\n",
    "    def get_batch(self, data, label, batch_size):\n",
    "        '''\n",
    "        Return (features, labels) for one random batch of batch_size\n",
    "        rows; `data` must support numpy fancy indexing.\n",
    "        '''\n",
    "        feature = data\n",
    "        mark = np.array(label)\n",
    "        maxlen = len(label)\n",
    "        randint = self.__randint(maxlen, batch_size)\n",
    "        return feature[randint], mark[randint]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "# numbering = numbering[:20000]\n",
    "# label = label[:20000]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 80% train / 10% test / 10% dev split boundaries\n",
    "border = int(numbering.shape[0] * 0.8)\n",
    "border_dev = int(numbering.shape[0] * 0.9)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode the binary labels: class 0 -> [1, 0], class 1 -> [0, 1]\n",
    "newLabel = np.array([[1, 0] if i == 0 else [0, 1] for i in label])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Slice features and one-hot labels into train / test / dev partitions.\n",
    "# NOTE(review): the split is positional, not shuffled - if the source\n",
    "# file is ordered (e.g. by label) the partitions may be skewed; verify.\n",
    "train_x =numbering[:border]\n",
    "test_x = numbering[border:border_dev]\n",
    "dev_x = numbering[border_dev:]\n",
    "\n",
    "train_y = newLabel[:border]\n",
    "test_y = newLabel[border:border_dev]\n",
    "dev_y = newLabel[border_dev:]\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 开始构建CNN"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 论文 Convolutional Neural Networks for Sentence Classification\n",
    "https://arxiv.org/abs/1408.5882\n",
    "\n",
    "使用CNN进行文本分类，本次结构参照论文中结构，使用三种卷积核大小：3，4，5。"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "# One batch sampler per partition.  NOTE(review): each constructor\n",
    "# reseeds the shared global numpy RNG with the same default seed.\n",
    "bcher_train = batcher()\n",
    "bcher_test = batcher()\n",
    "bcher_dev = batcher()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "class model_text_cnn:\n",
    "    '''\n",
    "    TextCNN for sentence classification (Kim 2014, arXiv:1408.5882):\n",
    "    embedding lookup -> parallel conv + max-pool branches (one branch\n",
    "    per filter size) -> concat -> dropout -> linear output layer ->\n",
    "    softmax cross-entropy loss.\n",
    "    '''\n",
    "    def __init__(self,sentence_length,output_classes,vocabe_size,embedding_size,filter_sizes,num_filters,l2_reg_lambda=0.0):\n",
    "        '''\n",
    "        sentence_length : length of every (already padded) input sentence\n",
    "        output_classes  : number of classes (2 for binary classification)\n",
    "        vocabe_size     : vocabulary size (rows of the embedding table)\n",
    "        embedding_size  : dimensionality of the word embeddings\n",
    "        filter_sizes    : iterable of convolution window heights, e.g. [3,4,5]\n",
    "        num_filters     : number of filters per window size\n",
    "        l2_reg_lambda   : weight of the L2 penalty on the output layer\n",
    "        '''\n",
    "        # Graph inputs: word-index matrix, one-hot labels, dropout keep prob\n",
    "        self.x = tf.placeholder(tf.int32, shape = [None,sentence_length], name='input_x')\n",
    "        self.y = tf.placeholder(tf.float32, shape = [None,output_classes], name='output_y')\n",
    "        self.dropout_keep_prob = tf.placeholder(tf.float32,name='dropout_keep_prob')\n",
    "\n",
    "        # Accumulator for the L2 penalty (only the output layer feeds it)\n",
    "        l2_loss = tf.constant(0.0)\n",
    "\n",
    "\n",
    "        with tf.name_scope('embedding'):\n",
    "            W = tf.Variable(tf.random_uniform([vocabe_size,embedding_size],-1.0,1.0),name='W')\n",
    "            # Look up the embedding vector for every word index in x\n",
    "            self.embedded_chars = tf.nn.embedding_lookup(W,self.x)\n",
    "            # Append a trailing channel dimension (axis -1): conv2d\n",
    "            # expects a 4-D input of [batch, height, width, channels]\n",
    "            self.embedded_chars_expanded = tf.expand_dims(self.embedded_chars, -1)\n",
    "\n",
    "\n",
    "        with tf.device('/gpu:0'),tf.name_scope('Conv'):\n",
    "\n",
    "            # Collect the max-pooled output of every filter size so the\n",
    "            # branches can be concatenated afterwards\n",
    "            pooled_outputs = []\n",
    "            for i,filter_size in enumerate(filter_sizes):\n",
    "                # One conv + max-pool branch per filter size\n",
    "                with tf.name_scope('conv-maxpool-%s' % filter_size):\n",
    "                    # Each kernel spans filter_size words over the full\n",
    "                    # embedding width; num_filters kernels per size\n",
    "                    filter_shape = [filter_size, embedding_size, 1,num_filters]\n",
    "                    W_c = tf.Variable(tf.truncated_normal(filter_shape, stddev=0.1),name='W_c')\n",
    "                    # NOTE(review): name='b' is attached to the constant\n",
    "                    # initializer here, not to the Variable itself\n",
    "                    b_c = tf.Variable(tf.constant(0.1,shape=[num_filters],name='b'))\n",
    "\n",
    "                    conv = tf.nn.conv2d(\n",
    "                        self.embedded_chars_expanded,\n",
    "                        W_c,\n",
    "                        strides=[1, 1, 1, 1],\n",
    "                        padding='VALID',\n",
    "                        name='conv'\n",
    "                    )\n",
    "\n",
    "                    h = tf.nn.relu(tf.nn.bias_add(conv,b_c),name='relu')\n",
    "\n",
    "                    pooled = tf.nn.max_pool(\n",
    "                        h,\n",
    "                        # Pool over the whole feature map produced by the\n",
    "                        # VALID convolution (sentence_length - filter_size + 1\n",
    "                        # positions), leaving one value per filter\n",
    "                        ksize=[1, sentence_length - filter_size + 1, 1,1],\n",
    "                        # Unit strides in every dimension\n",
    "                        strides=[1,1,1,1],\n",
    "                        padding='VALID',\n",
    "                        name='pool'\n",
    "                    )\n",
    "                    pooled_outputs.append(pooled)\n",
    "\n",
    "            # Total number of features after concatenating all branches\n",
    "            num_filters_total = num_filters * len(filter_sizes)\n",
    "            # Concatenate the pooled outputs along the channel axis ...\n",
    "            self.h_pool = tf.concat(pooled_outputs, 3)\n",
    "            # ... and flatten to [batch, num_filters_total]\n",
    "            self.h_pool_flat = tf.reshape(self.h_pool, [-1, num_filters_total])\n",
    "\n",
    "            # NOTE(review): all scopes below are nested inside the\n",
    "            # tf.device('/gpu:0') block above; confirm that is intended.\n",
    "            with tf.name_scope('dropout'):\n",
    "                self.h_drop = tf.nn.dropout(self.h_pool_flat,self.dropout_keep_prob)\n",
    "\n",
    "            with tf.name_scope('output'):\n",
    "                W_o = tf.get_variable(\n",
    "                    'W_o',\n",
    "                    shape=[num_filters_total,output_classes],\n",
    "                    initializer=tf.contrib.layers.xavier_initializer()\n",
    "                )\n",
    "                b_o = tf.Variable(tf.constant(0.1,shape=[output_classes]),name='b_o')\n",
    "                # Only the output layer contributes to the L2 penalty\n",
    "                l2_loss += tf.nn.l2_loss(W_o)\n",
    "                l2_loss += tf.nn.l2_loss(b_o)\n",
    "                self.scores = tf.nn.xw_plus_b(self.h_drop,W_o,b_o,name='scores')\n",
    "                # argmax over the logits yields the predicted class index\n",
    "                self.predictions = tf.argmax(self.scores,1,name='predictions')\n",
    "\n",
    "            with tf.name_scope('loss'):\n",
    "                losses = tf.nn.softmax_cross_entropy_with_logits(labels=self.y, logits=self.scores)\n",
    "                self.loss = tf.reduce_mean(losses) + l2_reg_lambda * l2_loss\n",
    "\n",
    "            with tf.name_scope('accuracy'):\n",
    "                # Compare predicted class indices against the argmax of\n",
    "                # the one-hot labels, so y must be [batch, output_classes]\n",
    "                correct_prediction = tf.equal(self.predictions, tf.argmax(self.y, 1))\n",
    "                self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, 'float'), name='accuracy')\n",
    "\n",
    "            with tf.name_scope('num_correct'):\n",
    "                correct_prediction = tf.equal(self.predictions, tf.argmax(self.y, 1))\n",
    "                self.num_correct = tf.reduce_sum(tf.cast(correct_prediction, 'float'), name='num_correct')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 训练和测试过程"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "# CNN hyper-parameters (filter sizes 3/4/5 follow the Kim 2014 paper)\n",
    "sentence_length = wnn.sentence_length\n",
    "vocabe_size = wnn.vocab_size\n",
    "embedding_size = 100\n",
    "filter_sizes = [3,4,5]\n",
    "num_filters = 32"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): this instance is built in the default graph and appears\n",
    "# unused - the training cell below constructs its own tf.Graph and its\n",
    "# own model_text_cnn; consider removing this cell.\n",
    "model = model_text_cnn(sentence_length=sentence_length, output_classes=2, vocabe_size=vocabe_size,embedding_size=embedding_size,\n",
    "                  filter_sizes=filter_sizes, num_filters=num_filters)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch:0\n",
      "Dev acc:0.8801912269177432\n",
      "Epoch:1\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-26-d797edd38baa>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     63\u001b[0m                 \u001b[0mbatch_train_x\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mbatch_train_y\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbcher_train\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_batch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_x\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtrain_y\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m512\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     64\u001b[0m                 \u001b[0;31m# 训练轮数为 train_y总长度 / batch_size\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 65\u001b[0;31m                 \u001b[0mtrain_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_train_x\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mbatch_train_y\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     66\u001b[0m                 \u001b[0mcurrent_step\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mglobal_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msess\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglobal_step\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     67\u001b[0m             \u001b[0;31m# 训练完一次之后在dev上进行测试\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-26-d797edd38baa>\u001b[0m in \u001b[0;36mtrain_step\u001b[0;34m(batch_x, batch_y)\u001b[0m\n\u001b[1;32m     41\u001b[0m                 \u001b[0mcnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdropout_keep_prob\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m1.0\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     42\u001b[0m             }\n\u001b[0;32m---> 43\u001b[0;31m             \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0macc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mtrain_op\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mglobal_step\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccuracy\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     44\u001b[0m         \u001b[0;31m# 定义一个函数 针对每一次测试和验证 进行一个batch的测试或者验证\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     45\u001b[0m         \u001b[0;32mdef\u001b[0m \u001b[0mdev_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdev_x\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdev_y\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/env/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    887\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    888\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 889\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    890\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    891\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/env/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1118\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1119\u001b[0m       results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1120\u001b[0;31m                              feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m   1121\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1122\u001b[0m       \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/env/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1315\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1316\u001b[0m       return self._do_call(_run_fn, self._session, feeds, fetches, targets,\n\u001b[0;32m-> 1317\u001b[0;31m                            options, run_metadata)\n\u001b[0m\u001b[1;32m   1318\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1319\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/env/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m   1321\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1322\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1323\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1324\u001b[0m     \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1325\u001b[0m       \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/env/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(session, feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m   1300\u001b[0m           return tf_session.TF_Run(session, options,\n\u001b[1;32m   1301\u001b[0m                                    \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1302\u001b[0;31m                                    status, run_metadata)\n\u001b[0m\u001b[1;32m   1303\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1304\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msession\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# Build and train the CNN text classifier inside an isolated graph/session.\n",
    "graph = tf.Graph()\n",
    "with graph.as_default():\n",
    "    session_conf = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)\n",
    "    sess = tf.Session(config=session_conf)\n",
    "    \n",
    "    # Open the session here\n",
    "    with sess.as_default():\n",
    "        writer = tf.summary.FileWriter('./tfb_file/cnn/1')\n",
    "        # Build the CNN model\n",
    "        cnn = model_text_cnn(\n",
    "            sentence_length=sentence_length, \n",
    "            output_classes=2, \n",
    "            vocabe_size=vocabe_size,\n",
    "            embedding_size=embedding_size,\n",
    "            filter_sizes=filter_sizes, \n",
    "            num_filters=num_filters)\n",
    "        \n",
    "        global_step = tf.Variable(0, name='global_step', trainable=False)\n",
    "        optimizer = tf.train.AdamOptimizer(0.01)\n",
    "        \n",
    "        # compute_gradients and apply_gradients are the two halves of minimize():\n",
    "        # compute_gradients returns a list of (gradient, variable) tuples,\n",
    "        grads_and_vars = optimizer.compute_gradients(cnn.loss)\n",
    "        # and apply_gradients applies them to the variables, returning an\n",
    "        # Operation that also increments global_step.\n",
    "        train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)\n",
    "        \n",
    "        # Run one training batch.\n",
    "        # NOTE(review): dropout_keep_prob is fed as 1.0 even during training,\n",
    "        # so dropout is effectively disabled -- confirm this is intended.\n",
    "        def train_step(batch_x, batch_y):\n",
    "            feed_dict = {\n",
    "                cnn.x: batch_x,\n",
    "                cnn.y: batch_y,\n",
    "                cnn.dropout_keep_prob: 1.0\n",
    "            }\n",
    "            _, step, loss, acc = sess.run([train_op, global_step, cnn.loss, cnn.accuracy], feed_dict)\n",
    "        \n",
    "        # Evaluate one batch (no train op); returns the number of correctly\n",
    "        # classified examples in the batch.\n",
    "        def dev_step(dev_x, dev_y):\n",
    "            feed_dict = {\n",
    "                cnn.x: dev_x,\n",
    "                cnn.y: dev_y,\n",
    "                cnn.dropout_keep_prob: 1.0\n",
    "            }\n",
    "            step, loss, acc, num_correct = sess.run([global_step, cnn.loss, cnn.accuracy, cnn.num_correct], feed_dict)\n",
    "            return num_correct\n",
    "        \n",
    "        max_acc = -np.inf\n",
    "        early_stop = 0\n",
    "        \n",
    "        writer.add_graph(sess.graph)\n",
    "        sess.run(tf.global_variables_initializer())\n",
    "        for i in range(1000):\n",
    "            print('Epoch:{0}'.format(i))\n",
    "            # ------------------------- training -----------------------------\n",
    "            # steps per epoch = len(train_y) / batch_size\n",
    "            for p in range(0, int(len(train_y) / 512)):\n",
    "                batch_train_x, batch_train_y = bcher_train.get_batch(train_x, train_y, 512)\n",
    "                train_step(batch_train_x, batch_train_y)\n",
    "                current_step = tf.train.global_step(sess, global_step)\n",
    "            # After each epoch, evaluate on the dev set.\n",
    "            \n",
    "            # ------------------------- dev evaluation ------------------------\n",
    "            total_dev_correct = 0\n",
    "            for q in range(0, int(len(dev_y) / 128)):\n",
    "                batch_dev_x, batch_dev_y = bcher_dev.get_batch(dev_x, dev_y, 128)\n",
    "                num_dev_correct = dev_step(batch_dev_x, batch_dev_y)\n",
    "                total_dev_correct += num_dev_correct\n",
    "            dev_acc = float(total_dev_correct) / len(dev_y)\n",
    "            print('Dev acc:{0}'.format(dev_acc))\n",
    "            \n",
    "            # ------------------------ early stopping -------------------------\n",
    "            # Track the best dev accuracy seen so far.\n",
    "            if max_acc > dev_acc:\n",
    "                early_stop += 1\n",
    "            else:\n",
    "                max_acc = dev_acc\n",
    "                early_stop = 0\n",
    "            \n",
    "            # Stop when dev accuracy has not improved for more than 20 epochs.\n",
    "            if early_stop > 20:\n",
    "                break\n",
    "        # ---------------------------- final test -----------------------------\n",
    "        # After training finishes, run a single pass over the test set.\n",
    "        print('Best acc:{0}'.format(max_acc))\n",
    "        total_test_correct = 0\n",
    "        # BUG FIX: the loop bound previously used len(dev_y) while consuming\n",
    "        # test batches; it must use len(test_y) so the whole test set is\n",
    "        # covered (the accuracy denominator below already uses len(test_y)).\n",
    "        for r in range(0, int(len(test_y) / 128)):\n",
    "            batch_test_x, batch_test_y = bcher_test.get_batch(test_x, test_y, 128)\n",
    "            num_test_correct = dev_step(batch_test_x, batch_test_y)\n",
    "            total_test_correct += num_test_correct\n",
    "        test_acc = float(total_test_correct) / len(test_y)\n",
    "        print('Test acc:{0}'.format(test_acc))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [],
   "source": [
    "class model_text_rnn:\n",
    "    def __init__(self,sentence_length,output_classes,vocab_size,embedding_size,num_units):\n",
    "        '''Embedding -> LSTM -> conv reduction -> dense, 2-class text classifier.\n",
    "        \n",
    "        Design notes (translated from the original Chinese):\n",
    "        The RNN input here is batch * sentence_length (sl for short): a batch\n",
    "        of sentences, each already padded to length sl. Inside the RNN cell\n",
    "        the input is consumed word by word, so the tensor actually fed per\n",
    "        sentence is sl * embedding_dim -- one sentence acts as the word batch\n",
    "        (padding marked with <unk> or <end> for sentence end). Each word id\n",
    "        must first pass through an embedding to become a vector before the\n",
    "        RNN can operate on it. This is where the design differs from the CNN:\n",
    "        the network structure dictates how the input must be shaped, which is\n",
    "        worth thinking about later; hence the network is redesigned here.\n",
    "        \n",
    "        Args:\n",
    "            sentence_length: fixed (padded) length of each input sentence.\n",
    "            output_classes: number of target classes (the output layer below\n",
    "                is hard-coded to 2 units -- TODO confirm they stay in sync).\n",
    "            vocab_size: vocabulary size for the embedding table.\n",
    "            embedding_size: dimensionality of the word embeddings.\n",
    "            num_units: hidden units in the LSTM cell.\n",
    "        '''\n",
    "        \n",
    "        # Placeholders: word-id matrix, one-hot labels, and dropout keep prob.\n",
    "        self.x = tf.placeholder(dtype = tf.int32, shape=[None, sentence_length],name='input_x')\n",
    "        self.y = tf.placeholder(dtype = tf.int32, shape=[None, output_classes], name='input_y')\n",
    "        self.dropout_keep_prob = tf.placeholder(dtype= tf.float32, name='dropout_keep_prob')\n",
    "        \n",
    "        with tf.name_scope('Embedding'):\n",
    "            W = tf.Variable(tf.random_uniform([vocab_size,embedding_size],-1.0,1.0),name='W')\n",
    "            # Look up the embedding vector for every word id.\n",
    "            self.embedded_chars = tf.nn.embedding_lookup(W,self.x)\n",
    "            \n",
    "            \n",
    "            \n",
    "#             only_conv = tf.expand_dims(self.embedded_chars,axis=-1)\n",
    "#         with tf.name_scope('Real_CNN_layer'):\n",
    "#             filter_cell = tf.Variable(tf.truncated_normal([3,embedding_size,1,1], stddev=0.1))\n",
    "#             conv_layer = tf.nn.conv2d(only_conv,filter=filter_cell,strides=[1,1,1,1],padding='VALID')\n",
    "#             maxpool = tf.nn.max_pool(conv_layer,ksize=[1,1,1,1],strides=[1,1,1,1],padding='VALID')\n",
    "#             maxpool = tf.squeeze(maxpool,axis=-1)\n",
    "#             maxpool = tf.squeeze(maxpool,axis=-1)\n",
    "        \n",
    "        \n",
    "        \n",
    "        \n",
    "        with tf.name_scope('Main_rnn'):\n",
    "            # Set up an RNN cell\n",
    "#             rnn_cell = tf.contrib.rnn.BasicRNNCell(num_units=num_units,activation=tf.nn.relu)\n",
    "            \n",
    "            rnn_cell = tf.contrib.rnn.BasicLSTMCell(num_units=num_units,activation=tf.nn.relu)\n",
    "            # initial_state = rnn_cell.zero_state(sentence_length, dtype=tf.float32)\n",
    "#             print('init state shape',initial_state.get_shape())\n",
    "            rnn_with_dropout = tf.contrib.rnn.DropoutWrapper(rnn_cell,output_keep_prob=self.dropout_keep_prob)\n",
    "            out, state = tf.nn.dynamic_rnn(cell=rnn_with_dropout,\n",
    "                                           inputs=self.embedded_chars,\n",
    "                                           time_major=False,\n",
    "                                           dtype=tf.float32)\n",
    "        \n",
    "        # Open question (translated): each sentence carries a label, but the\n",
    "        # RNN cell consumes words one sentence-length batch at a time, so the\n",
    "        # granularity changes -- how should labels map onto per-word inputs?\n",
    "        \n",
    "        # Reduce the per-timestep RNN outputs with a CNN layer.\n",
    "        \n",
    "        with tf.name_scope('Cnn_reduce_layer'):\n",
    "            \n",
    "            # For a 2D conv the filter dims are: filter_height, filter_width, in_channels, out_channels\n",
    "            filter_m = tf.Variable(tf.truncated_normal([3,num_units,1,1], stddev=0.1))\n",
    "            \n",
    "            # Add a trailing dim so the tensor is batch, height, width, channel\n",
    "            out = tf.expand_dims(out,axis=-1)\n",
    "            \n",
    "            # NOTE(review): with ksize and strides all 1, this max_pool is an\n",
    "            # identity op -- confirm a real pooling window was intended.\n",
    "            conv2Reduce = tf.nn.conv2d(input=out,filter=filter_m, strides=[1,1,1,1], padding='VALID',)\n",
    "            conv2MaxPooling = tf.nn.max_pool(conv2Reduce,ksize=[1,1,1,1],strides=[1,1,1,1],padding='VALID')\n",
    "            conv2MaxPooling = tf.squeeze(conv2MaxPooling,axis=-1)\n",
    "            conv2MaxPooling = tf.squeeze(conv2MaxPooling,axis=-1)\n",
    "            \n",
    "            \n",
    "        with tf.name_scope('Dense'):\n",
    "            # NOTE(review): random_uniform without bounds draws from [0, 1) --\n",
    "            # all-positive initial weights; confirm this initialization is intended.\n",
    "            W = tf.Variable(tf.random_uniform([sentence_length - 3 + 1,32],dtype=tf.float32))\n",
    "            b = tf.Variable(tf.random_uniform([32],dtype=tf.float32))\n",
    "            d_out = tf.add(tf.matmul(conv2MaxPooling,W),b)\n",
    "            d_out = tf.sigmoid(d_out)\n",
    "            \n",
    "        with tf.name_scope('Output'):\n",
    "            W_o = tf.Variable(tf.random_uniform([32,2],dtype=tf.float32))\n",
    "            b_o = tf.Variable(tf.random_uniform([2],dtype=tf.float32))\n",
    "\n",
    "            # Keep the pre-sigmoid logits in d_out for the loss below.\n",
    "            d_out = tf.add(tf.matmul(d_out,W_o),b_o)\n",
    "            self.d_out = tf.sigmoid(d_out)\n",
    "        \n",
    "        with tf.name_scope('loss'):\n",
    "            # sigmoid_cross_entropy expects logits, i.e. the pre-sigmoid d_out.\n",
    "            losses = tf.losses.sigmoid_cross_entropy(multi_class_labels=self.y,logits=d_out)\n",
    "            self.loss = tf.reduce_mean(losses)\n",
    "\n",
    "        with tf.name_scope('accuracy'):\n",
    "            # Fraction of examples whose argmax prediction matches the label.\n",
    "            prediction = tf.argmax(self.d_out, 1)\n",
    "            label = tf.argmax(self.y,1)\n",
    "            self.accuracy = tf.reduce_mean(tf.cast(tf.equal(prediction, label),tf.float32))\n",
    "            \n",
    "        with tf.name_scope('num_correct'):\n",
    "            # Absolute count of correct predictions in the batch.\n",
    "            prediction = tf.argmax(self.d_out, 1)\n",
    "            label = tf.argmax(self.y,1)\n",
    "            self.num_correct = tf.reduce_sum(tf.cast(tf.equal(prediction,label),tf.float32))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hyperparameters for the RNN model; sentence_length and vocab_size come\n",
    "# from the fitted WordsNameNumber encoder (wnn) defined earlier.\n",
    "sentence_length = wnn.sentence_length\n",
    "vocab_size = wnn.vocab_size\n",
    "embedding_size = 100\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step:0\n",
      "Train acc:  0.55859375 loss:1.2862372398376465 cnum:286.0\n",
      "Train acc:  0.64453125 loss:1.0131597518920898 cnum:330.0\n",
      "Train acc: 0.607421875 loss:0.9189011454582214 cnum:311.0\n",
      "Train acc: 0.607421875 loss:0.9977747201919556 cnum:311.0\n",
      "Train acc: 0.599609375 loss:0.908352255821228 cnum:307.0\n",
      "Train acc:  0.61328125 loss:0.8313025236129761 cnum:314.0\n",
      "Train acc:    0.640625 loss:0.85401451587677 cnum:328.0\n",
      "Train acc:     0.59375 loss:0.8645997047424316 cnum:304.0\n",
      "Train acc: 0.576171875 loss:0.8545021414756775 cnum:295.0\n",
      "Train acc:  0.58984375 loss:0.852411687374115 cnum:302.0\n",
      "Train acc:  0.63671875 loss:0.8341726064682007 cnum:326.0\n",
      "Train acc: 0.583984375 loss:0.8309336304664612 cnum:299.0\n",
      "Train acc:   0.6328125 loss:0.8267132639884949 cnum:324.0\n",
      "Train acc:     0.59375 loss:0.8233731389045715 cnum:304.0\n",
      "Train acc:  0.60546875 loss:0.8470981121063232 cnum:310.0\n",
      "Train acc: 0.611328125 loss:0.8157405853271484 cnum:313.0\n",
      "Train acc: 0.611328125 loss:0.7981991767883301 cnum:313.0\n",
      "Train acc:  0.63671875 loss:0.8100196123123169 cnum:326.0\n",
      "Train acc:  0.61328125 loss:0.8484610319137573 cnum:314.0\n",
      "Train acc: 0.587890625 loss:0.8264822959899902 cnum:301.0\n",
      "Train acc: 0.611328125 loss:0.8394591808319092 cnum:313.0\n",
      "Train acc: 0.619140625 loss:0.7919040322303772 cnum:317.0\n",
      "Train acc: 0.638671875 loss:0.8166471123695374 cnum:327.0\n",
      "Train acc: 0.603515625 loss:0.8054405450820923 cnum:309.0\n",
      "Train acc:   0.6328125 loss:0.793215274810791 cnum:324.0\n",
      "Train acc:  0.63671875 loss:0.7983556985855103 cnum:326.0\n",
      "Train acc:  0.62890625 loss:0.8148018717765808 cnum:322.0\n",
      "Train acc: 0.626953125 loss:0.7867525815963745 cnum:321.0\n",
      "Train acc: 0.630859375 loss:0.8197521567344666 cnum:323.0\n",
      "Train acc:  0.58203125 loss:0.8134588003158569 cnum:298.0\n",
      "Train acc: 0.607421875 loss:0.8050366044044495 cnum:311.0\n",
      "Train acc:   0.6328125 loss:0.7957504987716675 cnum:324.0\n",
      "Train acc: 0.615234375 loss:0.7899300456047058 cnum:315.0\n",
      "Train acc: 0.634765625 loss:0.7898494005203247 cnum:325.0\n",
      "Train acc:  0.62890625 loss:0.8034487366676331 cnum:322.0\n",
      "Train acc: 0.654296875 loss:0.7943385243415833 cnum:335.0\n",
      "Train acc:  0.61328125 loss:0.8024169206619263 cnum:314.0\n",
      "Train acc: 0.599609375 loss:0.7885473370552063 cnum:307.0\n",
      "Train acc:  0.58984375 loss:0.8045538663864136 cnum:302.0\n",
      "Train acc: 0.576171875 loss:0.7816485166549683 cnum:295.0\n",
      "Train acc:  0.62109375 loss:0.7959370613098145 cnum:318.0\n",
      "Train acc: 0.607421875 loss:0.781943142414093 cnum:311.0\n",
      "Train acc: 0.591796875 loss:0.792415976524353 cnum:303.0\n",
      "Train acc: 0.642578125 loss:0.7785800099372864 cnum:329.0\n",
      "Train acc:  0.58984375 loss:0.7901300191879272 cnum:302.0\n",
      "Train acc: 0.591796875 loss:0.8067022562026978 cnum:303.0\n",
      "Train acc:   0.6484375 loss:0.8058111667633057 cnum:332.0\n",
      "Train acc: 0.568359375 loss:0.7812719941139221 cnum:291.0\n",
      "Train acc: 0.591796875 loss:0.779831051826477 cnum:303.0\n",
      "Train acc: 0.583984375 loss:0.7914525866508484 cnum:299.0\n",
      "Train acc: 0.595703125 loss:0.7985888719558716 cnum:305.0\n",
      "Train acc: 0.619140625 loss:0.7869307994842529 cnum:317.0\n",
      "Train acc: 0.619140625 loss:0.781352162361145 cnum:317.0\n",
      "Train acc:    0.609375 loss:0.7919310927391052 cnum:312.0\n",
      "Train acc: 0.673828125 loss:0.7806296348571777 cnum:345.0\n",
      "Train acc: 0.607421875 loss:0.7862082719802856 cnum:311.0\n",
      "Train acc:  0.64453125 loss:0.7807718515396118 cnum:330.0\n",
      "Train acc:  0.63671875 loss:0.7791555523872375 cnum:326.0\n",
      "Train acc:  0.66796875 loss:0.814618706703186 cnum:342.0\n",
      "Train acc: 0.564453125 loss:0.7957160472869873 cnum:289.0\n",
      "Train acc:   0.6171875 loss:0.7766444683074951 cnum:316.0\n",
      "Train acc: 0.587890625 loss:0.7869252562522888 cnum:301.0\n",
      "Train acc:  0.60546875 loss:0.7990803718566895 cnum:310.0\n",
      "Train acc:  0.61328125 loss:0.7857310175895691 cnum:314.0\n",
      "Train acc: 0.658203125 loss:0.8153561353683472 cnum:337.0\n",
      "Train acc: 0.615234375 loss:0.7919579744338989 cnum:315.0\n",
      "Train acc:   0.6640625 loss:0.7776087522506714 cnum:340.0\n",
      "Train acc: 0.615234375 loss:0.7865861654281616 cnum:315.0\n",
      "Train acc:    0.609375 loss:0.7786276340484619 cnum:312.0\n",
      "Train acc:     0.59375 loss:0.7797502875328064 cnum:304.0\n",
      "Train acc: 0.623046875 loss:0.7984389066696167 cnum:319.0\n",
      "Train acc: 0.619140625 loss:0.7876788973808289 cnum:317.0\n",
      "Train acc: 0.654296875 loss:0.7763922810554504 cnum:335.0\n",
      "Train acc:    0.609375 loss:0.7782431840896606 cnum:312.0\n",
      "Train acc:  0.66796875 loss:0.7786295413970947 cnum:342.0\n",
      "Train acc:  0.59765625 loss:0.784291684627533 cnum:306.0\n",
      "Train acc:   0.6171875 loss:0.7801083922386169 cnum:316.0\n",
      "Train acc: 0.634765625 loss:0.7855876088142395 cnum:325.0\n",
      "Train acc: 0.654296875 loss:0.7758046984672546 cnum:335.0\n",
      "Train acc: 0.626953125 loss:0.7796077132225037 cnum:321.0\n",
      "Train acc:   0.6171875 loss:0.778643786907196 cnum:316.0\n",
      "Train acc: 0.646484375 loss:0.781591534614563 cnum:331.0\n",
      "Train acc:       0.625 loss:0.788970947265625 cnum:320.0\n",
      "Train acc:       0.625 loss:0.7794036865234375 cnum:320.0\n",
      "Train acc:       0.625 loss:0.7803754210472107 cnum:320.0\n",
      "Train acc: 0.626953125 loss:0.781730055809021 cnum:321.0\n",
      "Train acc:  0.64453125 loss:0.7787189483642578 cnum:330.0\n",
      "Train acc:    0.640625 loss:0.7872803807258606 cnum:328.0\n",
      "Train acc:   0.6171875 loss:0.7911708354949951 cnum:316.0\n",
      "Train acc:    0.640625 loss:0.7812750339508057 cnum:328.0\n",
      "Train acc:  0.58203125 loss:0.7969227433204651 cnum:298.0\n",
      "Train acc:     0.59375 loss:0.7830345034599304 cnum:304.0\n",
      "Train acc: 0.607421875 loss:0.7876520752906799 cnum:311.0\n",
      "Train acc: 0.619140625 loss:0.7858423590660095 cnum:317.0\n",
      "Train acc:  0.62890625 loss:0.7799444198608398 cnum:322.0\n",
      "Train acc:  0.60546875 loss:0.7814694046974182 cnum:310.0\n",
      "Train acc: 0.599609375 loss:0.7892701625823975 cnum:307.0\n",
      "Train acc:   0.6015625 loss:0.7838689684867859 cnum:308.0\n",
      "Train acc: 0.587890625 loss:0.7805159091949463 cnum:301.0\n",
      "Train acc: 0.591796875 loss:0.7767832279205322 cnum:303.0\n",
      "Train acc: 0.623046875 loss:0.7798285484313965 cnum:319.0\n",
      "Train acc: 0.587890625 loss:0.7806955575942993 cnum:301.0\n",
      "Train acc:     0.65625 loss:0.7740603089332581 cnum:336.0\n",
      "Train acc: 0.626953125 loss:0.7812418341636658 cnum:321.0\n",
      "Train acc: 0.669921875 loss:0.775070071220398 cnum:343.0\n",
      "Train acc: 0.564453125 loss:0.7801187634468079 cnum:289.0\n",
      "Train acc:   0.5859375 loss:0.7861851453781128 cnum:300.0\n",
      "Train acc: 0.685546875 loss:0.7757126092910767 cnum:351.0\n",
      "Train acc:  0.65234375 loss:0.777083158493042 cnum:334.0\n",
      "Train acc: 0.595703125 loss:0.7788289785385132 cnum:305.0\n",
      "Train acc: 0.607421875 loss:0.7763282656669617 cnum:311.0\n",
      "Train acc: 0.634765625 loss:0.7812511324882507 cnum:325.0\n",
      "Train acc:  0.64453125 loss:0.7777066230773926 cnum:330.0\n",
      "Train acc:  0.61328125 loss:0.7760595679283142 cnum:314.0\n",
      "Train acc:   0.6015625 loss:0.7749847173690796 cnum:308.0\n",
      "Train acc: 0.572265625 loss:0.7775765657424927 cnum:293.0\n",
      "Train acc:    0.609375 loss:0.7761270999908447 cnum:312.0\n",
      "Train acc: 0.638671875 loss:0.775562584400177 cnum:327.0\n",
      "Train acc:       0.625 loss:0.7770633101463318 cnum:320.0\n",
      "Train acc: 0.572265625 loss:0.7922652959823608 cnum:293.0\n",
      "Train acc: 0.611328125 loss:0.792657196521759 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.7831445336341858 cnum:306.0\n",
      "Train acc:  0.59765625 loss:0.7897233963012695 cnum:306.0\n",
      "Train acc: 0.658203125 loss:0.7761508226394653 cnum:337.0\n",
      "Train acc: 0.611328125 loss:0.7800585627555847 cnum:313.0\n",
      "Train acc:     0.59375 loss:0.7818041443824768 cnum:304.0\n",
      "Train acc:       0.625 loss:0.7780410051345825 cnum:320.0\n",
      "Train acc:    0.640625 loss:0.782423734664917 cnum:328.0\n",
      "Train acc:   0.6015625 loss:0.7846463322639465 cnum:308.0\n",
      "Train acc: 0.599609375 loss:0.7786097526550293 cnum:307.0\n",
      "Train acc:  0.59765625 loss:0.7925028204917908 cnum:306.0\n",
      "Train acc:  0.65234375 loss:0.7749330997467041 cnum:334.0\n",
      "Train acc: 0.607421875 loss:0.7789930105209351 cnum:311.0\n",
      "Train acc:   0.5859375 loss:0.7772195339202881 cnum:300.0\n",
      "Train acc: 0.666015625 loss:0.787720799446106 cnum:341.0\n",
      "Train acc:       0.625 loss:0.7837221622467041 cnum:320.0\n",
      "Train acc: 0.607421875 loss:0.7780070900917053 cnum:311.0\n",
      "Train acc:  0.64453125 loss:0.7898131608963013 cnum:330.0\n",
      "Train acc:   0.6328125 loss:0.7834312915802002 cnum:324.0\n",
      "Train acc: 0.599609375 loss:0.7872012257575989 cnum:307.0\n",
      "Train acc: 0.611328125 loss:0.7783998250961304 cnum:313.0\n",
      "Train acc:     0.59375 loss:0.7770217657089233 cnum:304.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:    0.609375 loss:0.7810792922973633 cnum:312.0\n",
      "Train acc: 0.638671875 loss:0.7739278078079224 cnum:327.0\n",
      "Train acc: 0.619140625 loss:0.7756600379943848 cnum:317.0\n",
      "Train acc: 0.611328125 loss:0.7930314540863037 cnum:313.0\n",
      "Train acc:   0.5859375 loss:0.78579181432724 cnum:300.0\n",
      "Train acc:  0.58203125 loss:0.7777754068374634 cnum:298.0\n",
      "Train acc: 0.638671875 loss:0.7770494222640991 cnum:327.0\n",
      "Train acc:    0.609375 loss:0.7785567045211792 cnum:312.0\n",
      "Train acc: 0.634765625 loss:0.7756813764572144 cnum:325.0\n",
      "Train acc:  0.59765625 loss:0.7956217527389526 cnum:306.0\n",
      "Train acc: 0.646484375 loss:0.7745522856712341 cnum:331.0\n",
      "Train acc: 0.619140625 loss:0.7793909311294556 cnum:317.0\n",
      "Train acc: 0.619140625 loss:0.7915371060371399 cnum:317.0\n",
      "Train acc:  0.64453125 loss:0.7727113962173462 cnum:330.0\n",
      "Train acc:   0.6015625 loss:0.7761656045913696 cnum:308.0\n",
      "Train acc:  0.55859375 loss:0.7787380218505859 cnum:286.0\n",
      "Train acc: 0.619140625 loss:0.7804495692253113 cnum:317.0\n",
      "Train acc:  0.61328125 loss:0.7754936218261719 cnum:314.0\n",
      "Train acc:  0.61328125 loss:0.7758017778396606 cnum:314.0\n",
      "Train acc: 0.611328125 loss:0.7774841785430908 cnum:313.0\n",
      "Train acc: 0.623046875 loss:0.779366135597229 cnum:319.0\n",
      "Train acc:     0.59375 loss:0.7802891731262207 cnum:304.0\n",
      "Train acc: 0.599609375 loss:0.7827700972557068 cnum:307.0\n",
      "Train acc: 0.626953125 loss:0.7765132784843445 cnum:321.0\n",
      "Train acc: 0.642578125 loss:0.7785224914550781 cnum:329.0\n",
      "Train acc:  0.60546875 loss:0.7751367688179016 cnum:310.0\n",
      "Train acc:  0.58203125 loss:0.7763403058052063 cnum:298.0\n",
      "Train acc:  0.60546875 loss:0.775324821472168 cnum:310.0\n",
      "Train acc: 0.611328125 loss:0.7782055139541626 cnum:313.0\n",
      "Train acc: 0.623046875 loss:0.774247407913208 cnum:319.0\n",
      "Train acc: 0.603515625 loss:0.7759006023406982 cnum:309.0\n",
      "Train acc:   0.6015625 loss:0.7804782390594482 cnum:308.0\n",
      "Train acc:  0.64453125 loss:0.7743809223175049 cnum:330.0\n",
      "Train acc:  0.61328125 loss:0.7768111228942871 cnum:314.0\n",
      "Train acc:  0.58984375 loss:0.7787832617759705 cnum:302.0\n",
      "Train acc: 0.623046875 loss:0.7759324908256531 cnum:319.0\n",
      "Train acc: 0.599609375 loss:0.7766386866569519 cnum:307.0\n",
      "Train acc: 0.623046875 loss:0.7742818593978882 cnum:319.0\n",
      "Train acc: 0.615234375 loss:0.7772141695022583 cnum:315.0\n",
      "Train acc: 0.615234375 loss:0.776684045791626 cnum:315.0\n",
      "Train acc:  0.67578125 loss:0.7990119457244873 cnum:346.0\n",
      "Train acc:    0.609375 loss:0.7752837538719177 cnum:312.0\n",
      "Train acc:  0.57421875 loss:0.7982125878334045 cnum:294.0\n",
      "Train acc: 0.623046875 loss:0.7751948237419128 cnum:319.0\n",
      "Train acc:       0.625 loss:0.7756582498550415 cnum:320.0\n",
      "Train acc: 0.623046875 loss:0.775875985622406 cnum:319.0\n",
      "Train acc:  0.63671875 loss:0.7728497982025146 cnum:326.0\n",
      "Train acc: 0.603515625 loss:0.7921733260154724 cnum:309.0\n",
      "Train acc:  0.62109375 loss:0.7740155458450317 cnum:318.0\n",
      "Train acc:    0.578125 loss:0.7760061621665955 cnum:296.0\n",
      "Train acc: 0.615234375 loss:0.7870275378227234 cnum:315.0\n",
      "Train acc:  0.59765625 loss:0.7865985631942749 cnum:306.0\n",
      "Train acc:   0.5703125 loss:0.7771049737930298 cnum:292.0\n",
      "Train acc:  0.59765625 loss:0.7753250598907471 cnum:306.0\n",
      "Train acc: 0.619140625 loss:0.7761995792388916 cnum:317.0\n",
      "Train acc: 0.626953125 loss:0.7739091515541077 cnum:321.0\n",
      "Train acc: 0.626953125 loss:0.7878240346908569 cnum:321.0\n",
      "Train acc: 0.638671875 loss:0.7768997550010681 cnum:327.0\n",
      "Train acc:  0.62109375 loss:0.7758735418319702 cnum:318.0\n",
      "Train acc:  0.65234375 loss:0.7744296789169312 cnum:334.0\n",
      "Train acc: 0.623046875 loss:0.7798139452934265 cnum:319.0\n",
      "Train acc:       0.625 loss:0.7821774482727051 cnum:320.0\n",
      "Train acc: 0.646484375 loss:0.772831916809082 cnum:331.0\n",
      "Train acc: 0.638671875 loss:0.7743949890136719 cnum:327.0\n",
      "Train acc: 0.638671875 loss:0.7747699022293091 cnum:327.0\n",
      "Train acc:  0.59765625 loss:0.7750110030174255 cnum:306.0\n",
      "Train acc: 0.634765625 loss:0.7889165878295898 cnum:325.0\n",
      "Train acc: 0.642578125 loss:0.7869065403938293 cnum:329.0\n",
      "Train acc: 0.619140625 loss:0.7753950357437134 cnum:317.0\n",
      "Train acc: 0.603515625 loss:0.7789100408554077 cnum:309.0\n",
      "Train acc:  0.63671875 loss:0.7797790169715881 cnum:326.0\n",
      "Train acc: 0.626953125 loss:0.7736356258392334 cnum:321.0\n",
      "Train acc:  0.61328125 loss:0.7885689735412598 cnum:314.0\n",
      "Train acc:  0.62890625 loss:0.7806994318962097 cnum:322.0\n",
      "Train acc:  0.62890625 loss:0.776853084564209 cnum:322.0\n",
      "Train acc: 0.619140625 loss:0.7787379026412964 cnum:317.0\n",
      "Train acc: 0.646484375 loss:0.7729921340942383 cnum:331.0\n",
      "Train acc:  0.62890625 loss:0.7752040028572083 cnum:322.0\n",
      "Train acc: 0.642578125 loss:0.7762233018875122 cnum:329.0\n",
      "Train acc: 0.611328125 loss:0.7822780609130859 cnum:313.0\n",
      "Train acc: 0.580078125 loss:0.776574969291687 cnum:297.0\n",
      "Train acc:  0.62109375 loss:0.7757484912872314 cnum:318.0\n",
      "Train acc:   0.6015625 loss:0.7803361415863037 cnum:308.0\n",
      "Train acc:    0.609375 loss:0.7790111899375916 cnum:312.0\n",
      "Train acc: 0.630859375 loss:0.7766669392585754 cnum:323.0\n",
      "Train acc: 0.634765625 loss:0.7821006774902344 cnum:325.0\n",
      "Train acc: 0.666015625 loss:0.7748795747756958 cnum:341.0\n",
      "Train acc:   0.6171875 loss:0.774543821811676 cnum:316.0\n",
      "Train acc: 0.642578125 loss:0.7730070948600769 cnum:329.0\n",
      "Train acc:  0.61328125 loss:0.7747686505317688 cnum:314.0\n",
      "Train acc:   0.6484375 loss:0.7842389941215515 cnum:332.0\n",
      "Train acc: 0.583984375 loss:0.7833927273750305 cnum:299.0\n",
      "Train acc: 0.611328125 loss:0.777887761592865 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.775501012802124 cnum:306.0\n",
      "Train acc:    0.640625 loss:0.7768962383270264 cnum:328.0\n",
      "Train acc:  0.59765625 loss:0.7899577021598816 cnum:306.0\n",
      "Train acc: 0.611328125 loss:0.7757132053375244 cnum:313.0\n",
      "Train acc: 0.583984375 loss:0.779495120048523 cnum:299.0\n",
      "Train acc: 0.626953125 loss:0.7820342779159546 cnum:321.0\n",
      "Train acc: 0.603515625 loss:0.7793864607810974 cnum:309.0\n",
      "Train acc:  0.61328125 loss:0.7810620665550232 cnum:314.0\n",
      "Train acc:  0.58203125 loss:0.776104211807251 cnum:298.0\n",
      "Train acc: 0.619140625 loss:0.7746361494064331 cnum:317.0\n",
      "Train acc: 0.599609375 loss:0.7747564911842346 cnum:307.0\n",
      "Train acc: 0.599609375 loss:0.7923132181167603 cnum:307.0\n",
      "Train acc:   0.6171875 loss:0.7755045294761658 cnum:316.0\n",
      "Train acc: 0.623046875 loss:0.774232029914856 cnum:319.0\n",
      "Train acc: 0.607421875 loss:0.791626513004303 cnum:311.0\n",
      "Train acc:    0.609375 loss:0.7797479033470154 cnum:312.0\n",
      "Train acc:  0.61328125 loss:0.7743139266967773 cnum:314.0\n",
      "Train acc: 0.595703125 loss:0.7887267470359802 cnum:305.0\n",
      "Train acc: 0.591796875 loss:0.7752634286880493 cnum:303.0\n",
      "Train acc:   0.6484375 loss:0.7987139821052551 cnum:332.0\n",
      "Train acc:    0.609375 loss:0.7795928120613098 cnum:312.0\n",
      "Train acc:    0.609375 loss:0.7805039286613464 cnum:312.0\n",
      "Train acc: 0.615234375 loss:0.7738674283027649 cnum:315.0\n",
      "Train acc:   0.6484375 loss:0.7861248254776001 cnum:332.0\n",
      "Train acc:    0.609375 loss:0.7837352156639099 cnum:312.0\n",
      "Train acc: 0.650390625 loss:0.7767144441604614 cnum:333.0\n",
      "Train acc:     0.59375 loss:0.7769633531570435 cnum:304.0\n",
      "Train acc: 0.630859375 loss:0.7746819853782654 cnum:323.0\n",
      "Train acc: 0.619140625 loss:0.783728837966919 cnum:317.0\n",
      "Train acc: 0.607421875 loss:0.7748662829399109 cnum:311.0\n",
      "Train acc:  0.60546875 loss:0.78499436378479 cnum:310.0\n",
      "Train acc:  0.61328125 loss:0.7937755584716797 cnum:314.0\n",
      "Train acc: 0.607421875 loss:0.7763445377349854 cnum:311.0\n",
      "Train acc:  0.65234375 loss:0.7736323475837708 cnum:334.0\n",
      "Train acc: 0.595703125 loss:0.7866094708442688 cnum:305.0\n",
      "Train acc: 0.638671875 loss:0.774349570274353 cnum:327.0\n",
      "Train acc: 0.591796875 loss:0.7801805138587952 cnum:303.0\n",
      "Train acc: 0.595703125 loss:0.7765272259712219 cnum:305.0\n",
      "Train acc: 0.595703125 loss:0.7757697105407715 cnum:305.0\n",
      "Train acc:    0.578125 loss:0.775969386100769 cnum:296.0\n",
      "Train acc:     0.59375 loss:0.7782671451568604 cnum:304.0\n",
      "Train acc: 0.607421875 loss:0.7750077843666077 cnum:311.0\n",
      "Train acc:  0.62890625 loss:0.7744237780570984 cnum:322.0\n",
      "Train acc:  0.62109375 loss:0.7771968841552734 cnum:318.0\n",
      "Train acc: 0.642578125 loss:0.7734116315841675 cnum:329.0\n",
      "Train acc:  0.59765625 loss:0.7756569981575012 cnum:306.0\n",
      "Train acc: 0.615234375 loss:0.7743260264396667 cnum:315.0\n",
      "Train acc:  0.58984375 loss:0.7774209976196289 cnum:302.0\n",
      "Train acc:   0.6015625 loss:0.7792000770568848 cnum:308.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:  0.63671875 loss:0.7756131887435913 cnum:326.0\n",
      "Train acc: 0.630859375 loss:0.7761959433555603 cnum:323.0\n",
      "Train acc: 0.619140625 loss:0.7786861658096313 cnum:317.0\n",
      "Train acc: 0.607421875 loss:0.777101993560791 cnum:311.0\n",
      "Train acc:   0.6015625 loss:0.7761198282241821 cnum:308.0\n",
      "Train acc: 0.623046875 loss:0.7785243391990662 cnum:319.0\n",
      "Train acc: 0.615234375 loss:0.7737627029418945 cnum:315.0\n",
      "Train acc:     0.59375 loss:0.7752139568328857 cnum:304.0\n",
      "Train acc:     0.65625 loss:0.771994948387146 cnum:336.0\n",
      "Train acc: 0.642578125 loss:0.7755907773971558 cnum:329.0\n",
      "Train acc: 0.572265625 loss:0.7796284556388855 cnum:293.0\n",
      "Train acc: 0.599609375 loss:0.7755728960037231 cnum:307.0\n",
      "Train acc: 0.611328125 loss:0.7772760391235352 cnum:313.0\n",
      "Train acc:   0.6328125 loss:0.793787956237793 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7779684662818909 cnum:317.0\n",
      "Train acc:    0.640625 loss:0.7732908725738525 cnum:328.0\n",
      "Train acc: 0.623046875 loss:0.7773171663284302 cnum:319.0\n",
      "Train acc: 0.595703125 loss:0.7775405645370483 cnum:305.0\n",
      "Train acc: 0.611328125 loss:0.7746341824531555 cnum:313.0\n",
      "Train acc:  0.63671875 loss:0.7755240797996521 cnum:326.0\n",
      "Train acc:     0.59375 loss:0.7780859470367432 cnum:304.0\n",
      "Train acc:  0.63671875 loss:0.7733536958694458 cnum:326.0\n",
      "Train acc:   0.6171875 loss:0.7742065191268921 cnum:316.0\n",
      "Train acc: 0.583984375 loss:0.7761425375938416 cnum:299.0\n",
      "Train acc:  0.62109375 loss:0.7822213768959045 cnum:318.0\n",
      "Train acc: 0.634765625 loss:0.7740845680236816 cnum:325.0\n",
      "Train acc: 0.607421875 loss:0.7796132564544678 cnum:311.0\n",
      "Train acc: 0.580078125 loss:0.7760065197944641 cnum:297.0\n",
      "Train acc: 0.634765625 loss:0.782159686088562 cnum:325.0\n",
      "Train acc:   0.6171875 loss:0.7742165923118591 cnum:316.0\n",
      "Train acc: 0.630859375 loss:0.77474045753479 cnum:323.0\n",
      "Train acc:   0.6484375 loss:0.7727596163749695 cnum:332.0\n",
      "Train acc: 0.654296875 loss:0.7725186347961426 cnum:335.0\n",
      "Train acc:  0.61328125 loss:0.7742730379104614 cnum:314.0\n",
      "Train acc: 0.623046875 loss:0.7755113244056702 cnum:319.0\n",
      "Train acc:  0.64453125 loss:0.7750765085220337 cnum:330.0\n",
      "Train acc: 0.607421875 loss:0.7743129730224609 cnum:311.0\n",
      "Train acc: 0.626953125 loss:0.7733141183853149 cnum:321.0\n",
      "Train acc: 0.619140625 loss:0.775226354598999 cnum:317.0\n",
      "Train acc: 0.587890625 loss:0.7753597497940063 cnum:301.0\n",
      "Train acc:   0.6328125 loss:0.7730581164360046 cnum:324.0\n",
      "Train acc: 0.626953125 loss:0.7778595089912415 cnum:321.0\n",
      "Train acc: 0.638671875 loss:0.7754133939743042 cnum:327.0\n",
      "Train acc:   0.6171875 loss:0.7895532846450806 cnum:316.0\n",
      "Train acc:     0.59375 loss:0.7757139205932617 cnum:304.0\n",
      "Train acc: 0.658203125 loss:0.7715574502944946 cnum:337.0\n",
      "Train acc: 0.623046875 loss:0.775658130645752 cnum:319.0\n",
      "Train acc: 0.626953125 loss:0.7740389108657837 cnum:321.0\n",
      "Train acc:       0.625 loss:0.7747587561607361 cnum:320.0\n",
      "Train acc:   0.6328125 loss:0.7821091413497925 cnum:324.0\n",
      "Train acc:       0.625 loss:0.7818722128868103 cnum:320.0\n",
      "Train acc:  0.61328125 loss:0.7750303745269775 cnum:314.0\n",
      "Train acc: 0.642578125 loss:0.7769498825073242 cnum:329.0\n",
      "Train acc:    0.578125 loss:0.7764697074890137 cnum:296.0\n",
      "Train acc:       0.625 loss:0.7733806371688843 cnum:320.0\n",
      "Train acc: 0.607421875 loss:0.7805595397949219 cnum:311.0\n",
      "Train acc:  0.63671875 loss:0.7733777761459351 cnum:326.0\n",
      "Train acc:   0.6328125 loss:0.7834151387214661 cnum:324.0\n",
      "Train acc:   0.6171875 loss:0.7753364443778992 cnum:316.0\n",
      "Train acc: 0.591796875 loss:0.7754789590835571 cnum:303.0\n",
      "Train acc:  0.59765625 loss:0.7755934596061707 cnum:306.0\n",
      "Train acc: 0.634765625 loss:0.7799692153930664 cnum:325.0\n",
      "Train acc:  0.60546875 loss:0.7748209238052368 cnum:310.0\n",
      "Train acc: 0.615234375 loss:0.77397620677948 cnum:315.0\n",
      "Train acc:  0.62109375 loss:0.7903609871864319 cnum:318.0\n",
      "Train acc: 0.685546875 loss:0.7701141834259033 cnum:351.0\n",
      "Train acc: 0.630859375 loss:0.772923469543457 cnum:323.0\n",
      "Train acc:   0.5859375 loss:0.7754127979278564 cnum:300.0\n",
      "Train acc:  0.58984375 loss:0.7763566374778748 cnum:302.0\n",
      "Train acc:   0.6015625 loss:0.7771474719047546 cnum:308.0\n",
      "Train acc: 0.583984375 loss:0.7773768901824951 cnum:299.0\n",
      "Train acc: 0.580078125 loss:0.7800653576850891 cnum:297.0\n",
      "Train acc: 0.619140625 loss:0.7741483449935913 cnum:317.0\n",
      "Train acc:  0.63671875 loss:0.7734634876251221 cnum:326.0\n",
      "Train acc: 0.658203125 loss:0.7719078063964844 cnum:337.0\n",
      "Train acc:    0.640625 loss:0.7749671936035156 cnum:328.0\n",
      "Train acc:   0.5859375 loss:0.7845230102539062 cnum:300.0\n",
      "Train acc:   0.6171875 loss:0.7745248675346375 cnum:316.0\n",
      "Train acc: 0.642578125 loss:0.7732850909233093 cnum:329.0\n",
      "Train acc: 0.611328125 loss:0.7782460451126099 cnum:313.0\n",
      "Train acc: 0.626953125 loss:0.7745151519775391 cnum:321.0\n",
      "Train acc: 0.607421875 loss:0.7827564477920532 cnum:311.0\n",
      "Train acc: 0.650390625 loss:0.7724289894104004 cnum:333.0\n",
      "Train acc: 0.615234375 loss:0.7743619680404663 cnum:315.0\n",
      "Train acc: 0.654296875 loss:0.7724418640136719 cnum:335.0\n",
      "Train acc: 0.603515625 loss:0.7745537757873535 cnum:309.0\n",
      "Train acc: 0.630859375 loss:0.7731476426124573 cnum:323.0\n",
      "Train acc: 0.591796875 loss:0.7750855088233948 cnum:303.0\n",
      "Train acc: 0.662109375 loss:0.7715044021606445 cnum:339.0\n",
      "Train acc: 0.611328125 loss:0.7751294374465942 cnum:313.0\n",
      "Train acc: 0.572265625 loss:0.7772758603096008 cnum:293.0\n",
      "Train acc:    0.609375 loss:0.7899660468101501 cnum:312.0\n",
      "Train acc:   0.5859375 loss:0.7753180265426636 cnum:300.0\n",
      "Train acc:     0.59375 loss:0.7785013318061829 cnum:304.0\n",
      "Train acc: 0.591796875 loss:0.7759219408035278 cnum:303.0\n",
      "Train acc:  0.60546875 loss:0.7744694352149963 cnum:310.0\n",
      "Train acc: 0.607421875 loss:0.7763833999633789 cnum:311.0\n",
      "Train acc: 0.615234375 loss:0.773750901222229 cnum:315.0\n",
      "Train acc: 0.646484375 loss:0.7728874683380127 cnum:331.0\n",
      "Train acc: 0.572265625 loss:0.7758941650390625 cnum:293.0\n",
      "Train acc: 0.599609375 loss:0.7767267227172852 cnum:307.0\n",
      "Train acc:  0.60546875 loss:0.7809393405914307 cnum:310.0\n",
      "Train acc:    0.609375 loss:0.7764378786087036 cnum:312.0\n",
      "Train acc:     0.59375 loss:0.7748781442642212 cnum:304.0\n",
      "Train acc:  0.62109375 loss:0.773848295211792 cnum:318.0\n",
      "Train acc:  0.62109375 loss:0.7736990451812744 cnum:318.0\n",
      "Train acc:     0.59375 loss:0.7771086692810059 cnum:304.0\n",
      "Train acc: 0.623046875 loss:0.775130569934845 cnum:319.0\n",
      "Train acc: 0.626953125 loss:0.776164710521698 cnum:321.0\n",
      "Train acc:  0.60546875 loss:0.7773261070251465 cnum:310.0\n",
      "Train acc: 0.615234375 loss:0.7740438580513 cnum:315.0\n",
      "Train acc: 0.595703125 loss:0.7752697467803955 cnum:305.0\n",
      "Train acc:   0.6328125 loss:0.7727258205413818 cnum:324.0\n",
      "Train acc: 0.595703125 loss:0.774825394153595 cnum:305.0\n",
      "Train acc: 0.595703125 loss:0.7776457667350769 cnum:305.0\n",
      "Train acc: 0.619140625 loss:0.7737447023391724 cnum:317.0\n",
      "Train acc:  0.61328125 loss:0.7961928248405457 cnum:314.0\n",
      "Train acc:    0.578125 loss:0.7756935358047485 cnum:296.0\n",
      "Train acc: 0.634765625 loss:0.773693323135376 cnum:325.0\n",
      "Train acc:  0.65234375 loss:0.7726225852966309 cnum:334.0\n",
      "Train acc:    0.609375 loss:0.7761831283569336 cnum:312.0\n",
      "Train acc: 0.619140625 loss:0.776287853717804 cnum:317.0\n",
      "Train acc:  0.64453125 loss:0.7776916027069092 cnum:330.0\n",
      "Train acc:    0.640625 loss:0.7744643688201904 cnum:328.0\n",
      "Train acc:  0.64453125 loss:0.7730551958084106 cnum:330.0\n",
      "Train acc:       0.625 loss:0.7836189270019531 cnum:320.0\n",
      "Train acc:   0.6328125 loss:0.7727513313293457 cnum:324.0\n",
      "Train acc:  0.57421875 loss:0.7757487297058105 cnum:294.0\n",
      "Train acc: 0.591796875 loss:0.7757482528686523 cnum:303.0\n",
      "Train acc:  0.63671875 loss:0.7733125686645508 cnum:326.0\n",
      "Train acc: 0.626953125 loss:0.7730118632316589 cnum:321.0\n",
      "Train acc: 0.623046875 loss:0.7737911343574524 cnum:319.0\n",
      "Train acc:    0.640625 loss:0.7861225008964539 cnum:328.0\n",
      "Train acc: 0.619140625 loss:0.7735440731048584 cnum:317.0\n",
      "Train acc: 0.603515625 loss:0.7753321528434753 cnum:309.0\n",
      "Train acc: 0.638671875 loss:0.7727137804031372 cnum:327.0\n",
      "Train acc:  0.61328125 loss:0.7744452357292175 cnum:314.0\n",
      "Train acc: 0.630859375 loss:0.773054301738739 cnum:323.0\n",
      "Train acc:    0.609375 loss:0.7748568058013916 cnum:312.0\n",
      "Train acc: 0.615234375 loss:0.7737872004508972 cnum:315.0\n",
      "Train acc:   0.6640625 loss:0.7733810544013977 cnum:340.0\n",
      "Train acc: 0.650390625 loss:0.7732588648796082 cnum:333.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:  0.61328125 loss:0.7744826674461365 cnum:314.0\n",
      "Train acc: 0.619140625 loss:0.7736360430717468 cnum:317.0\n",
      "Dev acc:  0.68359375 loss0.7706074714660645\n",
      "Dev acc:   0.6640625 loss0.7746762037277222\n",
      "Dev acc:  0.63671875 loss0.7730256915092468\n",
      "Dev acc:  0.64453125 loss0.7724362015724182\n",
      "Dev acc:  0.69140625 loss0.8042200803756714\n",
      "Dev acc:  0.66796875 loss0.7720515131950378\n",
      "Dev acc:   0.6953125 loss0.7700556516647339\n",
      "Dev acc:  0.67578125 loss0.7709653973579407\n",
      "Dev acc:   0.6640625 loss0.7745376825332642\n",
      "Dev acc:  0.65234375 loss0.7756069302558899\n",
      "Dev acc:  0.62109375 loss0.7771857976913452\n",
      "Dev acc:  0.62890625 loss0.775324821472168\n",
      "Dev acc:  0.62109375 loss0.7733453512191772\n",
      "Dev acc:   0.6015625 loss0.7754939794540405\n",
      "Dev acc:   0.6484375 loss0.7739446759223938\n",
      "Dev acc:  0.66796875 loss0.7732818126678467\n",
      "Dev acc:   0.6953125 loss0.7701868414878845\n",
      "Dev acc:  0.66796875 loss0.8333163261413574\n",
      "Dev acc:   0.7109375 loss0.7732031941413879\n",
      "Dev acc:  0.62890625 loss0.7740355730056763\n",
      "Dev acc:    0.671875 loss0.7715991139411926\n",
      "Dev acc:  0.65234375 loss0.7734447121620178\n",
      "Dev acc:  0.66796875 loss0.7717821598052979\n",
      "Dev acc:   0.6640625 loss0.7720338702201843\n",
      "Dev acc:      0.6875 loss0.7711638808250427\n",
      "Dev acc:  0.68359375 loss0.7709531784057617\n",
      "Dev acc:   0.7265625 loss0.7690863013267517\n",
      "Dev acc:  0.66015625 loss0.776096761226654\n",
      "Dev acc:     0.65625 loss0.7717111110687256\n",
      "Dev acc:  0.65234375 loss0.7727570533752441\n",
      "Dev acc:   0.5859375 loss0.7769731283187866\n",
      "Dev acc:  0.62890625 loss0.7733461856842041\n",
      "Dev acc:      0.6875 loss0.7703326344490051\n",
      "Dev acc:    0.640625 loss0.773455798625946\n",
      "Dev acc:  0.67578125 loss0.775214433670044\n",
      "Dev acc:   0.6484375 loss0.7721081376075745\n",
      "Dev acc:  0.58984375 loss0.7808418273925781\n",
      "Dev acc:  0.64453125 loss0.7721097469329834\n",
      "Dev acc:  0.63671875 loss0.7729593515396118\n",
      "Dev acc:     0.65625 loss0.7717739343643188\n",
      "Dev acc:  0.69921875 loss0.7700823545455933\n",
      "Dev acc:  0.73828125 loss0.7675457000732422\n",
      "Dev acc:  0.68359375 loss0.7717317938804626\n",
      "Dev acc:   0.6796875 loss0.7706108093261719\n",
      "Dev acc:    0.640625 loss0.774676501750946\n",
      "Dev acc:  0.71484375 loss0.7686341404914856\n",
      "Dev acc:   0.6484375 loss0.7720239758491516\n",
      "Dev acc:     0.65625 loss0.7718178629875183\n",
      "Dev acc:  0.65234375 loss0.774606466293335\n",
      "Dev acc:    0.640625 loss0.7727649211883545\n",
      "Dev acc:     0.65625 loss0.7780880331993103\n",
      "Dev acc:   0.6015625 loss0.7744536399841309\n",
      "Dev acc:      0.6875 loss0.7708088159561157\n",
      "Dev acc:   0.6640625 loss0.7717061042785645\n",
      "Dev acc:  0.66796875 loss0.7713038921356201\n",
      "Dev acc:  0.62890625 loss0.7742917537689209\n",
      "Dev acc:  0.66796875 loss0.7714924216270447\n",
      "Dev acc:   0.6640625 loss0.7784584164619446\n",
      "Dev acc:   0.6953125 loss0.7698701620101929\n",
      "Dev acc:  0.65234375 loss0.7721843719482422\n",
      "Dev acc:    0.640625 loss0.7727223634719849\n",
      "Dev acc:  0.64453125 loss0.7740353345870972\n",
      "Dev acc:  0.62109375 loss0.775920033454895\n",
      "Dev acc:  0.65234375 loss0.7747700214385986\n",
      "Dev acc:  0.66796875 loss0.7711837291717529\n",
      "Dev acc:  0.66796875 loss0.7713078856468201\n",
      "Dev acc:   0.6015625 loss0.7770657539367676\n",
      "Dev acc:  0.64453125 loss0.7723022103309631\n",
      "Dev acc:      0.6875 loss0.7804730534553528\n",
      "Dev acc:  0.71484375 loss0.773444652557373\n",
      "Dev acc:  0.63671875 loss0.7777197360992432\n",
      "Dev acc:    0.671875 loss0.7712619304656982\n",
      "Dev acc:  0.65234375 loss0.7728369235992432\n",
      "Dev acc:  0.70703125 loss0.7715185284614563\n",
      "Dev acc:  0.64453125 loss0.7739259004592896\n",
      "Dev acc:     0.71875 loss0.76945960521698\n",
      "Dev acc:     0.65625 loss0.7718027234077454\n",
      "Dev acc:  0.66015625 loss0.7720459699630737\n",
      "Dev acc:  0.69140625 loss0.7727389335632324\n",
      "Dev acc:    0.703125 loss0.7692034244537354\n",
      "Dev acc:  0.69921875 loss0.7701659202575684\n",
      "Dev acc:   0.6640625 loss0.7737489938735962\n",
      "Dev acc:  0.69140625 loss0.7698224186897278\n",
      "Dev acc:   0.6484375 loss0.7748128771781921\n",
      "Dev acc:  0.74609375 loss0.7675607204437256\n",
      "Dev acc:    0.671875 loss0.7740340232849121\n",
      "Dev acc:   0.6484375 loss0.7725242376327515\n",
      "Dev acc:  0.64453125 loss0.7723525762557983\n",
      "Dev acc:       0.625 loss0.7761881351470947\n",
      "Dev acc:  0.64453125 loss0.7761915922164917\n",
      "Dev acc:   0.6796875 loss0.7712796926498413\n",
      "Dev acc:  0.56640625 loss0.7765160202980042\n",
      "Dev acc:  0.66796875 loss0.7819926142692566\n",
      "Dev acc:  0.65234375 loss0.7724974751472473\n",
      "Dev acc:  0.68359375 loss0.8029577732086182\n",
      "Dev acc:    0.640625 loss0.7724372744560242\n",
      "Dev acc:  0.67578125 loss0.7720617651939392\n",
      "Dev acc:  0.67578125 loss0.7714013457298279\n",
      "Dev acc:   0.6640625 loss0.7740925550460815\n",
      "Dev acc:  0.62109375 loss0.7741419076919556\n",
      "Dev acc:    0.671875 loss0.7716084122657776\n",
      "Dev acc:  0.71484375 loss0.7710105776786804\n",
      "Dev acc:  0.65234375 loss0.7738191485404968\n",
      "Dev acc:     0.65625 loss0.7725681066513062\n",
      "Dev acc:   0.6953125 loss0.7704233527183533\n",
      "Dev acc:  0.61328125 loss0.7742774486541748\n",
      "Dev acc:    0.640625 loss0.7737810015678406\n",
      "Total Dev acc:0.6613609813084113\n",
      "Step:1\n",
      "Train acc:   0.6015625 loss:0.7773720622062683 cnum:308.0\n",
      "Train acc:   0.6171875 loss:0.7735716104507446 cnum:316.0\n",
      "Train acc: 0.642578125 loss:0.7726619839668274 cnum:329.0\n",
      "Train acc: 0.591796875 loss:0.7760111093521118 cnum:303.0\n",
      "Train acc:  0.65234375 loss:0.7719728350639343 cnum:334.0\n",
      "Train acc: 0.611328125 loss:0.7816935181617737 cnum:313.0\n",
      "Train acc: 0.611328125 loss:0.7804163694381714 cnum:313.0\n",
      "Train acc:     0.59375 loss:0.7756183743476868 cnum:304.0\n",
      "Train acc:   0.6328125 loss:0.7730712294578552 cnum:324.0\n",
      "Train acc: 0.662109375 loss:0.7870641946792603 cnum:339.0\n",
      "Train acc: 0.623046875 loss:0.7734873294830322 cnum:319.0\n",
      "Train acc:    0.609375 loss:0.7806950211524963 cnum:312.0\n",
      "Train acc: 0.599609375 loss:0.7805306315422058 cnum:307.0\n",
      "Train acc: 0.611328125 loss:0.7742496728897095 cnum:313.0\n",
      "Train acc:    0.578125 loss:0.7758249044418335 cnum:296.0\n",
      "Train acc: 0.623046875 loss:0.773765504360199 cnum:319.0\n",
      "Train acc: 0.619140625 loss:0.7790566682815552 cnum:317.0\n",
      "Train acc:   0.5859375 loss:0.7754546403884888 cnum:300.0\n",
      "Train acc: 0.611328125 loss:0.7746624946594238 cnum:313.0\n",
      "Train acc:  0.58984375 loss:0.7768822908401489 cnum:302.0\n",
      "Train acc:   0.6328125 loss:0.7733995914459229 cnum:324.0\n",
      "Train acc: 0.666015625 loss:0.7731601595878601 cnum:341.0\n",
      "Train acc: 0.607421875 loss:0.774944007396698 cnum:311.0\n",
      "Train acc:  0.58984375 loss:0.7760406732559204 cnum:302.0\n",
      "Train acc:   0.6171875 loss:0.7739108800888062 cnum:316.0\n",
      "Train acc: 0.615234375 loss:0.7739862203598022 cnum:315.0\n",
      "Train acc: 0.591796875 loss:0.7753185033798218 cnum:303.0\n",
      "Train acc: 0.587890625 loss:0.7756280899047852 cnum:301.0\n",
      "Train acc: 0.615234375 loss:0.7769685983657837 cnum:315.0\n",
      "Train acc: 0.642578125 loss:0.7833889722824097 cnum:329.0\n",
      "Train acc: 0.662109375 loss:0.7730690836906433 cnum:339.0\n",
      "Train acc: 0.646484375 loss:0.7816279530525208 cnum:331.0\n",
      "Train acc:  0.60546875 loss:0.7741904258728027 cnum:310.0\n",
      "Train acc:  0.63671875 loss:0.775703489780426 cnum:326.0\n",
      "Train acc:     0.59375 loss:0.7774420380592346 cnum:304.0\n",
      "Train acc:   0.6328125 loss:0.7745596170425415 cnum:324.0\n",
      "Train acc:  0.61328125 loss:0.7777248024940491 cnum:314.0\n",
      "Train acc:   0.6328125 loss:0.772996723651886 cnum:324.0\n",
      "Train acc: 0.587890625 loss:0.7785242199897766 cnum:301.0\n",
      "Train acc:  0.63671875 loss:0.7758775353431702 cnum:326.0\n",
      "Train acc: 0.642578125 loss:0.7748731970787048 cnum:329.0\n",
      "Train acc: 0.662109375 loss:0.7713327407836914 cnum:339.0\n",
      "Train acc: 0.595703125 loss:0.7751200795173645 cnum:305.0\n",
      "Train acc: 0.630859375 loss:0.7729836702346802 cnum:323.0\n",
      "Train acc: 0.587890625 loss:0.7759255766868591 cnum:301.0\n",
      "Train acc:   0.6171875 loss:0.7745323777198792 cnum:316.0\n",
      "Train acc:  0.58984375 loss:0.7769139409065247 cnum:302.0\n",
      "Train acc: 0.587890625 loss:0.7751684188842773 cnum:301.0\n",
      "Train acc: 0.615234375 loss:0.7738141417503357 cnum:315.0\n",
      "Train acc: 0.634765625 loss:0.7727067470550537 cnum:325.0\n",
      "Train acc:    0.609375 loss:0.7799685597419739 cnum:312.0\n",
      "Train acc:       0.625 loss:0.7732287645339966 cnum:320.0\n",
      "Train acc: 0.587890625 loss:0.7750744819641113 cnum:301.0\n",
      "Train acc: 0.591796875 loss:0.775262713432312 cnum:303.0\n",
      "Train acc: 0.583984375 loss:0.7775761485099792 cnum:299.0\n",
      "Train acc:  0.66796875 loss:0.7728173136711121 cnum:342.0\n",
      "Train acc: 0.580078125 loss:0.776991605758667 cnum:297.0\n",
      "Train acc: 0.642578125 loss:0.7733513116836548 cnum:329.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:   0.6328125 loss:0.772994875907898 cnum:324.0\n",
      "Train acc:  0.61328125 loss:0.7739977836608887 cnum:314.0\n",
      "Train acc:    0.609375 loss:0.7750158309936523 cnum:312.0\n",
      "Train acc: 0.634765625 loss:0.7730332612991333 cnum:325.0\n",
      "Train acc:  0.60546875 loss:0.774553120136261 cnum:310.0\n",
      "Train acc: 0.650390625 loss:0.7805113196372986 cnum:333.0\n",
      "Train acc: 0.626953125 loss:0.7793076038360596 cnum:321.0\n",
      "Train acc:     0.65625 loss:0.7720997333526611 cnum:336.0\n",
      "Train acc: 0.634765625 loss:0.7734917402267456 cnum:325.0\n",
      "Train acc: 0.611328125 loss:0.7747288942337036 cnum:313.0\n",
      "Train acc: 0.615234375 loss:0.7745038866996765 cnum:315.0\n",
      "Train acc: 0.619140625 loss:0.7734932899475098 cnum:317.0\n",
      "Train acc:  0.57421875 loss:0.7769604921340942 cnum:294.0\n",
      "Train acc: 0.603515625 loss:0.7745934724807739 cnum:309.0\n",
      "Train acc:  0.64453125 loss:0.773173451423645 cnum:330.0\n",
      "Train acc:  0.62109375 loss:0.7735753655433655 cnum:318.0\n",
      "Train acc: 0.650390625 loss:0.7726033926010132 cnum:333.0\n",
      "Train acc:  0.66796875 loss:0.7865630388259888 cnum:342.0\n",
      "Train acc: 0.599609375 loss:0.7757236361503601 cnum:307.0\n",
      "Train acc:   0.6171875 loss:0.7739549279212952 cnum:316.0\n",
      "Train acc:    0.578125 loss:0.7755367755889893 cnum:296.0\n",
      "Train acc: 0.619140625 loss:0.7752721905708313 cnum:317.0\n",
      "Train acc:   0.6171875 loss:0.7735100984573364 cnum:316.0\n",
      "Train acc:  0.63671875 loss:0.7825086712837219 cnum:326.0\n",
      "Train acc: 0.615234375 loss:0.7737305164337158 cnum:315.0\n",
      "Train acc: 0.619140625 loss:0.7735047936439514 cnum:317.0\n",
      "Train acc:   0.6171875 loss:0.7743561863899231 cnum:316.0\n",
      "Train acc: 0.623046875 loss:   0.7734375 cnum:319.0\n",
      "Train acc:   0.6171875 loss:0.7735263109207153 cnum:316.0\n",
      "Train acc: 0.595703125 loss:0.7783641219139099 cnum:305.0\n",
      "Train acc: 0.599609375 loss:0.7748318910598755 cnum:307.0\n",
      "Train acc:  0.62890625 loss:0.774254560470581 cnum:322.0\n",
      "Train acc:   0.6171875 loss:0.7788094282150269 cnum:316.0\n",
      "Train acc:  0.57421875 loss:0.7758388519287109 cnum:294.0\n",
      "Train acc: 0.607421875 loss:0.7740648984909058 cnum:311.0\n",
      "Train acc: 0.595703125 loss:0.7748650312423706 cnum:305.0\n",
      "Train acc: 0.603515625 loss:0.7772486209869385 cnum:309.0\n",
      "Train acc: 0.603515625 loss:0.77573561668396 cnum:309.0\n",
      "Train acc: 0.587890625 loss:0.7766755819320679 cnum:301.0\n",
      "Train acc:     0.59375 loss:0.7754568457603455 cnum:304.0\n",
      "Train acc: 0.634765625 loss:0.774738073348999 cnum:325.0\n",
      "Train acc:  0.62890625 loss:0.778810977935791 cnum:322.0\n",
      "Train acc:  0.63671875 loss:0.772879958152771 cnum:326.0\n",
      "Train acc: 0.595703125 loss:0.7752219438552856 cnum:305.0\n",
      "Train acc:    0.578125 loss:0.7767820358276367 cnum:296.0\n",
      "Train acc: 0.630859375 loss:0.773021399974823 cnum:323.0\n",
      "Train acc: 0.587890625 loss:0.7751241326332092 cnum:301.0\n",
      "Train acc: 0.642578125 loss:0.7721863985061646 cnum:329.0\n",
      "Train acc: 0.576171875 loss:0.7812432050704956 cnum:295.0\n",
      "Train acc: 0.630859375 loss:0.775513231754303 cnum:323.0\n",
      "Train acc: 0.646484375 loss:0.7735414505004883 cnum:331.0\n",
      "Train acc: 0.626953125 loss:0.7768784761428833 cnum:321.0\n",
      "Train acc: 0.599609375 loss:0.7750993371009827 cnum:307.0\n",
      "Train acc: 0.619140625 loss:0.7754064202308655 cnum:317.0\n",
      "Train acc:   0.6015625 loss:0.7750427722930908 cnum:308.0\n",
      "Train acc:  0.66015625 loss:0.7716944813728333 cnum:338.0\n",
      "Train acc:  0.61328125 loss:0.7739201784133911 cnum:314.0\n",
      "Train acc:       0.625 loss:0.7738578915596008 cnum:320.0\n",
      "Train acc:   0.6015625 loss:0.7743822336196899 cnum:308.0\n",
      "Train acc: 0.611328125 loss:0.7748844027519226 cnum:313.0\n",
      "Train acc: 0.673828125 loss:0.7893620133399963 cnum:345.0\n",
      "Train acc: 0.572265625 loss:0.7790946960449219 cnum:293.0\n",
      "Train acc: 0.611328125 loss:0.7747076749801636 cnum:313.0\n",
      "Train acc: 0.564453125 loss:0.7818129658699036 cnum:289.0\n",
      "Train acc:  0.64453125 loss:0.7766135334968567 cnum:330.0\n",
      "Train acc: 0.599609375 loss:0.7750735282897949 cnum:307.0\n",
      "Train acc:   0.6171875 loss:0.7739245891571045 cnum:316.0\n",
      "Train acc: 0.646484375 loss:0.7730163931846619 cnum:331.0\n",
      "Train acc: 0.654296875 loss:0.7746074199676514 cnum:335.0\n",
      "Train acc: 0.630859375 loss:0.773224949836731 cnum:323.0\n",
      "Train acc:  0.62890625 loss:0.7732250690460205 cnum:322.0\n",
      "Train acc:    0.609375 loss:0.7740164399147034 cnum:312.0\n",
      "Train acc:     0.59375 loss:0.7854282855987549 cnum:304.0\n",
      "Train acc:    0.640625 loss:0.7756247520446777 cnum:328.0\n",
      "Train acc: 0.599609375 loss:0.7769062519073486 cnum:307.0\n",
      "Train acc: 0.572265625 loss:0.7772752642631531 cnum:293.0\n",
      "Train acc: 0.615234375 loss:0.7790570259094238 cnum:315.0\n",
      "Train acc: 0.615234375 loss:0.7737254500389099 cnum:315.0\n",
      "Train acc:  0.63671875 loss:0.7923803925514221 cnum:326.0\n",
      "Train acc:    0.640625 loss:0.7765570878982544 cnum:328.0\n",
      "Train acc: 0.630859375 loss:0.7730907201766968 cnum:323.0\n",
      "Train acc:   0.5859375 loss:0.7753134965896606 cnum:300.0\n",
      "Train acc: 0.630859375 loss:0.7728344202041626 cnum:323.0\n",
      "Train acc:  0.62109375 loss:0.7734500169754028 cnum:318.0\n",
      "Train acc: 0.595703125 loss:0.7811393141746521 cnum:305.0\n",
      "Train acc: 0.619140625 loss:0.7738132476806641 cnum:317.0\n",
      "Train acc:   0.6328125 loss:0.772832453250885 cnum:324.0\n",
      "Train acc:  0.64453125 loss:0.7723156213760376 cnum:330.0\n",
      "Train acc: 0.595703125 loss:0.7749446630477905 cnum:305.0\n",
      "Train acc:    0.640625 loss:0.7724575996398926 cnum:328.0\n",
      "Train acc:   0.6015625 loss:0.7746191024780273 cnum:308.0\n",
      "Train acc: 0.634765625 loss:0.7728807926177979 cnum:325.0\n",
      "Train acc:  0.59765625 loss:0.774498462677002 cnum:306.0\n",
      "Train acc: 0.638671875 loss:0.7731470465660095 cnum:327.0\n",
      "Train acc: 0.615234375 loss:0.7913837432861328 cnum:315.0\n",
      "Train acc: 0.642578125 loss:0.7732425928115845 cnum:329.0\n",
      "Train acc: 0.634765625 loss:0.7735550403594971 cnum:325.0\n",
      "Train acc:  0.64453125 loss:0.7746667861938477 cnum:330.0\n",
      "Train acc: 0.642578125 loss:0.7734487652778625 cnum:329.0\n",
      "Train acc:  0.67578125 loss:0.7706915140151978 cnum:346.0\n",
      "Train acc: 0.564453125 loss:0.7822078466415405 cnum:289.0\n",
      "Train acc: 0.607421875 loss:0.7760074734687805 cnum:311.0\n",
      "Train acc: 0.619140625 loss:0.7748249173164368 cnum:317.0\n",
      "Train acc:  0.62890625 loss:0.7730634808540344 cnum:322.0\n",
      "Train acc: 0.630859375 loss:0.7730220556259155 cnum:323.0\n",
      "Train acc:  0.61328125 loss:0.7741090655326843 cnum:314.0\n",
      "Train acc:   0.6328125 loss:0.7727140784263611 cnum:324.0\n",
      "Train acc: 0.599609375 loss:0.7746568918228149 cnum:307.0\n",
      "Train acc: 0.623046875 loss:0.7742116451263428 cnum:319.0\n",
      "Train acc: 0.599609375 loss:0.774707555770874 cnum:307.0\n",
      "Train acc: 0.623046875 loss:0.7746070027351379 cnum:319.0\n",
      "Train acc:  0.61328125 loss:0.7784797549247742 cnum:314.0\n",
      "Train acc: 0.599609375 loss:0.7755014300346375 cnum:307.0\n",
      "Train acc: 0.619140625 loss:0.7781652212142944 cnum:317.0\n",
      "Train acc:  0.62890625 loss:0.7730734348297119 cnum:322.0\n",
      "Train acc: 0.587890625 loss:0.7751572132110596 cnum:301.0\n",
      "Train acc:   0.6171875 loss:0.7787051796913147 cnum:316.0\n",
      "Train acc:  0.61328125 loss:0.7737410068511963 cnum:314.0\n",
      "Train acc:       0.625 loss:0.7854880094528198 cnum:320.0\n",
      "Train acc:   0.6171875 loss:0.7736445665359497 cnum:316.0\n",
      "Train acc:  0.62109375 loss:0.7734392881393433 cnum:318.0\n",
      "Train acc:    0.578125 loss:0.7761914134025574 cnum:296.0\n",
      "Train acc:   0.6328125 loss:0.7730387449264526 cnum:324.0\n",
      "Train acc:   0.6328125 loss:0.773529589176178 cnum:324.0\n",
      "Train acc: 0.599609375 loss:0.7745549082756042 cnum:307.0\n",
      "Train acc:  0.63671875 loss:0.7727779150009155 cnum:326.0\n",
      "Train acc: 0.607421875 loss:0.7748969197273254 cnum:311.0\n",
      "Train acc: 0.611328125 loss:0.7805874347686768 cnum:313.0\n",
      "Train acc: 0.599609375 loss:0.7746009826660156 cnum:307.0\n",
      "Train acc:  0.59765625 loss:0.7749109268188477 cnum:306.0\n",
      "Train acc: 0.634765625 loss:0.7735074758529663 cnum:325.0\n",
      "Train acc:       0.625 loss:0.7749648094177246 cnum:320.0\n",
      "Train acc: 0.615234375 loss:0.7737653255462646 cnum:315.0\n",
      "Train acc:   0.6171875 loss:0.773529052734375 cnum:316.0\n",
      "Train acc: 0.654296875 loss:0.7720804810523987 cnum:335.0\n",
      "Train acc: 0.587890625 loss:0.7904195189476013 cnum:301.0\n",
      "Train acc: 0.607421875 loss:0.7743573188781738 cnum:311.0\n",
      "Train acc:    0.640625 loss:0.7724415063858032 cnum:328.0\n",
      "Train acc: 0.603515625 loss:0.7763646245002747 cnum:309.0\n",
      "Train acc: 0.619140625 loss:0.773601770401001 cnum:317.0\n",
      "Train acc: 0.615234375 loss:0.7736668586730957 cnum:315.0\n",
      "Train acc: 0.623046875 loss:0.7732166647911072 cnum:319.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.619140625 loss:0.7733721137046814 cnum:317.0\n",
      "Train acc: 0.611328125 loss:0.7738445401191711 cnum:313.0\n",
      "Train acc: 0.603515625 loss:0.7900927066802979 cnum:309.0\n",
      "Train acc:   0.6171875 loss:0.7736740708351135 cnum:316.0\n",
      "Train acc: 0.607421875 loss:0.7739917635917664 cnum:311.0\n",
      "Train acc:  0.59765625 loss:0.7752361297607422 cnum:306.0\n",
      "Train acc:  0.64453125 loss:0.7729803323745728 cnum:330.0\n",
      "Train acc:   0.6015625 loss:0.7776384353637695 cnum:308.0\n",
      "Train acc: 0.626953125 loss:0.7730923295021057 cnum:321.0\n",
      "Train acc:   0.6328125 loss:0.7730269432067871 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7739576101303101 cnum:317.0\n",
      "Train acc:  0.58203125 loss:0.7760510444641113 cnum:298.0\n",
      "Train acc:  0.62109375 loss:0.7735158205032349 cnum:318.0\n",
      "Train acc:  0.61328125 loss:0.7736650705337524 cnum:314.0\n",
      "Train acc:    0.640625 loss:0.7731015086174011 cnum:328.0\n",
      "Train acc:    0.609375 loss:0.7756465673446655 cnum:312.0\n",
      "Train acc: 0.603515625 loss:0.7753193378448486 cnum:309.0\n",
      "Train acc:  0.61328125 loss:0.773674726486206 cnum:314.0\n",
      "Train acc: 0.611328125 loss:0.7737739682197571 cnum:313.0\n",
      "Train acc: 0.611328125 loss:0.7849404811859131 cnum:313.0\n",
      "Train acc:   0.6328125 loss:0.7727393507957458 cnum:324.0\n",
      "Train acc: 0.623046875 loss:0.7757292985916138 cnum:319.0\n",
      "Train acc: 0.619140625 loss:0.773505687713623 cnum:317.0\n",
      "Train acc: 0.603515625 loss:0.7742112874984741 cnum:309.0\n",
      "Train acc: 0.646484375 loss:0.7732982635498047 cnum:331.0\n",
      "Train acc:  0.62109375 loss:0.7735033631324768 cnum:318.0\n",
      "Train acc: 0.611328125 loss:0.7738431692123413 cnum:313.0\n",
      "Train acc: 0.595703125 loss:0.7751257419586182 cnum:305.0\n",
      "Train acc: 0.615234375 loss:0.7740994095802307 cnum:315.0\n",
      "Train acc:  0.62109375 loss:0.7734808325767517 cnum:318.0\n",
      "Train acc:       0.625 loss:0.7737365365028381 cnum:320.0\n",
      "Train acc:  0.61328125 loss:0.774072527885437 cnum:314.0\n",
      "Train acc: 0.560546875 loss:0.7768601179122925 cnum:287.0\n",
      "Train acc:  0.62890625 loss:0.7744998335838318 cnum:322.0\n",
      "Train acc:  0.58984375 loss:0.7755528092384338 cnum:302.0\n",
      "Train acc: 0.634765625 loss:0.7760794162750244 cnum:325.0\n",
      "Train acc: 0.591796875 loss:0.7750030159950256 cnum:303.0\n",
      "Train acc:  0.61328125 loss:0.7739341259002686 cnum:314.0\n",
      "Train acc: 0.611328125 loss:0.7737884521484375 cnum:313.0\n",
      "Train acc: 0.603515625 loss:0.7743875980377197 cnum:309.0\n",
      "Train acc: 0.638671875 loss:0.7752646207809448 cnum:327.0\n",
      "Train acc: 0.607421875 loss:0.7740170955657959 cnum:311.0\n",
      "Train acc: 0.615234375 loss:0.775066614151001 cnum:315.0\n",
      "Train acc:  0.59765625 loss:0.7746235132217407 cnum:306.0\n",
      "Train acc: 0.642578125 loss:0.7722399234771729 cnum:329.0\n",
      "Train acc:  0.60546875 loss:0.7906308770179749 cnum:310.0\n",
      "Train acc: 0.638671875 loss:0.772456169128418 cnum:327.0\n",
      "Train acc:  0.60546875 loss:0.7743629217147827 cnum:310.0\n",
      "Train acc:    0.640625 loss:0.7755860090255737 cnum:328.0\n",
      "Train acc: 0.607421875 loss:0.7739410400390625 cnum:311.0\n",
      "Train acc: 0.638671875 loss:0.7727925777435303 cnum:327.0\n",
      "Train acc:     0.65625 loss:0.7716995477676392 cnum:336.0\n",
      "Train acc:  0.61328125 loss:0.7737272381782532 cnum:314.0\n",
      "Train acc:    0.609375 loss:0.7741739749908447 cnum:312.0\n",
      "Train acc:   0.5859375 loss:0.77572101354599 cnum:300.0\n",
      "Train acc: 0.591796875 loss:0.7748613357543945 cnum:303.0\n",
      "Train acc: 0.650390625 loss:0.7779912948608398 cnum:333.0\n",
      "Train acc: 0.572265625 loss:0.7775015830993652 cnum:293.0\n",
      "Train acc:   0.6171875 loss:0.7740548849105835 cnum:316.0\n",
      "Train acc: 0.654296875 loss:0.7721261978149414 cnum:335.0\n",
      "Train acc: 0.630859375 loss:0.7729222178459167 cnum:323.0\n",
      "Train acc:  0.56640625 loss:0.776131808757782 cnum:290.0\n",
      "Train acc:       0.625 loss:0.77349454164505 cnum:320.0\n",
      "Train acc: 0.630859375 loss:0.7733823657035828 cnum:323.0\n",
      "Train acc:   0.5859375 loss:0.775428056716919 cnum:300.0\n",
      "Train acc:  0.62890625 loss:0.7730291485786438 cnum:322.0\n",
      "Train acc:     0.59375 loss:0.7750056385993958 cnum:304.0\n",
      "Train acc:  0.61328125 loss:0.7739642858505249 cnum:314.0\n",
      "Train acc: 0.603515625 loss:0.7747952342033386 cnum:309.0\n",
      "Train acc:       0.625 loss:0.7885518670082092 cnum:320.0\n",
      "Train acc:    0.609375 loss:0.7763575911521912 cnum:312.0\n",
      "Train acc:   0.6015625 loss:0.7747079133987427 cnum:308.0\n",
      "Train acc: 0.634765625 loss:0.7730218172073364 cnum:325.0\n",
      "Train acc: 0.615234375 loss:0.7739742994308472 cnum:315.0\n",
      "Train acc: 0.626953125 loss:0.7751291990280151 cnum:321.0\n",
      "Train acc:   0.6015625 loss:0.7748187780380249 cnum:308.0\n",
      "Train acc: 0.634765625 loss:0.7727727293968201 cnum:325.0\n",
      "Train acc:  0.61328125 loss:0.7777599096298218 cnum:314.0\n",
      "Train acc: 0.568359375 loss:0.7761780023574829 cnum:291.0\n",
      "Train acc:    0.609375 loss:0.7747652530670166 cnum:312.0\n",
      "Train acc: 0.623046875 loss:0.7732023000717163 cnum:319.0\n",
      "Train acc: 0.626953125 loss:0.7735409140586853 cnum:321.0\n",
      "Train acc: 0.599609375 loss:0.7754505276679993 cnum:307.0\n",
      "Train acc: 0.599609375 loss:0.7744085192680359 cnum:307.0\n",
      "Train acc: 0.607421875 loss:0.7740872502326965 cnum:311.0\n",
      "Train acc:    0.609375 loss:0.7740185260772705 cnum:312.0\n",
      "Train acc:   0.6328125 loss:0.7734848856925964 cnum:324.0\n",
      "Train acc:  0.67578125 loss:0.7706692218780518 cnum:346.0\n",
      "Train acc:    0.578125 loss:0.7756220102310181 cnum:296.0\n",
      "Train acc: 0.654296875 loss:0.7716255187988281 cnum:335.0\n",
      "Train acc:       0.625 loss:0.773390531539917 cnum:320.0\n",
      "Train acc: 0.634765625 loss:0.7726635932922363 cnum:325.0\n",
      "Train acc:  0.59765625 loss:0.7772529125213623 cnum:306.0\n",
      "Train acc: 0.642578125 loss:0.7728984951972961 cnum:329.0\n",
      "Train acc:   0.6171875 loss:0.7737396955490112 cnum:316.0\n",
      "Train acc: 0.626953125 loss:0.7731214165687561 cnum:321.0\n",
      "Train acc:  0.62890625 loss:0.7734007835388184 cnum:322.0\n",
      "Train acc: 0.607421875 loss:0.7741713523864746 cnum:311.0\n",
      "Train acc: 0.591796875 loss:0.7765229344367981 cnum:303.0\n",
      "Train acc:  0.57421875 loss:0.7756685614585876 cnum:294.0\n",
      "Train acc:       0.625 loss:0.7754449248313904 cnum:320.0\n",
      "Train acc: 0.591796875 loss:0.7747786641120911 cnum:303.0\n",
      "Train acc:  0.62890625 loss:0.7733582854270935 cnum:322.0\n",
      "Train acc: 0.623046875 loss:0.7738252282142639 cnum:319.0\n",
      "Train acc: 0.630859375 loss:0.7731746435165405 cnum:323.0\n",
      "Train acc: 0.611328125 loss:0.7740355134010315 cnum:313.0\n",
      "Train acc: 0.603515625 loss:0.7748176455497742 cnum:309.0\n",
      "Train acc:  0.62109375 loss:0.77394700050354 cnum:318.0\n",
      "Train acc: 0.580078125 loss:0.7761023640632629 cnum:297.0\n",
      "Train acc: 0.646484375 loss:0.7721884846687317 cnum:331.0\n",
      "Train acc:  0.62109375 loss:0.7738788723945618 cnum:318.0\n",
      "Train acc: 0.634765625 loss:0.7726988792419434 cnum:325.0\n",
      "Train acc: 0.599609375 loss:0.7744507193565369 cnum:307.0\n",
      "Train acc: 0.630859375 loss:0.773120641708374 cnum:323.0\n",
      "Train acc: 0.619140625 loss:0.7738428711891174 cnum:317.0\n",
      "Train acc:     0.59375 loss:0.7751933932304382 cnum:304.0\n",
      "Train acc: 0.619140625 loss:0.7743434906005859 cnum:317.0\n",
      "Train acc: 0.611328125 loss:0.7740508317947388 cnum:313.0\n",
      "Train acc: 0.595703125 loss:0.7766977548599243 cnum:305.0\n",
      "Train acc: 0.611328125 loss:0.7741748094558716 cnum:313.0\n",
      "Train acc:  0.64453125 loss:0.7721579074859619 cnum:330.0\n",
      "Train acc:   0.6015625 loss:0.7767696976661682 cnum:308.0\n",
      "Train acc:  0.58984375 loss:0.7751873731613159 cnum:302.0\n",
      "Train acc: 0.662109375 loss:0.7718528509140015 cnum:339.0\n",
      "Train acc: 0.626953125 loss:0.7742254137992859 cnum:321.0\n",
      "Train acc:  0.62109375 loss:0.7756122350692749 cnum:318.0\n",
      "Train acc: 0.587890625 loss:0.7751140594482422 cnum:301.0\n",
      "Train acc:   0.6328125 loss:0.773322582244873 cnum:324.0\n",
      "Train acc: 0.603515625 loss:0.7741572260856628 cnum:309.0\n",
      "Train acc: 0.634765625 loss:0.7728080749511719 cnum:325.0\n",
      "Train acc: 0.626953125 loss:0.7737651467323303 cnum:321.0\n",
      "Train acc:  0.59765625 loss:0.7744410037994385 cnum:306.0\n",
      "Train acc: 0.611328125 loss:0.7868257164955139 cnum:313.0\n",
      "Train acc: 0.607421875 loss:0.7759615778923035 cnum:311.0\n",
      "Train acc: 0.591796875 loss:0.774746298789978 cnum:303.0\n",
      "Train acc: 0.611328125 loss:0.7738525867462158 cnum:313.0\n",
      "Train acc:    0.609375 loss:0.7739276885986328 cnum:312.0\n",
      "Train acc: 0.650390625 loss:0.7721474766731262 cnum:333.0\n",
      "Train acc: 0.607421875 loss:0.7740581035614014 cnum:311.0\n",
      "Train acc:  0.64453125 loss:0.7720759510993958 cnum:330.0\n",
      "Train acc: 0.646484375 loss:0.7761297225952148 cnum:331.0\n",
      "Train acc: 0.583984375 loss:0.7751895785331726 cnum:299.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:    0.640625 loss:0.7881345152854919 cnum:328.0\n",
      "Train acc: 0.583984375 loss:0.7754080891609192 cnum:299.0\n",
      "Train acc: 0.595703125 loss:0.774911642074585 cnum:305.0\n",
      "Train acc: 0.587890625 loss:0.7752463221549988 cnum:301.0\n",
      "Train acc:   0.6171875 loss:0.7738121747970581 cnum:316.0\n",
      "Train acc:   0.5859375 loss:0.7771695852279663 cnum:300.0\n",
      "Train acc:    0.578125 loss:0.7757835388183594 cnum:296.0\n",
      "Train acc: 0.599609375 loss:0.776429295539856 cnum:307.0\n",
      "Train acc: 0.603515625 loss:0.7743708491325378 cnum:309.0\n",
      "Train acc:  0.65234375 loss:0.7718933820724487 cnum:334.0\n",
      "Train acc:  0.61328125 loss:0.7741585969924927 cnum:314.0\n",
      "Train acc:  0.59765625 loss:0.7763029336929321 cnum:306.0\n",
      "Train acc: 0.576171875 loss:0.77597975730896 cnum:295.0\n",
      "Train acc: 0.599609375 loss:0.7744908928871155 cnum:307.0\n",
      "Train acc: 0.607421875 loss:0.7743589282035828 cnum:311.0\n",
      "Train acc: 0.630859375 loss:0.7728341221809387 cnum:323.0\n",
      "Train acc:  0.62890625 loss:0.7740045189857483 cnum:322.0\n",
      "Train acc:  0.64453125 loss:0.7720987796783447 cnum:330.0\n",
      "Train acc:  0.66015625 loss:0.7726089954376221 cnum:338.0\n",
      "Train acc:    0.640625 loss:0.7744334936141968 cnum:328.0\n",
      "Train acc:   0.6015625 loss:0.7742765545845032 cnum:308.0\n",
      "Train acc: 0.638671875 loss:0.7726193070411682 cnum:327.0\n",
      "Train acc:   0.6015625 loss:0.775954008102417 cnum:308.0\n",
      "Train acc:  0.61328125 loss:0.7743390798568726 cnum:314.0\n",
      "Train acc: 0.615234375 loss:0.774017333984375 cnum:315.0\n",
      "Train acc: 0.634765625 loss:0.772777795791626 cnum:325.0\n",
      "Train acc: 0.599609375 loss:0.7743260860443115 cnum:307.0\n",
      "Train acc: 0.623046875 loss:0.7732142210006714 cnum:319.0\n",
      "Train acc: 0.642578125 loss:0.7724362015724182 cnum:329.0\n",
      "Train acc: 0.623046875 loss:0.7732057571411133 cnum:319.0\n",
      "Train acc: 0.623046875 loss:0.773482084274292 cnum:319.0\n",
      "Train acc: 0.587890625 loss:0.774946391582489 cnum:301.0\n",
      "Train acc: 0.619140625 loss:0.7867590188980103 cnum:317.0\n",
      "Train acc:  0.62109375 loss:0.773313581943512 cnum:318.0\n",
      "Train acc:   0.6171875 loss:0.7738762497901917 cnum:316.0\n",
      "Train acc: 0.634765625 loss:0.7733029127120972 cnum:325.0\n",
      "Train acc: 0.638671875 loss:0.7793159484863281 cnum:327.0\n",
      "Train acc:   0.6171875 loss:0.7734543681144714 cnum:316.0\n",
      "Train acc: 0.603515625 loss:0.7748863101005554 cnum:309.0\n",
      "Train acc: 0.630859375 loss:0.7730005383491516 cnum:323.0\n",
      "Train acc:  0.67578125 loss:0.7705483436584473 cnum:346.0\n",
      "Train acc:   0.6015625 loss:0.7742597460746765 cnum:308.0\n",
      "Train acc:     0.65625 loss:0.7717013955116272 cnum:336.0\n",
      "Train acc: 0.642578125 loss:0.7723841667175293 cnum:329.0\n",
      "Train acc: 0.626953125 loss:0.7729765772819519 cnum:321.0\n",
      "Train acc:    0.609375 loss:0.7759804725646973 cnum:312.0\n",
      "Train acc:   0.5703125 loss:0.7765556573867798 cnum:292.0\n",
      "Train acc:  0.59765625 loss:0.7748608589172363 cnum:306.0\n",
      "Train acc: 0.623046875 loss:0.7732700109481812 cnum:319.0\n",
      "Train acc:  0.60546875 loss:0.7794608473777771 cnum:310.0\n",
      "Train acc: 0.583984375 loss:0.7753145694732666 cnum:299.0\n",
      "Train acc:   0.6015625 loss:0.7785336971282959 cnum:308.0\n",
      "Train acc:   0.5859375 loss:0.775123119354248 cnum:300.0\n",
      "Train acc:   0.6171875 loss:0.7739245891571045 cnum:316.0\n",
      "Train acc:   0.6015625 loss:0.7777925729751587 cnum:308.0\n",
      "Train acc: 0.638671875 loss:0.7726565599441528 cnum:327.0\n",
      "Train acc:  0.61328125 loss:0.7742807865142822 cnum:314.0\n",
      "Train acc: 0.642578125 loss:0.7730962038040161 cnum:329.0\n",
      "Train acc:   0.6015625 loss:0.7743721008300781 cnum:308.0\n",
      "Train acc:  0.64453125 loss:0.7721679210662842 cnum:330.0\n",
      "Train acc: 0.615234375 loss:0.77357017993927 cnum:315.0\n",
      "Train acc:  0.60546875 loss:0.7744967937469482 cnum:310.0\n",
      "Train acc: 0.595703125 loss:0.7745823264122009 cnum:305.0\n",
      "Train acc:  0.63671875 loss:0.7733117938041687 cnum:326.0\n",
      "Train acc: 0.603515625 loss:0.7745645046234131 cnum:309.0\n",
      "Train acc:  0.60546875 loss:0.7741778492927551 cnum:310.0\n",
      "Train acc:   0.6171875 loss:0.7750191688537598 cnum:316.0\n",
      "Train acc:  0.66796875 loss:0.7752558588981628 cnum:342.0\n",
      "Train acc: 0.619140625 loss:0.7846310138702393 cnum:317.0\n",
      "Train acc:    0.640625 loss:0.7722780704498291 cnum:328.0\n",
      "Train acc:  0.61328125 loss:0.7737265825271606 cnum:314.0\n",
      "Train acc: 0.611328125 loss:0.7738388180732727 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.774799108505249 cnum:306.0\n",
      "Train acc:  0.62109375 loss:0.7740684747695923 cnum:318.0\n",
      "Train acc: 0.607421875 loss:0.7739979028701782 cnum:311.0\n",
      "Train acc: 0.626953125 loss:0.7731829881668091 cnum:321.0\n",
      "Train acc:  0.62890625 loss:0.7729155421257019 cnum:322.0\n",
      "Train acc:   0.6171875 loss:0.7735733389854431 cnum:316.0\n",
      "Train acc: 0.587890625 loss:0.7778945565223694 cnum:301.0\n",
      "Train acc: 0.634765625 loss:0.7728593349456787 cnum:325.0\n",
      "Train acc:    0.578125 loss:0.7758495807647705 cnum:296.0\n",
      "Train acc:     0.59375 loss:0.7751674652099609 cnum:304.0\n",
      "Train acc:  0.60546875 loss:0.7885343432426453 cnum:310.0\n",
      "Train acc:   0.6328125 loss:0.7729408740997314 cnum:324.0\n",
      "Train acc: 0.623046875 loss:0.7732015252113342 cnum:319.0\n",
      "Train acc:  0.63671875 loss:0.7731277942657471 cnum:326.0\n",
      "Dev acc:     0.65625 loss0.7717800736427307\n",
      "Dev acc:   0.6640625 loss0.7711033821105957\n",
      "Dev acc:    0.640625 loss0.7727429866790771\n",
      "Dev acc:     0.65625 loss0.7728044390678406\n",
      "Dev acc:  0.67578125 loss0.7710023522377014\n",
      "Dev acc:  0.68359375 loss0.7701894044876099\n",
      "Dev acc:   0.6796875 loss0.7707334756851196\n",
      "Dev acc:  0.71484375 loss0.7688203454017639\n",
      "Dev acc:     0.65625 loss0.7716857194900513\n",
      "Dev acc:  0.63671875 loss0.7726812362670898\n",
      "Dev acc:   0.6953125 loss0.7698673605918884\n",
      "Dev acc:   0.6953125 loss0.7706373333930969\n",
      "Dev acc:  0.69140625 loss0.7700427770614624\n",
      "Dev acc:   0.6484375 loss0.7859846353530884\n",
      "Dev acc:   0.6484375 loss0.7718912959098816\n",
      "Dev acc:       0.625 loss0.7731888294219971\n",
      "Dev acc:     0.65625 loss0.7716084718704224\n",
      "Dev acc:  0.65234375 loss0.7717002630233765\n",
      "Dev acc:      0.6875 loss0.7699407339096069\n",
      "Dev acc:    0.671875 loss0.7710906267166138\n",
      "Dev acc:   0.6640625 loss0.7711164355278015\n",
      "Dev acc:  0.66796875 loss0.7715396881103516\n",
      "Dev acc:  0.61328125 loss0.7737368941307068\n",
      "Dev acc:    0.640625 loss0.7728861570358276\n",
      "Dev acc:  0.66796875 loss0.7728207111358643\n",
      "Dev acc:    0.703125 loss0.7693290710449219\n",
      "Dev acc:       0.625 loss0.7731738090515137\n",
      "Dev acc:   0.6328125 loss0.7938094735145569\n",
      "Dev acc:   0.6171875 loss0.773998498916626\n",
      "Dev acc:   0.6328125 loss0.7742831110954285\n",
      "Dev acc:  0.68359375 loss0.7707204818725586\n",
      "Dev acc:  0.65234375 loss0.7716804146766663\n",
      "Dev acc:    0.671875 loss0.7707873582839966\n",
      "Dev acc:     0.71875 loss0.7695870399475098\n",
      "Dev acc:  0.60546875 loss0.7740226984024048\n",
      "Dev acc:       0.625 loss0.7731544971466064\n",
      "Dev acc:  0.72265625 loss0.768308699131012\n",
      "Dev acc:   0.6484375 loss0.7719964385032654\n",
      "Dev acc:    0.703125 loss0.7695275545120239\n",
      "Dev acc:  0.67578125 loss0.7709795236587524\n",
      "Dev acc:       0.625 loss0.775860607624054\n",
      "Dev acc:  0.71484375 loss0.7688812613487244\n",
      "Dev acc:   0.6796875 loss0.7702840566635132\n",
      "Dev acc:  0.65234375 loss0.7717612385749817\n",
      "Dev acc:    0.640625 loss0.7725865244865417\n",
      "Dev acc:  0.63671875 loss0.7725300788879395\n",
      "Dev acc:    0.640625 loss0.7724087834358215\n",
      "Dev acc:     0.65625 loss0.7739126682281494\n",
      "Dev acc:     0.71875 loss0.7686794996261597\n",
      "Dev acc:  0.62890625 loss0.7730743288993835\n",
      "Dev acc:  0.66796875 loss0.7713046669960022\n",
      "Dev acc:   0.6484375 loss0.7719402313232422\n",
      "Dev acc:  0.62890625 loss0.7899016737937927\n",
      "Dev acc:  0.69140625 loss0.7701354622840881\n",
      "Dev acc:  0.64453125 loss0.7721920013427734\n",
      "Dev acc:  0.64453125 loss0.7722427248954773\n",
      "Dev acc:     0.65625 loss0.7721849679946899\n",
      "Dev acc:    0.640625 loss0.7726917266845703\n",
      "Dev acc:   0.6484375 loss0.7719919681549072\n",
      "Dev acc:     0.65625 loss0.8023567199707031\n",
      "Dev acc:  0.66796875 loss0.7712339162826538\n",
      "Dev acc:   0.6640625 loss0.771181046962738\n",
      "Dev acc:       0.625 loss0.7730392217636108\n",
      "Dev acc:   0.6640625 loss0.771111011505127\n",
      "Dev acc:  0.64453125 loss0.7721875309944153\n",
      "Dev acc:    0.671875 loss0.7808313369750977\n",
      "Dev acc:  0.67578125 loss0.7706902623176575\n",
      "Dev acc:    0.578125 loss0.7797046303749084\n",
      "Dev acc:  0.66015625 loss0.7756049036979675\n",
      "Dev acc:  0.68359375 loss0.7702417373657227\n",
      "Dev acc:     0.65625 loss0.7715792059898376\n",
      "Dev acc:   0.6171875 loss0.7735497355461121\n",
      "Dev acc:  0.68359375 loss0.789214015007019\n",
      "Dev acc:      0.6875 loss0.76995450258255\n",
      "Dev acc:  0.67578125 loss0.770600438117981\n",
      "Dev acc:     0.65625 loss0.7726718783378601\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Dev acc:    0.671875 loss0.7713804841041565\n",
      "Dev acc:  0.73046875 loss0.7678526639938354\n",
      "Dev acc:     0.65625 loss0.7716798186302185\n",
      "Dev acc:  0.72265625 loss0.7706717848777771\n",
      "Dev acc:    0.671875 loss0.7708719968795776\n",
      "Dev acc:  0.66015625 loss0.80232834815979\n",
      "Dev acc:     0.65625 loss0.7716879844665527\n",
      "Dev acc:  0.62890625 loss0.7734383344650269\n",
      "Dev acc:   0.6640625 loss0.7718008160591125\n",
      "Dev acc:    0.640625 loss0.7725926041603088\n",
      "Dev acc:  0.62109375 loss0.7733063697814941\n",
      "Dev acc:      0.6875 loss0.7699466943740845\n",
      "Dev acc:  0.63671875 loss0.7727704644203186\n",
      "Dev acc:   0.7109375 loss0.7687869668006897\n",
      "Dev acc:  0.65234375 loss0.7717844247817993\n",
      "Dev acc:  0.68359375 loss0.7706751823425293\n",
      "Dev acc:  0.64453125 loss0.7720850110054016\n",
      "Dev acc:  0.69921875 loss0.7693706750869751\n",
      "Dev acc:   0.6953125 loss0.7695124745368958\n",
      "Dev acc:   0.6328125 loss0.7729002237319946\n",
      "Dev acc:  0.68359375 loss0.770115852355957\n",
      "Dev acc:  0.66796875 loss0.7711660265922546\n",
      "Dev acc:       0.625 loss0.7748828530311584\n",
      "Dev acc:  0.65234375 loss0.7752301096916199\n",
      "Dev acc:  0.62890625 loss0.7759556770324707\n",
      "Dev acc:   0.6328125 loss0.7732745409011841\n",
      "Dev acc:  0.67578125 loss0.7706414461135864\n",
      "Dev acc:  0.66015625 loss0.7716613411903381\n",
      "Dev acc:  0.64453125 loss0.7721559405326843\n",
      "Dev acc:   0.6171875 loss0.775341272354126\n",
      "Dev acc:     0.65625 loss0.7717192769050598\n",
      "Total Dev acc:0.6603022780373832\n",
      "Step:2\n",
      "Train acc: 0.615234375 loss:0.7740597724914551 cnum:315.0\n",
      "Train acc: 0.646484375 loss:0.7769983410835266 cnum:331.0\n",
      "Train acc:   0.6328125 loss:0.7737788558006287 cnum:324.0\n",
      "Train acc: 0.630859375 loss:0.7727608680725098 cnum:323.0\n",
      "Train acc:  0.62109375 loss:0.7732672691345215 cnum:318.0\n",
      "Train acc:    0.609375 loss:0.7739022970199585 cnum:312.0\n",
      "Train acc: 0.650390625 loss:0.7718924283981323 cnum:333.0\n",
      "Train acc:   0.6015625 loss:0.774404764175415 cnum:308.0\n",
      "Train acc:  0.62109375 loss:0.7801766991615295 cnum:318.0\n",
      "Train acc:  0.58203125 loss:0.7753764986991882 cnum:298.0\n",
      "Train acc:   0.6328125 loss:0.7732591032981873 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7747023701667786 cnum:317.0\n",
      "Train acc:  0.62890625 loss:0.7729809880256653 cnum:322.0\n",
      "Train acc:    0.609375 loss:0.7749134302139282 cnum:312.0\n",
      "Train acc: 0.615234375 loss:0.7763616442680359 cnum:315.0\n",
      "Train acc:  0.62109375 loss:0.7732679843902588 cnum:318.0\n",
      "Train acc:  0.62109375 loss:0.7738198637962341 cnum:318.0\n",
      "Train acc:  0.62890625 loss:0.7729982733726501 cnum:322.0\n",
      "Train acc: 0.638671875 loss:0.7725530862808228 cnum:327.0\n",
      "Train acc:   0.6328125 loss:0.7729272842407227 cnum:324.0\n",
      "Train acc:   0.6328125 loss:0.7731277346611023 cnum:324.0\n",
      "Train acc: 0.623046875 loss:0.7731516361236572 cnum:319.0\n",
      "Train acc: 0.669921875 loss:0.7710084319114685 cnum:343.0\n",
      "Train acc:   0.6171875 loss:0.7736393213272095 cnum:316.0\n",
      "Train acc: 0.615234375 loss:0.7737287282943726 cnum:315.0\n",
      "Train acc:  0.63671875 loss:0.772494912147522 cnum:326.0\n",
      "Train acc: 0.595703125 loss:0.7746689915657043 cnum:305.0\n",
      "Train acc:       0.625 loss:0.7732372879981995 cnum:320.0\n",
      "Train acc: 0.615234375 loss:0.7762631177902222 cnum:315.0\n",
      "Train acc:  0.62109375 loss:0.7732530832290649 cnum:318.0\n",
      "Train acc: 0.654296875 loss:0.7720195055007935 cnum:335.0\n",
      "Train acc:  0.62109375 loss:0.7733039259910583 cnum:318.0\n",
      "Train acc: 0.654296875 loss:0.7763152122497559 cnum:335.0\n",
      "Train acc:   0.6328125 loss:0.7727638483047485 cnum:324.0\n",
      "Train acc:   0.6015625 loss:0.7752096652984619 cnum:308.0\n",
      "Train acc: 0.638671875 loss:0.7725769877433777 cnum:327.0\n",
      "Train acc: 0.634765625 loss:0.773905873298645 cnum:325.0\n",
      "Train acc:  0.59765625 loss:0.7747771739959717 cnum:306.0\n",
      "Train acc: 0.630859375 loss:0.7730779051780701 cnum:323.0\n",
      "Train acc: 0.642578125 loss:0.772292971611023 cnum:329.0\n",
      "Train acc: 0.599609375 loss:0.7743768095970154 cnum:307.0\n",
      "Train acc: 0.619140625 loss:0.7736462354660034 cnum:317.0\n",
      "Train acc: 0.607421875 loss:0.7746319770812988 cnum:311.0\n",
      "Train acc: 0.607421875 loss:0.7742884159088135 cnum:311.0\n",
      "Train acc:  0.58203125 loss:0.7752773761749268 cnum:298.0\n",
      "Train acc: 0.646484375 loss:0.7832068204879761 cnum:331.0\n",
      "Train acc: 0.638671875 loss:0.7725741863250732 cnum:327.0\n",
      "Train acc:  0.62109375 loss:0.7735529541969299 cnum:318.0\n",
      "Train acc: 0.611328125 loss:0.774294376373291 cnum:313.0\n",
      "Train acc: 0.658203125 loss:0.7714427709579468 cnum:337.0\n",
      "Train acc:  0.60546875 loss:0.7744660377502441 cnum:310.0\n",
      "Train acc:     0.59375 loss:0.7746254205703735 cnum:304.0\n",
      "Train acc: 0.607421875 loss:0.7739993929862976 cnum:311.0\n",
      "Train acc: 0.669921875 loss:0.7711554765701294 cnum:343.0\n",
      "Train acc: 0.626953125 loss:0.7729934453964233 cnum:321.0\n",
      "Train acc: 0.666015625 loss:0.7714030742645264 cnum:341.0\n",
      "Train acc: 0.607421875 loss:0.7742069959640503 cnum:311.0\n",
      "Train acc: 0.611328125 loss:0.7739739418029785 cnum:313.0\n",
      "Train acc: 0.587890625 loss:0.7751824855804443 cnum:301.0\n",
      "Train acc:  0.60546875 loss:0.7748352289199829 cnum:310.0\n",
      "Train acc: 0.642578125 loss:0.772344708442688 cnum:329.0\n",
      "Train acc: 0.611328125 loss:0.7824106812477112 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.7745001912117004 cnum:306.0\n",
      "Train acc:   0.6015625 loss:0.7743170261383057 cnum:308.0\n",
      "Train acc:  0.58203125 loss:0.7761837244033813 cnum:298.0\n",
      "Train acc: 0.638671875 loss:0.7727547883987427 cnum:327.0\n",
      "Train acc:  0.64453125 loss:0.7721770405769348 cnum:330.0\n",
      "Train acc:     0.59375 loss:0.775290846824646 cnum:304.0\n",
      "Train acc:       0.625 loss:0.7735438346862793 cnum:320.0\n",
      "Train acc: 0.619140625 loss:0.77479088306427 cnum:317.0\n",
      "Train acc: 0.611328125 loss:0.7739355564117432 cnum:313.0\n",
      "Train acc: 0.591796875 loss:0.776145875453949 cnum:303.0\n",
      "Train acc: 0.634765625 loss:0.7730144262313843 cnum:325.0\n",
      "Train acc: 0.615234375 loss:0.7736198902130127 cnum:315.0\n",
      "Train acc:  0.60546875 loss:0.7780188918113708 cnum:310.0\n",
      "Train acc:   0.5703125 loss:0.7758311033248901 cnum:292.0\n",
      "Train acc: 0.646484375 loss:0.7720281481742859 cnum:331.0\n",
      "Train acc: 0.611328125 loss:0.7739031314849854 cnum:313.0\n",
      "Train acc:   0.6328125 loss:0.7727211117744446 cnum:324.0\n",
      "Train acc: 0.626953125 loss:0.7750793695449829 cnum:321.0\n",
      "Train acc:    0.578125 loss:0.775466799736023 cnum:296.0\n",
      "Train acc:  0.66015625 loss:0.7713873982429504 cnum:338.0\n",
      "Train acc:   0.6015625 loss:0.7742525935173035 cnum:308.0\n",
      "Train acc: 0.595703125 loss:0.7745782136917114 cnum:305.0\n",
      "Train acc: 0.642578125 loss:0.7722771167755127 cnum:329.0\n",
      "Train acc:   0.6015625 loss:0.7745126485824585 cnum:308.0\n",
      "Train acc: 0.599609375 loss:0.774402379989624 cnum:307.0\n",
      "Train acc: 0.615234375 loss:0.7736091613769531 cnum:315.0\n",
      "Train acc: 0.623046875 loss:0.7760519981384277 cnum:319.0\n",
      "Train acc: 0.634765625 loss:0.7725893259048462 cnum:325.0\n",
      "Train acc: 0.607421875 loss:0.7739428281784058 cnum:311.0\n",
      "Train acc: 0.595703125 loss:0.7749255895614624 cnum:305.0\n",
      "Train acc:   0.6171875 loss:0.7735782265663147 cnum:316.0\n",
      "Train acc:     0.65625 loss:0.7716731429100037 cnum:336.0\n",
      "Train acc: 0.638671875 loss:0.77241450548172 cnum:327.0\n",
      "Train acc: 0.615234375 loss:0.7737566232681274 cnum:315.0\n",
      "Train acc:    0.609375 loss:0.7741286754608154 cnum:312.0\n",
      "Train acc:  0.61328125 loss:0.7736459970474243 cnum:314.0\n",
      "Train acc: 0.556640625 loss:0.790281355381012 cnum:285.0\n",
      "Train acc:   0.6328125 loss:0.7727335095405579 cnum:324.0\n",
      "Train acc: 0.630859375 loss:0.7729700207710266 cnum:323.0\n",
      "Train acc: 0.591796875 loss:0.7747603058815002 cnum:303.0\n",
      "Train acc: 0.615234375 loss:0.7738165855407715 cnum:315.0\n",
      "Train acc:   0.6171875 loss:0.7734938859939575 cnum:316.0\n",
      "Train acc:  0.58203125 loss:0.7763163447380066 cnum:298.0\n",
      "Train acc: 0.611328125 loss:0.7739313840866089 cnum:313.0\n",
      "Train acc:   0.6171875 loss:0.7773470878601074 cnum:316.0\n",
      "Train acc: 0.615234375 loss:0.7750861644744873 cnum:315.0\n",
      "Train acc:   0.5546875 loss:0.7769122123718262 cnum:284.0\n",
      "Train acc: 0.626953125 loss:0.7732625007629395 cnum:321.0\n",
      "Train acc:    0.578125 loss:0.7754681706428528 cnum:296.0\n",
      "Train acc:  0.63671875 loss:0.7757555842399597 cnum:326.0\n",
      "Train acc:  0.62109375 loss:0.773482084274292 cnum:318.0\n",
      "Train acc: 0.623046875 loss:0.7731566429138184 cnum:319.0\n",
      "Train acc:   0.6015625 loss:0.7750943303108215 cnum:308.0\n",
      "Train acc: 0.650390625 loss:0.7727574110031128 cnum:333.0\n",
      "Train acc:       0.625 loss:0.7734313011169434 cnum:320.0\n",
      "Train acc:   0.6484375 loss:0.7720527648925781 cnum:332.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.595703125 loss:0.7745487689971924 cnum:305.0\n",
      "Train acc: 0.591796875 loss:0.7747704982757568 cnum:303.0\n",
      "Train acc: 0.638671875 loss:0.7724767327308655 cnum:327.0\n",
      "Train acc: 0.603515625 loss:0.7741913795471191 cnum:309.0\n",
      "Train acc:  0.62109375 loss:0.7732595205307007 cnum:318.0\n",
      "Train acc:  0.64453125 loss:0.7720783352851868 cnum:330.0\n",
      "Train acc: 0.623046875 loss:0.7732986211776733 cnum:319.0\n",
      "Train acc:  0.62890625 loss:0.772890567779541 cnum:322.0\n",
      "Train acc: 0.623046875 loss:0.7741677761077881 cnum:319.0\n",
      "Train acc: 0.615234375 loss:0.7736814022064209 cnum:315.0\n",
      "Train acc:     0.59375 loss:0.7750072479248047 cnum:304.0\n",
      "Train acc:  0.61328125 loss:0.7736984491348267 cnum:314.0\n",
      "Train acc: 0.568359375 loss:0.7766240239143372 cnum:291.0\n",
      "Train acc: 0.595703125 loss:0.7749732732772827 cnum:305.0\n",
      "Train acc: 0.619140625 loss:0.7734215259552002 cnum:317.0\n",
      "Train acc: 0.560546875 loss:0.778161883354187 cnum:287.0\n",
      "Train acc: 0.599609375 loss:0.7750763893127441 cnum:307.0\n",
      "Train acc: 0.626953125 loss:0.7730038166046143 cnum:321.0\n",
      "Train acc: 0.623046875 loss:0.7742580771446228 cnum:319.0\n",
      "Train acc:   0.5859375 loss:0.7752436399459839 cnum:300.0\n",
      "Train acc:   0.6015625 loss:0.774634599685669 cnum:308.0\n",
      "Train acc:  0.60546875 loss:0.7740820050239563 cnum:310.0\n",
      "Train acc:  0.60546875 loss:0.77424556016922 cnum:310.0\n",
      "Train acc:   0.6171875 loss:0.7870153188705444 cnum:316.0\n",
      "Train acc: 0.615234375 loss:0.7735856175422668 cnum:315.0\n",
      "Train acc: 0.638671875 loss:0.7725090980529785 cnum:327.0\n",
      "Train acc: 0.615234375 loss:0.7736340165138245 cnum:315.0\n",
      "Train acc: 0.626953125 loss:0.7737786173820496 cnum:321.0\n",
      "Train acc:   0.6015625 loss:0.7742997407913208 cnum:308.0\n",
      "Train acc: 0.630859375 loss:0.7730390429496765 cnum:323.0\n",
      "Train acc:  0.58984375 loss:0.7753286957740784 cnum:302.0\n",
      "Train acc: 0.650390625 loss:0.7718486785888672 cnum:333.0\n",
      "Train acc:  0.66015625 loss:0.771319568157196 cnum:338.0\n",
      "Train acc:       0.625 loss:0.7731442451477051 cnum:320.0\n",
      "Train acc: 0.611328125 loss:0.7739583253860474 cnum:313.0\n",
      "Train acc:    0.578125 loss:0.775378406047821 cnum:296.0\n",
      "Train acc:    0.578125 loss:0.776531994342804 cnum:296.0\n",
      "Train acc: 0.607421875 loss:0.7758562564849854 cnum:311.0\n",
      "Train acc: 0.638671875 loss:0.7724483013153076 cnum:327.0\n",
      "Train acc:  0.58984375 loss:0.7768106460571289 cnum:302.0\n",
      "Train acc:   0.6171875 loss:0.7748308181762695 cnum:316.0\n",
      "Train acc:    0.578125 loss:0.7755037546157837 cnum:296.0\n",
      "Train acc: 0.591796875 loss:0.7750495076179504 cnum:303.0\n",
      "Train acc: 0.630859375 loss:0.7855132222175598 cnum:323.0\n",
      "Train acc:   0.6171875 loss:0.7742770910263062 cnum:316.0\n",
      "Train acc: 0.646484375 loss:0.7723661661148071 cnum:331.0\n",
      "Train acc:  0.62890625 loss:0.7728687524795532 cnum:322.0\n",
      "Train acc:  0.64453125 loss:0.7722011208534241 cnum:330.0\n",
      "Train acc: 0.611328125 loss:0.7737273573875427 cnum:313.0\n",
      "Train acc: 0.619140625 loss:0.7734099626541138 cnum:317.0\n",
      "Train acc: 0.658203125 loss:0.7715598344802856 cnum:337.0\n",
      "Train acc:  0.62109375 loss:0.7732594609260559 cnum:318.0\n",
      "Train acc: 0.615234375 loss:0.7736645936965942 cnum:315.0\n",
      "Train acc: 0.615234375 loss:0.7738283276557922 cnum:315.0\n",
      "Train acc:   0.5859375 loss:0.7766772508621216 cnum:300.0\n",
      "Train acc:  0.62109375 loss:0.7732971906661987 cnum:318.0\n",
      "Train acc:       0.625 loss:0.7754848003387451 cnum:320.0\n",
      "Train acc: 0.634765625 loss:0.7734587788581848 cnum:325.0\n",
      "Train acc:  0.62109375 loss:0.775907039642334 cnum:318.0\n",
      "Train acc:  0.60546875 loss:0.7809094786643982 cnum:310.0\n",
      "Train acc: 0.615234375 loss:0.7736700177192688 cnum:315.0\n",
      "Train acc:  0.66015625 loss:0.7718446850776672 cnum:338.0\n",
      "Train acc: 0.638671875 loss:0.7726362347602844 cnum:327.0\n",
      "Train acc:  0.62109375 loss:0.7737013101577759 cnum:318.0\n",
      "Train acc: 0.611328125 loss:0.7738208770751953 cnum:313.0\n",
      "Train acc: 0.634765625 loss:0.7726479768753052 cnum:325.0\n",
      "Train acc:  0.62109375 loss:0.7739200592041016 cnum:318.0\n",
      "Train acc: 0.677734375 loss:0.770460307598114 cnum:347.0\n",
      "Train acc:  0.62890625 loss:0.7877345085144043 cnum:322.0\n",
      "Train acc:   0.6171875 loss:0.7737484574317932 cnum:316.0\n",
      "Train acc:   0.6171875 loss:0.7735055685043335 cnum:316.0\n",
      "Train acc:  0.58984375 loss:0.774785041809082 cnum:302.0\n",
      "Train acc:   0.6328125 loss:0.772741436958313 cnum:324.0\n",
      "Train acc:  0.62890625 loss:0.7737026810646057 cnum:322.0\n",
      "Train acc: 0.603515625 loss:0.7743151187896729 cnum:309.0\n",
      "Train acc:  0.62890625 loss:0.7732775211334229 cnum:322.0\n",
      "Train acc: 0.607421875 loss:0.7749989628791809 cnum:311.0\n",
      "Train acc: 0.634765625 loss:0.772624135017395 cnum:325.0\n",
      "Train acc:  0.61328125 loss:0.7738415598869324 cnum:314.0\n",
      "Train acc:  0.62109375 loss:0.7732987403869629 cnum:318.0\n",
      "Train acc:  0.59765625 loss:0.7744489908218384 cnum:306.0\n",
      "Train acc: 0.626953125 loss:0.7732577323913574 cnum:321.0\n",
      "Train acc: 0.634765625 loss:0.7728835940361023 cnum:325.0\n",
      "Train acc: 0.587890625 loss:0.7756232023239136 cnum:301.0\n",
      "Train acc: 0.580078125 loss:0.7753946185112 cnum:297.0\n",
      "Train acc: 0.615234375 loss:0.7745245099067688 cnum:315.0\n",
      "Train acc: 0.607421875 loss:0.7740118503570557 cnum:311.0\n",
      "Train acc:       0.625 loss:0.7886087894439697 cnum:320.0\n",
      "Train acc: 0.615234375 loss:0.7743083238601685 cnum:315.0\n",
      "Train acc:    0.671875 loss:0.7707141637802124 cnum:344.0\n",
      "Train acc: 0.646484375 loss:0.7721322774887085 cnum:331.0\n",
      "Train acc: 0.580078125 loss:0.7753673791885376 cnum:297.0\n",
      "Train acc:   0.6328125 loss:0.7726877927780151 cnum:324.0\n",
      "Train acc: 0.634765625 loss:0.7732232213020325 cnum:325.0\n",
      "Train acc:  0.63671875 loss:0.7725110054016113 cnum:326.0\n",
      "Train acc: 0.611328125 loss:0.7737469673156738 cnum:313.0\n",
      "Train acc: 0.599609375 loss:0.7743169665336609 cnum:307.0\n",
      "Train acc:  0.61328125 loss:0.7760648727416992 cnum:314.0\n",
      "Train acc:   0.6015625 loss:0.7742891907691956 cnum:308.0\n",
      "Train acc: 0.564453125 loss:0.7763655185699463 cnum:289.0\n",
      "Train acc: 0.607421875 loss:0.7740040421485901 cnum:311.0\n",
      "Train acc: 0.607421875 loss:0.7740204930305481 cnum:311.0\n",
      "Train acc:    0.640625 loss:0.7726032137870789 cnum:328.0\n",
      "Train acc: 0.587890625 loss:0.7752959728240967 cnum:301.0\n",
      "Train acc:  0.58984375 loss:0.776081383228302 cnum:302.0\n",
      "Train acc: 0.603515625 loss:0.7751438021659851 cnum:309.0\n",
      "Train acc:  0.62890625 loss:0.7730560898780823 cnum:322.0\n",
      "Train acc: 0.587890625 loss:0.7751558423042297 cnum:301.0\n",
      "Train acc: 0.634765625 loss:0.7818790078163147 cnum:325.0\n",
      "Train acc:  0.62890625 loss:0.7729119062423706 cnum:322.0\n",
      "Train acc:  0.62890625 loss:0.7728940844535828 cnum:322.0\n",
      "Train acc: 0.591796875 loss:0.7748234272003174 cnum:303.0\n",
      "Train acc: 0.626953125 loss:0.7751112580299377 cnum:321.0\n",
      "Train acc:       0.625 loss:0.7732261419296265 cnum:320.0\n",
      "Train acc: 0.603515625 loss:0.7745583653450012 cnum:309.0\n",
      "Train acc:       0.625 loss:0.7730848789215088 cnum:320.0\n",
      "Train acc:   0.6328125 loss:0.7729083895683289 cnum:324.0\n",
      "Train acc: 0.595703125 loss:0.774620771408081 cnum:305.0\n",
      "Train acc:   0.6484375 loss:0.771862268447876 cnum:332.0\n",
      "Train acc:  0.64453125 loss:0.7720884680747986 cnum:330.0\n",
      "Train acc:    0.609375 loss:0.7753157615661621 cnum:312.0\n",
      "Train acc: 0.572265625 loss:0.7841991782188416 cnum:293.0\n",
      "Train acc:  0.57421875 loss:0.7756445407867432 cnum:294.0\n",
      "Train acc:    0.609375 loss:0.7738685011863708 cnum:312.0\n",
      "Train acc:   0.6328125 loss:0.7741919755935669 cnum:324.0\n",
      "Train acc: 0.615234375 loss:0.7738157510757446 cnum:315.0\n",
      "Train acc:   0.6015625 loss:0.7751644253730774 cnum:308.0\n",
      "Train acc:   0.6328125 loss:0.7756119966506958 cnum:324.0\n",
      "Train acc: 0.591796875 loss:0.774924635887146 cnum:303.0\n",
      "Train acc: 0.666015625 loss:0.770962655544281 cnum:341.0\n",
      "Train acc:  0.61328125 loss:0.773675799369812 cnum:314.0\n",
      "Train acc: 0.650390625 loss:0.7719144821166992 cnum:333.0\n",
      "Train acc:    0.609375 loss:0.7738736867904663 cnum:312.0\n",
      "Train acc:  0.62109375 loss:0.7732764482498169 cnum:318.0\n",
      "Train acc: 0.619140625 loss:0.77500319480896 cnum:317.0\n",
      "Train acc: 0.623046875 loss:0.7735794186592102 cnum:319.0\n",
      "Train acc:    0.640625 loss:0.7724354863166809 cnum:328.0\n",
      "Train acc:   0.6171875 loss:0.7734577655792236 cnum:316.0\n",
      "Train acc:  0.60546875 loss:0.7740379571914673 cnum:310.0\n",
      "Train acc:   0.5859375 loss:0.775284469127655 cnum:300.0\n",
      "Train acc:  0.63671875 loss:0.7744598984718323 cnum:326.0\n",
      "Train acc:   0.6015625 loss:0.7741891145706177 cnum:308.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.642578125 loss:0.7723941802978516 cnum:329.0\n",
      "Train acc:       0.625 loss:0.7731469869613647 cnum:320.0\n",
      "Train acc: 0.623046875 loss:0.7734125852584839 cnum:319.0\n",
      "Train acc:  0.62890625 loss:0.7729830741882324 cnum:322.0\n",
      "Train acc: 0.638671875 loss:0.7735173106193542 cnum:327.0\n",
      "Train acc:   0.5859375 loss:0.7751032114028931 cnum:300.0\n",
      "Train acc: 0.607421875 loss:0.7740935683250427 cnum:311.0\n",
      "Train acc: 0.607421875 loss:0.7740033864974976 cnum:311.0\n",
      "Train acc: 0.587890625 loss:0.7750272154808044 cnum:301.0\n",
      "Train acc: 0.634765625 loss:0.7727288603782654 cnum:325.0\n",
      "Train acc:  0.64453125 loss:0.7776233553886414 cnum:330.0\n",
      "Train acc: 0.623046875 loss:0.7731906175613403 cnum:319.0\n",
      "Train acc:    0.609375 loss:0.7747805118560791 cnum:312.0\n",
      "Train acc:    0.609375 loss:0.773857593536377 cnum:312.0\n",
      "Train acc: 0.634765625 loss:0.7726025581359863 cnum:325.0\n",
      "Train acc:   0.6640625 loss:0.7713203430175781 cnum:340.0\n",
      "Train acc:       0.625 loss:0.7734614610671997 cnum:320.0\n",
      "Train acc: 0.654296875 loss:0.7716911435127258 cnum:335.0\n",
      "Train acc: 0.654296875 loss:0.7715693116188049 cnum:335.0\n",
      "Train acc: 0.630859375 loss:0.772860050201416 cnum:323.0\n",
      "Train acc:  0.61328125 loss:0.7736474275588989 cnum:314.0\n",
      "Train acc: 0.603515625 loss:0.7747535109519958 cnum:309.0\n",
      "Train acc: 0.634765625 loss:0.7733914256095886 cnum:325.0\n",
      "Train acc: 0.623046875 loss:0.7731271982192993 cnum:319.0\n",
      "Train acc: 0.619140625 loss:0.7867311239242554 cnum:317.0\n",
      "Train acc:  0.63671875 loss:0.7741520404815674 cnum:326.0\n",
      "Train acc: 0.626953125 loss:0.7764654159545898 cnum:321.0\n",
      "Train acc: 0.603515625 loss:0.7746646404266357 cnum:309.0\n",
      "Train acc:     0.65625 loss:0.7714759707450867 cnum:336.0\n",
      "Train acc: 0.552734375 loss:0.7774523496627808 cnum:283.0\n",
      "Train acc:  0.58203125 loss:0.7753293514251709 cnum:298.0\n",
      "Train acc: 0.619140625 loss:0.7740646600723267 cnum:317.0\n",
      "Train acc:    0.640625 loss:0.7723245620727539 cnum:328.0\n",
      "Train acc: 0.580078125 loss:0.7753594517707825 cnum:297.0\n",
      "Train acc: 0.626953125 loss:0.7730922698974609 cnum:321.0\n",
      "Train acc: 0.630859375 loss:0.7736001014709473 cnum:323.0\n",
      "Train acc: 0.595703125 loss:0.7746655941009521 cnum:305.0\n",
      "Train acc: 0.615234375 loss:0.7739156484603882 cnum:315.0\n",
      "Train acc:    0.640625 loss:0.7722705006599426 cnum:328.0\n",
      "Train acc:   0.6484375 loss:0.7719161510467529 cnum:332.0\n",
      "Train acc: 0.619140625 loss:0.7734175324440002 cnum:317.0\n",
      "Train acc: 0.591796875 loss:0.7747043371200562 cnum:303.0\n",
      "Train acc:    0.609375 loss:0.7765596508979797 cnum:312.0\n",
      "Train acc: 0.591796875 loss:0.7747268080711365 cnum:303.0\n",
      "Train acc: 0.615234375 loss:0.7745755910873413 cnum:315.0\n",
      "Train acc: 0.619140625 loss:0.7734578847885132 cnum:317.0\n",
      "Train acc: 0.642578125 loss:0.7721759676933289 cnum:329.0\n",
      "Train acc: 0.623046875 loss:0.7760194540023804 cnum:319.0\n",
      "Train acc: 0.599609375 loss:0.7797978520393372 cnum:307.0\n",
      "Train acc:       0.625 loss:0.7731741666793823 cnum:320.0\n",
      "Train acc:  0.62890625 loss:0.7768711447715759 cnum:322.0\n",
      "Train acc: 0.611328125 loss:0.7738199234008789 cnum:313.0\n",
      "Train acc:  0.62890625 loss:0.7729288935661316 cnum:322.0\n",
      "Train acc: 0.619140625 loss:0.7737293243408203 cnum:317.0\n",
      "Train acc:    0.609375 loss:0.7741491794586182 cnum:312.0\n",
      "Train acc:  0.66796875 loss:0.7709095478057861 cnum:342.0\n",
      "Train acc:       0.625 loss:0.7731817364692688 cnum:320.0\n",
      "Train acc:  0.63671875 loss:0.7725526094436646 cnum:326.0\n",
      "Train acc:  0.64453125 loss:0.7724054455757141 cnum:330.0\n",
      "Train acc:  0.55859375 loss:0.7769014239311218 cnum:286.0\n",
      "Train acc:     0.59375 loss:0.7748270034790039 cnum:304.0\n",
      "Train acc: 0.607421875 loss:0.7747756242752075 cnum:311.0\n",
      "Train acc: 0.626953125 loss:0.7733398675918579 cnum:321.0\n",
      "Train acc: 0.599609375 loss:0.7743303179740906 cnum:307.0\n",
      "Train acc: 0.583984375 loss:0.7779651880264282 cnum:299.0\n",
      "Train acc:  0.59765625 loss:0.7781345248222351 cnum:306.0\n",
      "Train acc: 0.623046875 loss:0.7731485962867737 cnum:319.0\n",
      "Train acc: 0.611328125 loss:0.7738226056098938 cnum:313.0\n",
      "Train acc:    0.609375 loss:0.7742268443107605 cnum:312.0\n",
      "Train acc: 0.658203125 loss:0.7713710069656372 cnum:337.0\n",
      "Train acc: 0.626953125 loss:0.7737413644790649 cnum:321.0\n",
      "Train acc:  0.63671875 loss:0.7788617610931396 cnum:326.0\n",
      "Train acc:  0.58984375 loss:0.7904525995254517 cnum:302.0\n",
      "Train acc: 0.587890625 loss:0.7750779390335083 cnum:301.0\n",
      "Train acc:  0.62109375 loss:0.7747287750244141 cnum:318.0\n",
      "Train acc:       0.625 loss:0.7730439901351929 cnum:320.0\n",
      "Train acc:  0.57421875 loss:0.7759330868721008 cnum:294.0\n",
      "Train acc:  0.59765625 loss:0.7744523286819458 cnum:306.0\n",
      "Train acc: 0.603515625 loss:0.7750316262245178 cnum:309.0\n",
      "Train acc: 0.611328125 loss:0.7738240957260132 cnum:313.0\n",
      "Train acc: 0.642578125 loss:0.772503137588501 cnum:329.0\n",
      "Train acc:  0.58984375 loss:0.7749367356300354 cnum:302.0\n",
      "Train acc:  0.59765625 loss:0.7745382785797119 cnum:306.0\n",
      "Train acc: 0.626953125 loss:0.7732633948326111 cnum:321.0\n",
      "Train acc:  0.62109375 loss:0.7732897996902466 cnum:318.0\n",
      "Train acc: 0.654296875 loss:0.771589457988739 cnum:335.0\n",
      "Train acc:   0.6015625 loss:0.7742107510566711 cnum:308.0\n",
      "Train acc:    0.640625 loss:0.7722956538200378 cnum:328.0\n",
      "Train acc: 0.623046875 loss:0.7734267711639404 cnum:319.0\n",
      "Train acc: 0.654296875 loss:0.7718088030815125 cnum:335.0\n",
      "Train acc: 0.626953125 loss:0.773173451423645 cnum:321.0\n",
      "Train acc:   0.6328125 loss:0.7732250690460205 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7734493613243103 cnum:317.0\n",
      "Train acc:  0.60546875 loss:0.7741478681564331 cnum:310.0\n",
      "Train acc: 0.615234375 loss:0.7735127210617065 cnum:315.0\n",
      "Train acc:  0.63671875 loss:0.7724908590316772 cnum:326.0\n",
      "Train acc: 0.619140625 loss:0.7734435796737671 cnum:317.0\n",
      "Train acc: 0.630859375 loss:0.7738603949546814 cnum:323.0\n",
      "Train acc:       0.625 loss:0.7885981798171997 cnum:320.0\n",
      "Train acc:  0.62109375 loss:0.7893104553222656 cnum:318.0\n",
      "Train acc:   0.6015625 loss:0.7743159532546997 cnum:308.0\n",
      "Train acc: 0.587890625 loss:0.7750189900398254 cnum:301.0\n",
      "Train acc: 0.646484375 loss:0.7723935842514038 cnum:331.0\n",
      "Train acc: 0.599609375 loss:0.7746837139129639 cnum:307.0\n",
      "Train acc:   0.6328125 loss:0.7727279663085938 cnum:324.0\n",
      "Train acc:  0.62109375 loss:0.7732921838760376 cnum:318.0\n",
      "Train acc:   0.6171875 loss:0.773432731628418 cnum:316.0\n",
      "Train acc: 0.599609375 loss:0.7746337652206421 cnum:307.0\n",
      "Train acc:       0.625 loss:0.7730488777160645 cnum:320.0\n",
      "Train acc:  0.62109375 loss:0.773338258266449 cnum:318.0\n",
      "Train acc:       0.625 loss:0.7731796503067017 cnum:320.0\n",
      "Train acc: 0.623046875 loss:0.7733056545257568 cnum:319.0\n",
      "Train acc:   0.6171875 loss:0.7735292315483093 cnum:316.0\n",
      "Train acc:   0.6171875 loss:0.7734150886535645 cnum:316.0\n",
      "Train acc: 0.607421875 loss:0.7749952077865601 cnum:311.0\n",
      "Train acc:  0.61328125 loss:0.7737439870834351 cnum:314.0\n",
      "Train acc: 0.623046875 loss:0.7731618285179138 cnum:319.0\n",
      "Train acc: 0.638671875 loss:0.7746195793151855 cnum:327.0\n",
      "Train acc: 0.626953125 loss:0.7729089260101318 cnum:321.0\n",
      "Train acc: 0.630859375 loss:0.7727609872817993 cnum:323.0\n",
      "Train acc: 0.650390625 loss:0.772026002407074 cnum:333.0\n",
      "Train acc:       0.625 loss:0.7731868624687195 cnum:320.0\n",
      "Train acc:   0.6015625 loss:0.7742012739181519 cnum:308.0\n",
      "Train acc:   0.5859375 loss:0.7751166820526123 cnum:300.0\n",
      "Train acc: 0.619140625 loss:0.7733258605003357 cnum:317.0\n",
      "Train acc:   0.5859375 loss:0.7750630378723145 cnum:300.0\n",
      "Train acc: 0.607421875 loss:0.7740033864974976 cnum:311.0\n",
      "Train acc: 0.591796875 loss:0.775708794593811 cnum:303.0\n",
      "Train acc: 0.623046875 loss:0.7731264233589172 cnum:319.0\n",
      "Train acc:  0.65234375 loss:0.7716385126113892 cnum:334.0\n",
      "Train acc:  0.58203125 loss:0.7762210369110107 cnum:298.0\n",
      "Train acc:  0.61328125 loss:0.7745329141616821 cnum:314.0\n",
      "Train acc: 0.595703125 loss:0.7761437892913818 cnum:305.0\n",
      "Train acc:     0.59375 loss:0.7747723460197449 cnum:304.0\n",
      "Train acc:  0.62109375 loss:0.7732628583908081 cnum:318.0\n",
      "Train acc: 0.611328125 loss:0.7737418413162231 cnum:313.0\n",
      "Train acc: 0.615234375 loss:0.773597240447998 cnum:315.0\n",
      "Train acc: 0.615234375 loss:0.7736502885818481 cnum:315.0\n",
      "Train acc:   0.6015625 loss:0.774586021900177 cnum:308.0\n",
      "Train acc:  0.64453125 loss:0.772114634513855 cnum:330.0\n",
      "Train acc:  0.60546875 loss:0.7740145921707153 cnum:310.0\n",
      "Train acc:    0.609375 loss:0.774299144744873 cnum:312.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc:   0.5859375 loss:0.7752130627632141 cnum:300.0\n",
      "Train acc:   0.6796875 loss:0.7703772783279419 cnum:348.0\n",
      "Train acc:  0.65234375 loss:0.7716568112373352 cnum:334.0\n",
      "Train acc:   0.6171875 loss:0.7898311614990234 cnum:316.0\n",
      "Train acc: 0.623046875 loss:0.77310711145401 cnum:319.0\n",
      "Train acc:  0.60546875 loss:0.7739823460578918 cnum:310.0\n",
      "Train acc: 0.654296875 loss:0.7719318270683289 cnum:335.0\n",
      "Train acc: 0.646484375 loss:0.7871583104133606 cnum:331.0\n",
      "Train acc:    0.640625 loss:0.7722176313400269 cnum:328.0\n",
      "Train acc: 0.630859375 loss:0.7797644734382629 cnum:323.0\n",
      "Train acc: 0.599609375 loss:0.7899371981620789 cnum:307.0\n",
      "Train acc: 0.650390625 loss:0.7718929648399353 cnum:333.0\n",
      "Train acc:  0.62109375 loss:0.7734595537185669 cnum:318.0\n",
      "Train acc:  0.66015625 loss:0.7715225219726562 cnum:338.0\n",
      "Train acc:   0.6484375 loss:0.7723430395126343 cnum:332.0\n",
      "Train acc:  0.62890625 loss:0.7753295302391052 cnum:322.0\n",
      "Train acc: 0.669921875 loss:0.7708382606506348 cnum:343.0\n",
      "Train acc: 0.607421875 loss:0.7765531539916992 cnum:311.0\n",
      "Train acc:   0.5859375 loss:0.7751022577285767 cnum:300.0\n",
      "Train acc: 0.642578125 loss:0.7721238136291504 cnum:329.0\n",
      "Train acc: 0.595703125 loss:0.7747354507446289 cnum:305.0\n",
      "Train acc: 0.619140625 loss:0.7733812928199768 cnum:317.0\n",
      "Train acc:   0.6328125 loss:0.7726973295211792 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7734501957893372 cnum:317.0\n",
      "Train acc:  0.63671875 loss:0.7726211547851562 cnum:326.0\n",
      "Train acc:   0.6484375 loss:0.7718950510025024 cnum:332.0\n",
      "Dev acc:  0.66015625 loss0.7712382674217224\n",
      "Dev acc:  0.65234375 loss0.7717844247817993\n",
      "Dev acc:   0.6640625 loss0.7713723182678223\n",
      "Dev acc:   0.6953125 loss0.7748970985412598\n",
      "Dev acc:  0.66015625 loss0.7721738815307617\n",
      "Dev acc:  0.66796875 loss0.7709840536117554\n",
      "Dev acc:   0.6328125 loss0.7726528644561768\n",
      "Dev acc:  0.64453125 loss0.7720370888710022\n",
      "Dev acc:   0.6640625 loss0.7714421153068542\n",
      "Dev acc:   0.6640625 loss0.7711992859840393\n",
      "Dev acc:  0.64453125 loss0.7720654606819153\n",
      "Dev acc:   0.6328125 loss0.7740564942359924\n",
      "Dev acc:    0.671875 loss0.7710743546485901\n",
      "Dev acc:       0.625 loss0.7731418609619141\n",
      "Dev acc:  0.68359375 loss0.7702306509017944\n",
      "Dev acc:  0.69140625 loss0.7699540853500366\n",
      "Dev acc:  0.64453125 loss0.7722854018211365\n",
      "Dev acc:  0.66796875 loss0.7708432078361511\n",
      "Dev acc:  0.70703125 loss0.7691025137901306\n",
      "Dev acc:    0.640625 loss0.772216796875\n",
      "Dev acc:     0.65625 loss0.7716273665428162\n",
      "Dev acc:   0.6015625 loss0.7741677165031433\n",
      "Dev acc:  0.68359375 loss0.7704668045043945\n",
      "Dev acc:  0.62890625 loss0.7729524970054626\n",
      "Dev acc:  0.62890625 loss0.7733374834060669\n",
      "Dev acc:  0.70703125 loss0.7689908146858215\n",
      "Dev acc:  0.63671875 loss0.7724184989929199\n",
      "Dev acc:     0.71875 loss0.7687293291091919\n",
      "Dev acc:     0.65625 loss0.7722042798995972\n",
      "Dev acc:   0.6484375 loss0.7719083428382874\n",
      "Dev acc:  0.67578125 loss0.7737533450126648\n",
      "Dev acc:  0.64453125 loss0.7720706462860107\n",
      "Dev acc:  0.60546875 loss0.7740361094474792\n",
      "Dev acc:  0.64453125 loss0.7721335887908936\n",
      "Dev acc:      0.6875 loss0.7706488370895386\n",
      "Dev acc:   0.6796875 loss0.7731972932815552\n",
      "Dev acc:   0.6015625 loss0.7743107080459595\n",
      "Dev acc:     0.65625 loss0.7717485427856445\n",
      "Dev acc:  0.66796875 loss0.771327018737793\n",
      "Dev acc:  0.66796875 loss0.7709316611289978\n",
      "Dev acc:  0.60546875 loss0.7739991545677185\n",
      "Dev acc:   0.6640625 loss0.7955409288406372\n",
      "Dev acc:  0.68359375 loss0.770106315612793\n",
      "Dev acc:  0.67578125 loss0.7704364061355591\n",
      "Dev acc:  0.66015625 loss0.7732701301574707\n",
      "Dev acc:     0.65625 loss0.7717965245246887\n",
      "Dev acc:  0.69921875 loss0.7693704962730408\n",
      "Dev acc:  0.73046875 loss0.7677453756332397\n",
      "Dev acc:    0.671875 loss0.7707493901252747\n",
      "Dev acc:  0.63671875 loss0.7724880576133728\n",
      "Dev acc:    0.671875 loss0.7707361578941345\n",
      "Dev acc:   0.6484375 loss0.7721906900405884\n",
      "Dev acc:    0.671875 loss0.770759105682373\n",
      "Dev acc:  0.65234375 loss0.7720658779144287\n",
      "Dev acc:  0.69140625 loss0.769707977771759\n",
      "Dev acc:      0.6875 loss0.7701970934867859\n",
      "Dev acc:   0.5859375 loss0.775397777557373\n",
      "Dev acc:  0.69921875 loss0.7693690061569214\n",
      "Dev acc:    0.703125 loss0.7691605091094971\n",
      "Dev acc:     0.65625 loss0.7715502977371216\n",
      "Dev acc:  0.68359375 loss0.7702869176864624\n",
      "Dev acc:     0.65625 loss0.8024901151657104\n",
      "Dev acc:  0.65234375 loss0.771655261516571\n",
      "Dev acc:    0.671875 loss0.7717848420143127\n",
      "Dev acc:     0.65625 loss0.7717070579528809\n",
      "Dev acc:   0.7109375 loss0.7688286900520325\n",
      "Dev acc:     0.65625 loss0.7716801762580872\n",
      "Dev acc:  0.63671875 loss0.7727571129798889\n",
      "Dev acc:  0.74609375 loss0.767169713973999\n",
      "Dev acc:  0.64453125 loss0.7720472812652588\n",
      "Dev acc:  0.62890625 loss0.7730925679206848\n",
      "Dev acc:   0.6796875 loss0.7702639102935791\n",
      "Dev acc:  0.60546875 loss0.7741074562072754\n",
      "Dev acc:  0.65234375 loss0.7733920216560364\n",
      "Dev acc:  0.59765625 loss0.7745005488395691\n",
      "Dev acc:  0.67578125 loss0.7705115675926208\n",
      "Dev acc:  0.67578125 loss0.7704883813858032\n",
      "Dev acc:   0.6484375 loss0.7719557881355286\n",
      "Dev acc:  0.69921875 loss0.8003875613212585\n",
      "Dev acc:  0.68359375 loss0.7702618837356567\n",
      "Dev acc:  0.71484375 loss0.7690510153770447\n",
      "Dev acc:   0.6015625 loss0.7753068208694458\n",
      "Dev acc:   0.6796875 loss0.7706724405288696\n",
      "Dev acc:   0.6640625 loss0.771114706993103\n",
      "Dev acc:   0.6484375 loss0.7718637585639954\n",
      "Dev acc:   0.6484375 loss0.7720368504524231\n",
      "Dev acc:   0.6796875 loss0.7722651362419128\n",
      "Dev acc:    0.640625 loss0.7722375988960266\n",
      "Dev acc:   0.6484375 loss0.7718473672866821\n",
      "Dev acc:  0.64453125 loss0.7720497250556946\n",
      "Dev acc:     0.71875 loss0.769670844078064\n",
      "Dev acc:  0.66796875 loss0.7710031270980835\n",
      "Dev acc:  0.66796875 loss0.7714518308639526\n",
      "Dev acc:    0.609375 loss0.7738146781921387\n",
      "Dev acc:  0.68359375 loss0.7700870037078857\n",
      "Dev acc:   0.6640625 loss0.771057665348053\n",
      "Dev acc:    0.703125 loss0.7691206336021423\n",
      "Dev acc:   0.6484375 loss0.7718406915664673\n",
      "Dev acc:  0.61328125 loss0.7736385464668274\n",
      "Dev acc:   0.6484375 loss0.7718943953514099\n",
      "Dev acc:    0.640625 loss0.7722757458686829\n",
      "Dev acc:  0.74609375 loss0.7669941782951355\n",
      "Dev acc:  0.69140625 loss0.7729144096374512\n",
      "Dev acc:    0.671875 loss0.7709745764732361\n",
      "Dev acc:  0.62890625 loss0.7728155851364136\n",
      "Dev acc:    0.609375 loss0.7737902998924255\n",
      "Dev acc:  0.66796875 loss0.7708958387374878\n",
      "Total Dev acc:0.6614705023364486\n",
      "Step:3\n",
      "Train acc:       0.625 loss:0.7732831239700317 cnum:320.0\n",
      "Train acc: 0.630859375 loss:0.7728427648544312 cnum:323.0\n",
      "Train acc: 0.630859375 loss:0.7727916240692139 cnum:323.0\n",
      "Train acc: 0.630859375 loss:0.7735124230384827 cnum:323.0\n",
      "Train acc:  0.64453125 loss:0.7720807790756226 cnum:330.0\n",
      "Train acc: 0.587890625 loss:0.7748724818229675 cnum:301.0\n",
      "Train acc:  0.58984375 loss:0.7748204469680786 cnum:302.0\n",
      "Train acc: 0.615234375 loss:0.7735342979431152 cnum:315.0\n",
      "Train acc: 0.572265625 loss:0.7809205651283264 cnum:293.0\n",
      "Train acc: 0.630859375 loss:0.7727282047271729 cnum:323.0\n",
      "Train acc: 0.638671875 loss:0.7726216316223145 cnum:327.0\n",
      "Train acc:   0.6171875 loss:0.7734121084213257 cnum:316.0\n",
      "Train acc: 0.607421875 loss:0.7739715576171875 cnum:311.0\n",
      "Train acc:  0.61328125 loss:0.7736034393310547 cnum:314.0\n",
      "Train acc: 0.599609375 loss:0.7744418978691101 cnum:307.0\n",
      "Train acc: 0.599609375 loss:0.7743043899536133 cnum:307.0\n",
      "Train acc:   0.6171875 loss:0.7734097838401794 cnum:316.0\n",
      "Train acc: 0.638671875 loss:0.772388219833374 cnum:327.0\n",
      "Train acc:  0.60546875 loss:0.774799108505249 cnum:310.0\n",
      "Train acc: 0.599609375 loss:0.7743163108825684 cnum:307.0\n",
      "Train acc:  0.62109375 loss:0.774058997631073 cnum:318.0\n",
      "Train acc:  0.62109375 loss:0.7733433246612549 cnum:318.0\n",
      "Train acc: 0.638671875 loss:0.7723124027252197 cnum:327.0\n",
      "Train acc: 0.564453125 loss:0.7773510813713074 cnum:289.0\n",
      "Train acc: 0.599609375 loss:0.7744331359863281 cnum:307.0\n",
      "Train acc: 0.630859375 loss:0.7731696367263794 cnum:323.0\n",
      "Train acc:   0.6171875 loss:0.7733967900276184 cnum:316.0\n",
      "Train acc: 0.638671875 loss:0.7723644971847534 cnum:327.0\n",
      "Train acc:   0.6328125 loss:0.7726227641105652 cnum:324.0\n",
      "Train acc:   0.6015625 loss:0.7751007080078125 cnum:308.0\n",
      "Train acc: 0.595703125 loss:0.7745211124420166 cnum:305.0\n",
      "Train acc:  0.61328125 loss:0.7735983729362488 cnum:314.0\n",
      "Train acc:   0.6015625 loss:0.7745086550712585 cnum:308.0\n",
      "Train acc:  0.61328125 loss:0.7736489772796631 cnum:314.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.634765625 loss:0.7727488279342651 cnum:325.0\n",
      "Train acc:  0.62109375 loss:0.7733561396598816 cnum:318.0\n",
      "Train acc: 0.634765625 loss:0.7725478410720825 cnum:325.0\n",
      "Train acc: 0.630859375 loss:0.772870659828186 cnum:323.0\n",
      "Train acc: 0.619140625 loss:0.7733045816421509 cnum:317.0\n",
      "Train acc:   0.5390625 loss:0.7779126167297363 cnum:276.0\n",
      "Train acc:  0.61328125 loss:0.7736043930053711 cnum:314.0\n",
      "Train acc: 0.623046875 loss:0.7730962038040161 cnum:319.0\n",
      "Train acc: 0.638671875 loss:0.772503674030304 cnum:327.0\n",
      "Train acc: 0.626953125 loss:0.7742447853088379 cnum:321.0\n",
      "Train acc:   0.6171875 loss:0.7734869718551636 cnum:316.0\n",
      "Train acc: 0.611328125 loss:0.7737194895744324 cnum:313.0\n",
      "Train acc:   0.6328125 loss:0.7795798182487488 cnum:324.0\n",
      "Train acc: 0.638671875 loss:0.7723768353462219 cnum:327.0\n",
      "Train acc:  0.61328125 loss:0.7736530900001526 cnum:314.0\n",
      "Train acc:     0.59375 loss:0.7897151708602905 cnum:304.0\n",
      "Train acc: 0.607421875 loss:0.7740191221237183 cnum:311.0\n",
      "Train acc:  0.60546875 loss:0.7744712829589844 cnum:310.0\n",
      "Train acc:    0.609375 loss:0.7738025784492493 cnum:312.0\n",
      "Train acc:  0.61328125 loss:0.773708701133728 cnum:314.0\n",
      "Train acc:  0.58984375 loss:0.7747815251350403 cnum:302.0\n",
      "Train acc:  0.59765625 loss:0.7744454741477966 cnum:306.0\n",
      "Train acc:  0.62890625 loss:0.7730125784873962 cnum:322.0\n",
      "Train acc: 0.623046875 loss:0.7738386392593384 cnum:319.0\n",
      "Train acc: 0.572265625 loss:0.7758025527000427 cnum:293.0\n",
      "Train acc: 0.634765625 loss:0.7727499008178711 cnum:325.0\n",
      "Train acc: 0.615234375 loss:0.773525059223175 cnum:315.0\n",
      "Train acc:  0.62109375 loss:0.7733417749404907 cnum:318.0\n",
      "Train acc:  0.64453125 loss:0.772497296333313 cnum:330.0\n",
      "Train acc: 0.658203125 loss:0.771426796913147 cnum:337.0\n",
      "Train acc:   0.6484375 loss:0.7721604704856873 cnum:332.0\n",
      "Train acc:   0.6328125 loss:0.7727904319763184 cnum:324.0\n",
      "Train acc: 0.630859375 loss:0.7728854417800903 cnum:323.0\n",
      "Train acc:    0.640625 loss:0.7722669243812561 cnum:328.0\n",
      "Train acc: 0.603515625 loss:0.7743011713027954 cnum:309.0\n",
      "Train acc:  0.61328125 loss:0.773919939994812 cnum:314.0\n",
      "Train acc: 0.619140625 loss:0.7733898162841797 cnum:317.0\n",
      "Train acc: 0.623046875 loss:0.773264467716217 cnum:319.0\n",
      "Train acc:   0.5859375 loss:0.7749996185302734 cnum:300.0\n",
      "Train acc:  0.62890625 loss:0.772834300994873 cnum:322.0\n",
      "Train acc: 0.615234375 loss:0.7742748856544495 cnum:315.0\n",
      "Train acc: 0.650390625 loss:0.7721199989318848 cnum:333.0\n",
      "Train acc: 0.603515625 loss:0.774104118347168 cnum:309.0\n",
      "Train acc:  0.60546875 loss:0.7764054536819458 cnum:310.0\n",
      "Train acc:    0.578125 loss:0.7753601670265198 cnum:296.0\n",
      "Train acc:  0.65234375 loss:0.7720341682434082 cnum:334.0\n",
      "Train acc: 0.611328125 loss:0.7747429609298706 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.7744847536087036 cnum:306.0\n",
      "Train acc:   0.6015625 loss:0.7751348614692688 cnum:308.0\n",
      "Train acc: 0.595703125 loss:0.7746672630310059 cnum:305.0\n",
      "Train acc: 0.599609375 loss:0.7743098735809326 cnum:307.0\n",
      "Train acc:   0.5859375 loss:0.7751478552818298 cnum:300.0\n",
      "Train acc: 0.595703125 loss:0.774621844291687 cnum:305.0\n",
      "Train acc:  0.64453125 loss:0.7720119953155518 cnum:330.0\n",
      "Train acc:  0.61328125 loss:0.7737025618553162 cnum:314.0\n",
      "Train acc: 0.634765625 loss:0.7725253105163574 cnum:325.0\n",
      "Train acc: 0.615234375 loss:0.7735235095024109 cnum:315.0\n",
      "Train acc:  0.58984375 loss:0.7749931812286377 cnum:302.0\n",
      "Train acc: 0.591796875 loss:0.7747505903244019 cnum:303.0\n",
      "Train acc: 0.572265625 loss:0.7757920622825623 cnum:293.0\n",
      "Train acc:    0.640625 loss:0.7732890248298645 cnum:328.0\n",
      "Train acc:     0.59375 loss:0.7746453881263733 cnum:304.0\n",
      "Train acc:  0.65234375 loss:0.7741507291793823 cnum:334.0\n",
      "Train acc:  0.62109375 loss:0.773328959941864 cnum:318.0\n",
      "Train acc:  0.59765625 loss:0.7744347453117371 cnum:306.0\n",
      "Train acc:  0.66015625 loss:0.7712860107421875 cnum:338.0\n",
      "Train acc: 0.611328125 loss:0.773715078830719 cnum:313.0\n",
      "Train acc:  0.61328125 loss:0.7736223340034485 cnum:314.0\n",
      "Train acc:    0.609375 loss:0.7893584370613098 cnum:312.0\n",
      "Train acc:  0.62890625 loss:0.7728647589683533 cnum:322.0\n",
      "Train acc: 0.576171875 loss:0.7756636142730713 cnum:295.0\n",
      "Train acc:       0.625 loss:0.7893204689025879 cnum:320.0\n",
      "Train acc:  0.60546875 loss:0.7740219831466675 cnum:310.0\n",
      "Train acc: 0.591796875 loss:0.7747238278388977 cnum:303.0\n",
      "Train acc:   0.6796875 loss:0.7702698111534119 cnum:348.0\n",
      "Train acc:   0.6171875 loss:0.7734870910644531 cnum:316.0\n",
      "Train acc: 0.619140625 loss:0.7737135887145996 cnum:317.0\n",
      "Train acc:  0.65234375 loss:0.7716745138168335 cnum:334.0\n",
      "Train acc:  0.60546875 loss:0.7747520208358765 cnum:310.0\n",
      "Train acc: 0.611328125 loss:0.7736788988113403 cnum:313.0\n",
      "Train acc:  0.59765625 loss:0.7743654847145081 cnum:306.0\n",
      "Train acc:    0.609375 loss:0.7743085026741028 cnum:312.0\n",
      "Train acc:  0.62890625 loss:0.7730523943901062 cnum:322.0\n",
      "Train acc: 0.615234375 loss:0.7734925150871277 cnum:315.0\n",
      "Train acc: 0.587890625 loss:0.7749910354614258 cnum:301.0\n",
      "Train acc: 0.583984375 loss:0.775071918964386 cnum:299.0\n",
      "Train acc: 0.630859375 loss:0.7727603912353516 cnum:323.0\n",
      "Train acc: 0.572265625 loss:0.7757360935211182 cnum:293.0\n",
      "Train acc: 0.591796875 loss:0.774832010269165 cnum:303.0\n",
      "Train acc: 0.619140625 loss:0.7734715938568115 cnum:317.0\n",
      "Train acc:   0.6640625 loss:0.7710692882537842 cnum:340.0\n",
      "Train acc: 0.591796875 loss:0.7762320041656494 cnum:303.0\n",
      "Train acc:   0.6328125 loss:0.772616982460022 cnum:324.0\n",
      "Train acc:   0.6328125 loss:0.7728142738342285 cnum:324.0\n",
      "Train acc: 0.646484375 loss:0.7720434069633484 cnum:331.0\n",
      "Train acc:   0.6328125 loss:0.7726017236709595 cnum:324.0\n",
      "Train acc:   0.5859375 loss:0.7749813199043274 cnum:300.0\n",
      "Train acc:   0.6328125 loss:0.7733777761459351 cnum:324.0\n",
      "Train acc:  0.63671875 loss:0.772621214389801 cnum:326.0\n",
      "Train acc: 0.630859375 loss:0.7728977203369141 cnum:323.0\n",
      "Train acc: 0.658203125 loss:0.7713665962219238 cnum:337.0\n",
      "Train acc: 0.611328125 loss:0.7736749053001404 cnum:313.0\n",
      "Train acc:    0.609375 loss:0.7737804055213928 cnum:312.0\n",
      "Train acc: 0.599609375 loss:0.7897546887397766 cnum:307.0\n",
      "Train acc: 0.615234375 loss:0.7734885215759277 cnum:315.0\n",
      "Train acc:   0.6328125 loss:0.7727356553077698 cnum:324.0\n",
      "Train acc:  0.64453125 loss:0.7721490859985352 cnum:330.0\n",
      "Train acc: 0.623046875 loss:0.7734131217002869 cnum:319.0\n",
      "Train acc: 0.583984375 loss:0.7755408883094788 cnum:299.0\n",
      "Train acc: 0.626953125 loss:0.7729825973510742 cnum:321.0\n",
      "Train acc: 0.623046875 loss:0.7740878462791443 cnum:319.0\n",
      "Train acc: 0.634765625 loss:0.7726894617080688 cnum:325.0\n",
      "Train acc: 0.654296875 loss:0.7718151211738586 cnum:335.0\n",
      "Train acc:    0.609375 loss:0.7738128304481506 cnum:312.0\n",
      "Train acc:  0.62109375 loss:0.7746435403823853 cnum:318.0\n",
      "Train acc: 0.669921875 loss:0.7710977792739868 cnum:343.0\n",
      "Train acc: 0.630859375 loss:0.7727387547492981 cnum:323.0\n",
      "Train acc:  0.65234375 loss:0.7716906070709229 cnum:334.0\n",
      "Train acc: 0.583984375 loss:0.7750624418258667 cnum:299.0\n",
      "Train acc: 0.607421875 loss:0.7740195989608765 cnum:311.0\n",
      "Train acc: 0.599609375 loss:0.7743446826934814 cnum:307.0\n",
      "Train acc:       0.625 loss:0.7730357050895691 cnum:320.0\n",
      "Train acc:  0.59765625 loss:0.7744632959365845 cnum:306.0\n",
      "Train acc: 0.619140625 loss:0.773379921913147 cnum:317.0\n",
      "Train acc:   0.6015625 loss:0.7747483849525452 cnum:308.0\n",
      "Train acc: 0.619140625 loss:0.7772660255432129 cnum:317.0\n",
      "Train acc:     0.65625 loss:0.7738386392593384 cnum:336.0\n",
      "Train acc: 0.650390625 loss:0.7722964882850647 cnum:333.0\n",
      "Train acc: 0.603515625 loss:0.7741416692733765 cnum:309.0\n",
      "Train acc:     0.59375 loss:0.7745948433876038 cnum:304.0\n",
      "Train acc:   0.6328125 loss:0.7726078033447266 cnum:324.0\n",
      "Train acc: 0.607421875 loss:0.7739624977111816 cnum:311.0\n",
      "Train acc:  0.61328125 loss:0.774399995803833 cnum:314.0\n",
      "Train acc: 0.642578125 loss:0.7778577208518982 cnum:329.0\n",
      "Train acc:       0.625 loss:0.7734701633453369 cnum:320.0\n",
      "Train acc:     0.59375 loss:0.7747429609298706 cnum:304.0\n",
      "Train acc: 0.615234375 loss:0.7813700437545776 cnum:315.0\n",
      "Train acc: 0.572265625 loss:0.7757675647735596 cnum:293.0\n",
      "Train acc:   0.5859375 loss:0.7752291560173035 cnum:300.0\n",
      "Train acc:  0.57421875 loss:0.7755946516990662 cnum:294.0\n",
      "Train acc:   0.6171875 loss:0.7734495997428894 cnum:316.0\n",
      "Train acc:  0.62890625 loss:0.7756116986274719 cnum:322.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.583984375 loss:0.7763813734054565 cnum:299.0\n",
      "Train acc: 0.623046875 loss:0.7730991244316101 cnum:319.0\n",
      "Train acc:       0.625 loss:0.7734156847000122 cnum:320.0\n",
      "Train acc: 0.619140625 loss:0.7732985615730286 cnum:317.0\n",
      "Train acc:   0.6171875 loss:0.7734584808349609 cnum:316.0\n",
      "Train acc:       0.625 loss:0.7731755375862122 cnum:320.0\n",
      "Train acc: 0.611328125 loss:0.7743890881538391 cnum:313.0\n",
      "Train acc:   0.6015625 loss:0.7764408588409424 cnum:308.0\n",
      "Train acc: 0.619140625 loss:0.7732852697372437 cnum:317.0\n",
      "Train acc: 0.607421875 loss:0.7739615440368652 cnum:311.0\n",
      "Train acc:  0.62890625 loss:0.7733665704727173 cnum:322.0\n",
      "Train acc: 0.615234375 loss:0.77396559715271 cnum:315.0\n",
      "Train acc: 0.595703125 loss:0.7746066451072693 cnum:305.0\n",
      "Train acc: 0.603515625 loss:0.774447500705719 cnum:309.0\n",
      "Train acc: 0.623046875 loss:0.7732195854187012 cnum:319.0\n",
      "Train acc: 0.603515625 loss:0.7742659449577332 cnum:309.0\n",
      "Train acc: 0.630859375 loss:0.7731381058692932 cnum:323.0\n",
      "Train acc: 0.607421875 loss:0.774185061454773 cnum:311.0\n",
      "Train acc: 0.634765625 loss:0.7726098895072937 cnum:325.0\n",
      "Train acc:  0.62890625 loss:0.7730476260185242 cnum:322.0\n",
      "Train acc: 0.630859375 loss:0.7802088856697083 cnum:323.0\n",
      "Train acc:   0.6328125 loss:0.7733783721923828 cnum:324.0\n",
      "Train acc:       0.625 loss:0.773053765296936 cnum:320.0\n",
      "Train acc: 0.623046875 loss:0.7731447219848633 cnum:319.0\n",
      "Train acc: 0.626953125 loss:0.7729915380477905 cnum:321.0\n",
      "Train acc:    0.640625 loss:0.7726890444755554 cnum:328.0\n",
      "Train acc:   0.6171875 loss:0.774928629398346 cnum:316.0\n",
      "Train acc: 0.583984375 loss:0.7800803184509277 cnum:299.0\n",
      "Train acc:   0.6484375 loss:0.7718518376350403 cnum:332.0\n",
      "Train acc: 0.591796875 loss:0.7747089862823486 cnum:303.0\n",
      "Train acc: 0.626953125 loss:0.7750956416130066 cnum:321.0\n",
      "Train acc:   0.6328125 loss:0.7727804183959961 cnum:324.0\n",
      "Train acc: 0.587890625 loss:0.7754639387130737 cnum:301.0\n",
      "Train acc: 0.634765625 loss:0.7725709080696106 cnum:325.0\n",
      "Train acc: 0.626953125 loss:0.7729215621948242 cnum:321.0\n",
      "Train acc:   0.6171875 loss:0.7734537720680237 cnum:316.0\n",
      "Train acc:     0.59375 loss:0.775673508644104 cnum:304.0\n",
      "Train acc: 0.630859375 loss:0.772738516330719 cnum:323.0\n",
      "Train acc: 0.603515625 loss:0.7767153382301331 cnum:309.0\n",
      "Train acc:    0.640625 loss:0.7726321816444397 cnum:328.0\n",
      "Train acc:   0.6328125 loss:0.7726057171821594 cnum:324.0\n",
      "Train acc:   0.6328125 loss:0.7728022336959839 cnum:324.0\n",
      "Train acc:   0.6015625 loss:0.7742345333099365 cnum:308.0\n",
      "Train acc:   0.5859375 loss:0.7749980688095093 cnum:300.0\n",
      "Train acc: 0.650390625 loss:0.7717654705047607 cnum:333.0\n",
      "Train acc: 0.583984375 loss:0.7752967476844788 cnum:299.0\n",
      "Train acc:  0.66015625 loss:0.7712842226028442 cnum:338.0\n",
      "Train acc: 0.634765625 loss:0.7725507020950317 cnum:325.0\n",
      "Train acc: 0.630859375 loss:0.7727188467979431 cnum:323.0\n",
      "Train acc:     0.59375 loss:0.7747305631637573 cnum:304.0\n",
      "Train acc: 0.611328125 loss:0.7775689363479614 cnum:313.0\n",
      "Train acc:  0.61328125 loss:0.7736339569091797 cnum:314.0\n",
      "Train acc: 0.607421875 loss:0.7751194834709167 cnum:311.0\n",
      "Train acc: 0.576171875 loss:0.775562584400177 cnum:295.0\n",
      "Train acc: 0.599609375 loss:0.7743216156959534 cnum:307.0\n",
      "Train acc: 0.619140625 loss:0.7750543355941772 cnum:317.0\n",
      "Train acc: 0.654296875 loss:0.7721474766731262 cnum:335.0\n",
      "Train acc:   0.6328125 loss:0.7725867629051208 cnum:324.0\n",
      "Train acc:       0.625 loss:0.7732188701629639 cnum:320.0\n",
      "Train acc: 0.615234375 loss:0.773501992225647 cnum:315.0\n",
      "Train acc:  0.61328125 loss:0.7736736536026001 cnum:314.0\n",
      "Train acc:       0.625 loss:0.7730284929275513 cnum:320.0\n",
      "Train acc:   0.6328125 loss:0.7726758718490601 cnum:324.0\n",
      "Train acc: 0.619140625 loss:0.7732853293418884 cnum:317.0\n",
      "Train acc:    0.609375 loss:0.7738900184631348 cnum:312.0\n",
      "Train acc:  0.61328125 loss:0.7736010551452637 cnum:314.0\n",
      "Train acc: 0.646484375 loss:0.7722510099411011 cnum:331.0\n",
      "Train acc: 0.599609375 loss:0.7898123264312744 cnum:307.0\n",
      "Train acc: 0.607421875 loss:0.7740637063980103 cnum:311.0\n",
      "Train acc: 0.658203125 loss:0.7727080583572388 cnum:337.0\n",
      "Train acc: 0.587890625 loss:0.7749229073524475 cnum:301.0\n",
      "Train acc: 0.654296875 loss:0.7715504765510559 cnum:335.0\n",
      "Train acc:    0.609375 loss:0.774062991142273 cnum:312.0\n",
      "Train acc:  0.59765625 loss:0.7743731737136841 cnum:306.0\n",
      "Train acc: 0.607421875 loss:0.7739070057868958 cnum:311.0\n",
      "Train acc:  0.63671875 loss:0.7729389071464539 cnum:326.0\n",
      "Train acc:       0.625 loss:0.77409428358078 cnum:320.0\n",
      "Train acc:  0.62109375 loss:0.7735377550125122 cnum:318.0\n",
      "Train acc: 0.626953125 loss:0.7729527950286865 cnum:321.0\n",
      "Train acc: 0.619140625 loss:0.7734075784683228 cnum:317.0\n",
      "Train acc: 0.615234375 loss:0.7830603122711182 cnum:315.0\n",
      "Train acc:       0.625 loss:0.7730453610420227 cnum:320.0\n",
      "Train acc: 0.634765625 loss:0.7726908922195435 cnum:325.0\n",
      "Train acc:    0.640625 loss:0.7726072072982788 cnum:328.0\n",
      "Train acc:   0.6640625 loss:0.7710762619972229 cnum:340.0\n",
      "Train acc: 0.615234375 loss:0.7735384702682495 cnum:315.0\n",
      "Train acc:  0.59765625 loss:0.7744289636611938 cnum:306.0\n",
      "Train acc:    0.609375 loss:0.7738304734230042 cnum:312.0\n",
      "Train acc:    0.578125 loss:0.7754359245300293 cnum:296.0\n",
      "Train acc:   0.5859375 loss:0.774977445602417 cnum:300.0\n",
      "Train acc:      0.5625 loss:0.776170551776886 cnum:288.0\n",
      "Train acc:  0.59765625 loss:0.7743895053863525 cnum:306.0\n",
      "Train acc: 0.623046875 loss:0.7731008529663086 cnum:319.0\n",
      "Train acc:   0.6171875 loss:0.7734103202819824 cnum:316.0\n",
      "Train acc:    0.640625 loss:0.7722656726837158 cnum:328.0\n",
      "Train acc:    0.609375 loss:0.7738500833511353 cnum:312.0\n",
      "Train acc:  0.58984375 loss:0.7749003767967224 cnum:302.0\n",
      "Train acc: 0.623046875 loss:0.7735055685043335 cnum:319.0\n",
      "Train acc: 0.615234375 loss:0.7735123634338379 cnum:315.0\n",
      "Train acc:  0.59765625 loss:0.7744193077087402 cnum:306.0\n",
      "Train acc:  0.61328125 loss:0.773914098739624 cnum:314.0\n",
      "Train acc: 0.591796875 loss:0.7746888995170593 cnum:303.0\n",
      "Train acc:  0.59765625 loss:0.774376392364502 cnum:306.0\n",
      "Train acc: 0.623046875 loss:0.7735177278518677 cnum:319.0\n",
      "Train acc: 0.595703125 loss:0.7751498222351074 cnum:305.0\n",
      "Train acc:  0.62109375 loss:0.7757731676101685 cnum:318.0\n",
      "Train acc:   0.6171875 loss:0.7734543681144714 cnum:316.0\n",
      "Train acc:  0.62109375 loss:0.7742424011230469 cnum:318.0\n",
      "Train acc:  0.58203125 loss:0.775201141834259 cnum:298.0\n",
      "Train acc:  0.59765625 loss:0.7747761011123657 cnum:306.0\n",
      "Train acc:     0.59375 loss:0.7766637802124023 cnum:304.0\n",
      "Train acc: 0.599609375 loss:0.7743416428565979 cnum:307.0\n",
      "Train acc:  0.60546875 loss:0.7743631601333618 cnum:310.0\n",
      "Train acc: 0.615234375 loss:0.7739017009735107 cnum:315.0\n",
      "Train acc:  0.58984375 loss:0.7747595906257629 cnum:302.0\n",
      "Train acc: 0.619140625 loss:0.7733041048049927 cnum:317.0\n",
      "Train acc:  0.61328125 loss:0.7736521363258362 cnum:314.0\n",
      "Train acc: 0.658203125 loss:0.7714751362800598 cnum:337.0\n",
      "Train acc: 0.603515625 loss:0.7742923498153687 cnum:309.0\n",
      "Train acc: 0.626953125 loss:0.7732616662979126 cnum:321.0\n",
      "Train acc:  0.64453125 loss:0.7720322608947754 cnum:330.0\n",
      "Train acc: 0.642578125 loss:0.772165834903717 cnum:329.0\n",
      "Train acc: 0.603515625 loss:0.7745956182479858 cnum:309.0\n",
      "Train acc: 0.646484375 loss:0.7719689011573792 cnum:331.0\n",
      "Train acc:    0.609375 loss:0.7739270329475403 cnum:312.0\n",
      "Train acc: 0.623046875 loss:0.7735046148300171 cnum:319.0\n",
      "Train acc:  0.59765625 loss:0.7743900418281555 cnum:306.0\n",
      "Train acc:    0.640625 loss:0.7722396850585938 cnum:328.0\n",
      "Train acc: 0.615234375 loss:0.7734864354133606 cnum:315.0\n",
      "Train acc:  0.64453125 loss:0.7720115780830383 cnum:330.0\n",
      "Train acc:  0.62109375 loss:0.7732046842575073 cnum:318.0\n",
      "Train acc:  0.62890625 loss:0.7727975249290466 cnum:322.0\n",
      "Train acc: 0.603515625 loss:0.7741619944572449 cnum:309.0\n",
      "Train acc: 0.634765625 loss:0.77255779504776 cnum:325.0\n",
      "Train acc: 0.611328125 loss:0.7737771272659302 cnum:313.0\n",
      "Train acc:   0.6328125 loss:0.7736404538154602 cnum:324.0\n",
      "Train acc:   0.6015625 loss:0.7743948101997375 cnum:308.0\n",
      "Train acc:   0.5859375 loss:0.7750954627990723 cnum:300.0\n",
      "Train acc:    0.609375 loss:0.7738022208213806 cnum:312.0\n",
      "Train acc: 0.630859375 loss:0.7726954221725464 cnum:323.0\n",
      "Train acc: 0.595703125 loss:0.7747242450714111 cnum:305.0\n",
      "Train acc: 0.638671875 loss:0.7723513841629028 cnum:327.0\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train acc: 0.615234375 loss:0.7735544443130493 cnum:315.0\n",
      "Train acc:    0.609375 loss:0.7738277912139893 cnum:312.0\n",
      "Train acc:  0.59765625 loss:0.7746087908744812 cnum:306.0\n",
      "Train acc: 0.595703125 loss:0.774537980556488 cnum:305.0\n",
      "Train acc: 0.626953125 loss:0.7737227082252502 cnum:321.0\n",
      "Train acc:  0.62890625 loss:0.7728084325790405 cnum:322.0\n",
      "Train acc:  0.62109375 loss:0.7733801603317261 cnum:318.0\n",
      "Train acc:   0.6171875 loss:0.7734413146972656 cnum:316.0\n",
      "Train acc:   0.6171875 loss:0.7734207510948181 cnum:316.0\n",
      "Train acc:   0.6328125 loss:0.772804856300354 cnum:324.0\n",
      "Train acc:   0.5859375 loss:0.7750740647315979 cnum:300.0\n",
      "Train acc:  0.58203125 loss:0.7753730416297913 cnum:298.0\n",
      "Train acc:  0.62109375 loss:0.7736362218856812 cnum:318.0\n",
      "Train acc: 0.619140625 loss:0.7732767462730408 cnum:317.0\n",
      "Train acc:  0.62109375 loss:0.7736791372299194 cnum:318.0\n",
      "Train acc:   0.6328125 loss:0.7727041840553284 cnum:324.0\n",
      "Train acc:   0.5859375 loss:0.7789540886878967 cnum:300.0\n",
      "Train acc: 0.576171875 loss:0.7836734652519226 cnum:295.0\n",
      "Train acc: 0.650390625 loss:0.7717964053153992 cnum:333.0\n",
      "Train acc:   0.6953125 loss:0.7695600986480713 cnum:356.0\n",
      "Train acc:  0.62890625 loss:0.7728170156478882 cnum:322.0\n",
      "Train acc:  0.62109375 loss:0.7732188701629639 cnum:318.0\n",
      "Train acc:    0.578125 loss:0.7753990888595581 cnum:296.0\n",
      "Train acc: 0.658203125 loss:0.7713543772697449 cnum:337.0\n",
      "Train acc:   0.6015625 loss:0.7743914127349854 cnum:308.0\n",
      "Train acc: 0.623046875 loss:0.7730830907821655 cnum:319.0\n",
      "Train acc:       0.625 loss:0.7749660611152649 cnum:320.0\n",
      "Train acc:    0.578125 loss:0.7753785848617554 cnum:296.0\n",
      "Train acc:  0.58203125 loss:0.7755066752433777 cnum:298.0\n",
      "Train acc: 0.623046875 loss:0.7735512852668762 cnum:319.0\n",
      "Train acc: 0.599609375 loss:0.7894538044929504 cnum:307.0\n",
      "Train acc:  0.59765625 loss:0.7752654552459717 cnum:306.0\n",
      "Train acc:  0.61328125 loss:0.7737256288528442 cnum:314.0\n",
      "Train acc:     0.65625 loss:0.7714474201202393 cnum:336.0\n",
      "Train acc: 0.619140625 loss:0.7737629413604736 cnum:317.0\n",
      "Train acc: 0.646484375 loss:0.7719055414199829 cnum:331.0\n",
      "Train acc: 0.615234375 loss:0.7735199332237244 cnum:315.0\n",
      "Train acc: 0.583984375 loss:0.77503502368927 cnum:299.0\n",
      "Train acc: 0.626953125 loss:0.772983968257904 cnum:321.0\n",
      "Train acc:    0.609375 loss:0.7737804651260376 cnum:312.0\n",
      "Train acc:  0.63671875 loss:0.7724345326423645 cnum:326.0\n",
      "Train acc: 0.611328125 loss:0.7737144827842712 cnum:313.0\n",
      "Train acc:  0.58984375 loss:0.7749642133712769 cnum:302.0\n",
      "Train acc:   0.6171875 loss:0.7752730846405029 cnum:316.0\n",
      "Train acc: 0.607421875 loss:0.7743124961853027 cnum:311.0\n",
      "Train acc: 0.619140625 loss:0.773354709148407 cnum:317.0\n",
      "Train acc:  0.59765625 loss:0.7743823528289795 cnum:306.0\n",
      "Train acc:  0.59765625 loss:0.7758454084396362 cnum:306.0\n",
      "Train acc: 0.607421875 loss:0.7739328742027283 cnum:311.0\n",
      "Train acc: 0.587890625 loss:0.7749184370040894 cnum:301.0\n",
      "Train acc: 0.638671875 loss:0.7726133465766907 cnum:327.0\n",
      "Train acc:       0.625 loss:0.7730152010917664 cnum:320.0\n",
      "Train acc:  0.65234375 loss:0.7721312642097473 cnum:334.0\n",
      "Train acc:   0.6328125 loss:0.772628664970398 cnum:324.0\n",
      "Train acc: 0.591796875 loss:0.7756053805351257 cnum:303.0\n",
      "Train acc:  0.59765625 loss:0.7743833661079407 cnum:306.0\n",
      "Train acc: 0.626953125 loss:0.7729062438011169 cnum:321.0\n",
      "Train acc:   0.5859375 loss:0.776328980922699 cnum:300.0\n",
      "Train acc:  0.62109375 loss:0.7733977437019348 cnum:318.0\n",
      "Train acc: 0.603515625 loss:0.77936190366745 cnum:309.0\n",
      "Train acc: 0.642578125 loss:0.7721506357192993 cnum:329.0\n",
      "Train acc: 0.619140625 loss:0.7733206748962402 cnum:317.0\n",
      "Train acc:   0.6171875 loss:0.7734135389328003 cnum:316.0\n",
      "Train acc: 0.642578125 loss:0.7721160054206848 cnum:329.0\n",
      "Train acc: 0.638671875 loss:0.7723961472511292 cnum:327.0\n",
      "Train acc:  0.61328125 loss:0.7738264799118042 cnum:314.0\n",
      "Train acc:   0.6328125 loss:0.7726256251335144 cnum:324.0\n",
      "Train acc:   0.6484375 loss:0.7720330953598022 cnum:332.0\n",
      "Train acc: 0.634765625 loss:0.7726340889930725 cnum:325.0\n",
      "Train acc:  0.59765625 loss:0.77440345287323 cnum:306.0\n",
      "Train acc:  0.58203125 loss:0.7752966284751892 cnum:298.0\n",
      "Train acc:  0.58984375 loss:0.7753825187683105 cnum:302.0\n",
      "Train acc:  0.56640625 loss:0.7760103940963745 cnum:290.0\n",
      "Train acc:   0.6484375 loss:0.7722228765487671 cnum:332.0\n",
      "Train acc:  0.64453125 loss:0.7722823619842529 cnum:330.0\n",
      "Train acc:   0.5859375 loss:0.7904471158981323 cnum:300.0\n",
      "Train acc: 0.623046875 loss:0.7734416127204895 cnum:319.0\n",
      "Train acc:  0.62109375 loss:0.7738366723060608 cnum:318.0\n",
      "Train acc:     0.59375 loss:0.7748011350631714 cnum:304.0\n",
      "Train acc:  0.64453125 loss:0.7727047801017761 cnum:330.0\n",
      "Train acc: 0.650390625 loss:0.7722724676132202 cnum:333.0\n",
      "Train acc:  0.59765625 loss:0.7746744751930237 cnum:306.0\n",
      "Train acc: 0.634765625 loss:0.7725397348403931 cnum:325.0\n",
      "Train acc:  0.64453125 loss:0.7719980478286743 cnum:330.0\n"
     ]
    }
   ],
   "source": [
    "tf.reset_default_graph() \n",
    "with tf.Session() as sex:\n",
    "    writer = tf.summary.FileWriter('./tfb_file/rnn/1')\n",
    "    rnn_model = model_text_rnn(sentence_length=sentence_length,\n",
    "                               output_classes=2,\n",
    "                               vocab_size=vocab_size,\n",
    "                               embedding_size=150,\n",
    "                               num_units=256)\n",
    "    \n",
    "    optimizer = tf.train.AdagradOptimizer(0.00001)\n",
    "    train_target = optimizer.minimize(rnn_model.loss)\n",
    "    \n",
    "    \n",
    "    sex.run(tf.global_variables_initializer())\n",
    "    def train_step(batch_t_x,batch_t_y):\n",
    "        f_dict={\n",
    "            rnn_model.x:batch_t_x,\n",
    "            rnn_model.y:batch_t_y,\n",
    "            rnn_model.dropout_keep_prob:0.5\n",
    "        }\n",
    "        _,acc,loss,cnum = sex.run([train_target,rnn_model.accuracy,rnn_model.loss,rnn_model.num_correct],feed_dict=f_dict)\n",
    "        return acc,loss,cnum\n",
    "    \n",
    "    def test_step(batch_d_x,batch_d_y):\n",
    "        f_dict={\n",
    "            rnn_model.x:batch_d_x,\n",
    "            rnn_model.y:batch_d_y,\n",
    "            rnn_model.dropout_keep_prob:0.5\n",
    "        }\n",
    "        acc,loss,c_num = sex.run([rnn_model.accuracy,rnn_model.loss,rnn_model.num_correct],feed_dict=f_dict)\n",
    "        return acc,loss,c_num\n",
    "    \n",
    "    max_acc = - np.inf\n",
    "    early_stop = 0\n",
    "    \n",
    "    writer.add_graph(sex.graph)\n",
    "    for i in range(1000):\n",
    "        # 一次训练过程\n",
    "        print('Step:{0}'.format(i))\n",
    "        #--------------------------------Train-----------------------------------------\n",
    "        train_batch_size = 512\n",
    "        for p in range(0,int(len(train_y)/train_batch_size)):\n",
    "            btrx,btry = bcher_train.get_batch(train_x,train_y,train_batch_size)\n",
    "            tracc,trloss,cnum = train_step(btrx,btry)\n",
    "            print('Train acc:{0:12} loss:{1:12} cnum:{2}'.format(tracc,trloss,cnum))\n",
    "        #--------------------------------Dev test---------------------------------------\n",
    "        dev_batch_size = 256\n",
    "        total_dev_acc = 0\n",
    "        for step_num,q in enumerate(range(0,int(len(dev_y)/dev_batch_size))):\n",
    "            bdx,bdy = bcher_dev.get_batch(dev_x,dev_y,dev_batch_size)\n",
    "            dacc,dloss,dcnum = test_step(bdx,bdy)\n",
    "            print('Dev acc:{0:12} loss{1:12}'.format(dacc,dloss))\n",
    "            total_dev_acc += dacc\n",
    "        flag_acc = total_dev_acc/(step_num+1)\n",
    "        print('Total Dev acc:{0:12}'.format(flag_acc))\n",
    "        \n",
    "        #----------------------------------Early stop------------------------------------\n",
    "        if max_acc > flag_acc:\n",
    "            early_stop += 1\n",
    "        else:\n",
    "            max_acc = flag_acc\n",
    "            early_stop = 0\n",
    "        if early_stop > 5:\n",
    "            break\n",
    "    print('Best acc:{0:12}'.format(flag_acc))\n",
    "    \n",
    "    #---------------------------------------Test----------------------------------------\n",
    "    test_batch_size = 128\n",
    "    total_test_acc = 0\n",
    "    for step_num,r in enumerate(range(0,int(len(test_y)/test_batch_size))):\n",
    "        btex,btey = bcher_test.get_batch(test_x,test_y,test_batch_size)\n",
    "        teacc,teloss,tecnum = test_step(btex,btey)\n",
    "        print('Test acc:{0:12} loss:{1:12}'.format(teacc,teloss))\n",
    "        total_test_acc += teacc\n",
    "    max_acc = total_test_acc/(step_num+1)\n",
    "    print('Test final acc:{0:12}'.format(max_acc))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
