{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# DENSENET + CTC"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nclass: 5990\n",
      "柞\n",
      "____________________________________________________________________________________________________\n",
      "Layer (type)                     Output Shape          Param #     Connected to                     \n",
      "====================================================================================================\n",
      "the_input (InputLayer)           (None, 32, None, 1)   0                                            \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_55 (Conv2D)               (None, 16, None, 64)  1600        the_input[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_55 (BatchNor (None, 16, None, 64)  256         conv2d_55[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "activation_55 (Activation)       (None, 16, None, 64)  0           batch_normalization_55[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_56 (Conv2D)               (None, 16, None, 8)   4616        activation_55[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_49 (Concatenate)     (None, 16, None, 72)  0           conv2d_55[0][0]                  \n",
      "                                                                   conv2d_56[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_56 (BatchNor (None, 16, None, 72)  288         concatenate_49[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_56 (Activation)       (None, 16, None, 72)  0           batch_normalization_56[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_57 (Conv2D)               (None, 16, None, 8)   5192        activation_56[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_50 (Concatenate)     (None, 16, None, 80)  0           concatenate_49[0][0]             \n",
      "                                                                   conv2d_57[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_57 (BatchNor (None, 16, None, 80)  320         concatenate_50[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_57 (Activation)       (None, 16, None, 80)  0           batch_normalization_57[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_58 (Conv2D)               (None, 16, None, 8)   5768        activation_57[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_51 (Concatenate)     (None, 16, None, 88)  0           concatenate_50[0][0]             \n",
      "                                                                   conv2d_58[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_58 (BatchNor (None, 16, None, 88)  352         concatenate_51[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_58 (Activation)       (None, 16, None, 88)  0           batch_normalization_58[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_59 (Conv2D)               (None, 16, None, 8)   6344        activation_58[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_52 (Concatenate)     (None, 16, None, 96)  0           concatenate_51[0][0]             \n",
      "                                                                   conv2d_59[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_59 (BatchNor (None, 16, None, 96)  384         concatenate_52[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_59 (Activation)       (None, 16, None, 96)  0           batch_normalization_59[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_60 (Conv2D)               (None, 16, None, 8)   6920        activation_59[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_53 (Concatenate)     (None, 16, None, 104) 0           concatenate_52[0][0]             \n",
      "                                                                   conv2d_60[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_60 (BatchNor (None, 16, None, 104) 416         concatenate_53[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_60 (Activation)       (None, 16, None, 104) 0           batch_normalization_60[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_61 (Conv2D)               (None, 16, None, 8)   7496        activation_60[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_54 (Concatenate)     (None, 16, None, 112) 0           concatenate_53[0][0]             \n",
      "                                                                   conv2d_61[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_61 (BatchNor (None, 16, None, 112) 448         concatenate_54[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_61 (Activation)       (None, 16, None, 112) 0           batch_normalization_61[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_62 (Conv2D)               (None, 16, None, 8)   8072        activation_61[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_55 (Concatenate)     (None, 16, None, 120) 0           concatenate_54[0][0]             \n",
      "                                                                   conv2d_62[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_62 (BatchNor (None, 16, None, 120) 480         concatenate_55[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_62 (Activation)       (None, 16, None, 120) 0           batch_normalization_62[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_63 (Conv2D)               (None, 16, None, 8)   8648        activation_62[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_56 (Concatenate)     (None, 16, None, 128) 0           concatenate_55[0][0]             \n",
      "                                                                   conv2d_63[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_63 (BatchNor (None, 16, None, 128) 512         concatenate_56[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_63 (Activation)       (None, 16, None, 128) 0           batch_normalization_63[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_64 (Conv2D)               (None, 16, None, 128) 16384       activation_63[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)              (None, 16, None, 128) 0           conv2d_64[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "average_pooling2d_5 (AveragePool (None, 8, None, 128)  0           dropout_5[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_64 (BatchNor (None, 8, None, 128)  512         average_pooling2d_5[0][0]        \n",
      "____________________________________________________________________________________________________\n",
      "activation_64 (Activation)       (None, 8, None, 128)  0           batch_normalization_64[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_65 (Conv2D)               (None, 8, None, 8)    9224        activation_64[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_57 (Concatenate)     (None, 8, None, 136)  0           average_pooling2d_5[0][0]        \n",
      "                                                                   conv2d_65[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_65 (BatchNor (None, 8, None, 136)  544         concatenate_57[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_65 (Activation)       (None, 8, None, 136)  0           batch_normalization_65[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_66 (Conv2D)               (None, 8, None, 8)    9800        activation_65[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_58 (Concatenate)     (None, 8, None, 144)  0           concatenate_57[0][0]             \n",
      "                                                                   conv2d_66[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_66 (BatchNor (None, 8, None, 144)  576         concatenate_58[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_66 (Activation)       (None, 8, None, 144)  0           batch_normalization_66[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_67 (Conv2D)               (None, 8, None, 8)    10376       activation_66[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_59 (Concatenate)     (None, 8, None, 152)  0           concatenate_58[0][0]             \n",
      "                                                                   conv2d_67[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_67 (BatchNor (None, 8, None, 152)  608         concatenate_59[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_67 (Activation)       (None, 8, None, 152)  0           batch_normalization_67[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_68 (Conv2D)               (None, 8, None, 8)    10952       activation_67[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_60 (Concatenate)     (None, 8, None, 160)  0           concatenate_59[0][0]             \n",
      "                                                                   conv2d_68[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_68 (BatchNor (None, 8, None, 160)  640         concatenate_60[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_68 (Activation)       (None, 8, None, 160)  0           batch_normalization_68[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_69 (Conv2D)               (None, 8, None, 8)    11528       activation_68[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_61 (Concatenate)     (None, 8, None, 168)  0           concatenate_60[0][0]             \n",
      "                                                                   conv2d_69[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_69 (BatchNor (None, 8, None, 168)  672         concatenate_61[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_69 (Activation)       (None, 8, None, 168)  0           batch_normalization_69[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_70 (Conv2D)               (None, 8, None, 8)    12104       activation_69[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_62 (Concatenate)     (None, 8, None, 176)  0           concatenate_61[0][0]             \n",
      "                                                                   conv2d_70[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_70 (BatchNor (None, 8, None, 176)  704         concatenate_62[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_70 (Activation)       (None, 8, None, 176)  0           batch_normalization_70[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_71 (Conv2D)               (None, 8, None, 8)    12680       activation_70[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_63 (Concatenate)     (None, 8, None, 184)  0           concatenate_62[0][0]             \n",
      "                                                                   conv2d_71[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_71 (BatchNor (None, 8, None, 184)  736         concatenate_63[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_71 (Activation)       (None, 8, None, 184)  0           batch_normalization_71[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_72 (Conv2D)               (None, 8, None, 8)    13256       activation_71[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_64 (Concatenate)     (None, 8, None, 192)  0           concatenate_63[0][0]             \n",
      "                                                                   conv2d_72[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_72 (BatchNor (None, 8, None, 192)  768         concatenate_64[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_72 (Activation)       (None, 8, None, 192)  0           batch_normalization_72[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_73 (Conv2D)               (None, 8, None, 128)  24576       activation_72[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "dropout_6 (Dropout)              (None, 8, None, 128)  0           conv2d_73[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "average_pooling2d_6 (AveragePool (None, 4, None, 128)  0           dropout_6[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_73 (BatchNor (None, 4, None, 128)  512         average_pooling2d_6[0][0]        \n",
      "____________________________________________________________________________________________________\n",
      "activation_73 (Activation)       (None, 4, None, 128)  0           batch_normalization_73[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_74 (Conv2D)               (None, 4, None, 8)    9224        activation_73[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_65 (Concatenate)     (None, 4, None, 136)  0           average_pooling2d_6[0][0]        \n",
      "                                                                   conv2d_74[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_74 (BatchNor (None, 4, None, 136)  544         concatenate_65[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_74 (Activation)       (None, 4, None, 136)  0           batch_normalization_74[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_75 (Conv2D)               (None, 4, None, 8)    9800        activation_74[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_66 (Concatenate)     (None, 4, None, 144)  0           concatenate_65[0][0]             \n",
      "                                                                   conv2d_75[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_75 (BatchNor (None, 4, None, 144)  576         concatenate_66[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_75 (Activation)       (None, 4, None, 144)  0           batch_normalization_75[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_76 (Conv2D)               (None, 4, None, 8)    10376       activation_75[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_67 (Concatenate)     (None, 4, None, 152)  0           concatenate_66[0][0]             \n",
      "                                                                   conv2d_76[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_76 (BatchNor (None, 4, None, 152)  608         concatenate_67[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_76 (Activation)       (None, 4, None, 152)  0           batch_normalization_76[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_77 (Conv2D)               (None, 4, None, 8)    10952       activation_76[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_68 (Concatenate)     (None, 4, None, 160)  0           concatenate_67[0][0]             \n",
      "                                                                   conv2d_77[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_77 (BatchNor (None, 4, None, 160)  640         concatenate_68[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_77 (Activation)       (None, 4, None, 160)  0           batch_normalization_77[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_78 (Conv2D)               (None, 4, None, 8)    11528       activation_77[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_69 (Concatenate)     (None, 4, None, 168)  0           concatenate_68[0][0]             \n",
      "                                                                   conv2d_78[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_78 (BatchNor (None, 4, None, 168)  672         concatenate_69[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_78 (Activation)       (None, 4, None, 168)  0           batch_normalization_78[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_79 (Conv2D)               (None, 4, None, 8)    12104       activation_78[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_70 (Concatenate)     (None, 4, None, 176)  0           concatenate_69[0][0]             \n",
      "                                                                   conv2d_79[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_79 (BatchNor (None, 4, None, 176)  704         concatenate_70[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_79 (Activation)       (None, 4, None, 176)  0           batch_normalization_79[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_80 (Conv2D)               (None, 4, None, 8)    12680       activation_79[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_71 (Concatenate)     (None, 4, None, 184)  0           concatenate_70[0][0]             \n",
      "                                                                   conv2d_80[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_80 (BatchNor (None, 4, None, 184)  736         concatenate_71[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_80 (Activation)       (None, 4, None, 184)  0           batch_normalization_80[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "conv2d_81 (Conv2D)               (None, 4, None, 8)    13256       activation_80[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "concatenate_72 (Concatenate)     (None, 4, None, 192)  0           concatenate_71[0][0]             \n",
      "                                                                   conv2d_81[0][0]                  \n",
      "____________________________________________________________________________________________________\n",
      "batch_normalization_81 (BatchNor (None, 4, None, 192)  768         concatenate_72[0][0]             \n",
      "____________________________________________________________________________________________________\n",
      "activation_81 (Activation)       (None, 4, None, 192)  0           batch_normalization_81[0][0]     \n",
      "____________________________________________________________________________________________________\n",
      "permute (Permute)                (None, None, 4, 192)  0           activation_81[0][0]              \n",
      "____________________________________________________________________________________________________\n",
      "flatten (TimeDistributed)        (None, None, 768)     0           permute[0][0]                    \n",
      "____________________________________________________________________________________________________\n",
      "out (Dense)                      (None, None, 5990)    4606310     flatten[0][0]                    \n",
      "====================================================================================================\n",
      "Total params: 4,896,742\n",
      "Trainable params: 4,889,254\n",
      "Non-trainable params: 7,488\n",
      "____________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "# DenseNet + CTC (no recurrent layers; see model summary above)\n",
    "#Edit:2017-11-21 \n",
    "#@sima\n",
    "#%%\n",
    "%matplotlib inline\n",
    "%load_ext autoreload\n",
    "%autoreload 2\n",
    "\n",
    "\n",
    "from keras.layers.convolutional import Conv2D,MaxPooling2D,ZeroPadding2D\n",
    "from keras.layers.normalization import BatchNormalization\n",
    "from keras.layers.core import Reshape,Masking,Lambda,Permute\n",
    "from keras.layers import Input,Dense,Flatten\n",
    "from keras.preprocessing.sequence import pad_sequences\n",
    "from keras.layers.recurrent import GRU,LSTM\n",
    "from keras.layers.wrappers import Bidirectional\n",
    "from keras.models import Model\n",
    "from keras import backend as K\n",
    "from keras.preprocessing import image\n",
    "from keras.optimizers import Adam,SGD,Adadelta\n",
    "from keras import losses\n",
    "from keras.layers.wrappers import TimeDistributed\n",
    "from keras.callbacks import EarlyStopping,ModelCheckpoint,TensorBoard\n",
    "from keras.utils import plot_model\n",
    "from matplotlib import pyplot as plt\n",
    "import tensorflow as tf  \n",
    "\n",
    "\n",
    "import numpy as np \n",
    "import os\n",
    "from PIL import Image \n",
    "import json\n",
    "import threading\n",
    "\n",
    "\n",
    "\n",
    "from imp import reload \n",
    "import densenet\n",
    "reload(densenet)\n",
    "\n",
    "def get_session(gpu_fraction=0.6):\n",
    "    '''Build a TF session that grabs only `gpu_fraction` of GPU memory.\n",
    "\n",
    "    E.g. with 6GB of GPU memory, gpu_fraction=0.33 allocates ~2GB.\n",
    "    Honors OMP_NUM_THREADS for intra-op parallelism when it is set.\n",
    "    '''\n",
    "    num_threads = os.environ.get('OMP_NUM_THREADS')\n",
    "    gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_fraction)\n",
    "\n",
    "    if num_threads:\n",
    "        # os.environ yields a string; ConfigProto's thread count is an int32\n",
    "        # field, so convert explicitly (passing the raw string raises).\n",
    "        return tf.Session(config=tf.ConfigProto(\n",
    "            gpu_options=gpu_options, intra_op_parallelism_threads=int(num_threads)))\n",
    "    else:\n",
    "        return tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))\n",
    "\n",
    "K.set_session(get_session())\n",
    "\n",
    "\n",
    "\n",
    "def ctc_lambda_func(args):\n",
    "    '''CTC loss wrapper for use inside a Keras Lambda layer.\n",
    "\n",
    "    args: tuple of tensors (y_pred, labels, input_length, label_length).\n",
    "    Returns the per-sample CTC batch cost from the Keras backend.\n",
    "    '''\n",
    "    y_pred, labels, input_length, label_length = args\n",
    "    return K.ctc_batch_cost(labels, y_pred, input_length, label_length)\n",
    "\n",
    "\n",
    "# Load the 5990-character vocabulary (one char per line).\n",
    "char = ''\n",
    "with open('D:\\\\char_std_5990.txt', encoding='utf-8') as f:\n",
    "    for line in f.readlines():\n",
    "        char += line.strip('\\r\\n')\n",
    "\n",
    "# caffe_ocr uses index 0 as the CTC blank, but TensorFlow's CTC reserves\n",
    "# the LAST class for the blank label, so drop the leading blank and pad\n",
    "# the tail with a placeholder glyph:\n",
    "# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/util/ctc/ctc_loss_calculator.h\n",
    "char = char[1:] + '卍'\n",
    "print('nclass:', len(char))\n",
    "\n",
    "id_to_char = {i: j for i, j in enumerate(char)}\n",
    "print(id_to_char[5988])\n",
    "\n",
    "# Training hyper-parameters.\n",
    "maxlabellength = 20\n",
    "img_h = 32\n",
    "img_w = 280\n",
    "nclass = len(char)\n",
    "rnnunit = 256\n",
    "batch_size = 64\n",
    "\n",
    "    \n",
    "class random_uniform_num():\n",
    "    \"\"\"Uniformly shuffled index dispenser: within one pass over the pool,\n",
    "    every index is handed out exactly once.\n",
    "\n",
    "    NOTE(review): when a batch straddles the end of a pass, the pool is\n",
    "    reshuffled before the remainder is drawn, so an index taken from the\n",
    "    old tail can reappear in the same batch — presumably acceptable for\n",
    "    training; confirm.\n",
    "    \"\"\"\n",
    "    def __init__(self,total):\n",
    "        self.total = total\n",
    "        self.range = list(range(total))\n",
    "        np.random.shuffle(self.range)\n",
    "        self.index = 0\n",
    "\n",
    "    def get(self,batchsize):\n",
    "        \"\"\"Return the next `batchsize` indices, reshuffling at pass end.\"\"\"\n",
    "        if self.index + batchsize > self.total:\n",
    "            # Pass exhausted: take the tail, reshuffle, top up from the head.\n",
    "            tail = self.range[self.index:self.total]\n",
    "            np.random.shuffle(self.range)\n",
    "            self.index = self.index + batchsize - self.total\n",
    "            head = self.range[0:self.index]\n",
    "            return tail + head\n",
    "        batch = self.range[self.index:self.index + batchsize]\n",
    "        self.index += batchsize\n",
    "        return batch\n",
    "    \n",
    "def readtrainfile(filename):\n",
    "    \"\"\"Parse a label file into {image_path: [char_id, ...]}.\n",
    "\n",
    "    Each line has the form '<image_path> <id> <id> ...' (space separated);\n",
    "    ids are kept as strings.\n",
    "    \"\"\"\n",
    "    dic = {}\n",
    "    with open(filename,'r') as f:\n",
    "        for line in f:\n",
    "            parts = line.strip('\\r\\n').split(' ')\n",
    "            dic[parts[0]] = parts[1:]\n",
    "    return dic\n",
    "\n",
    "def gen3(trainfile,batchsize=64,maxlabellength=10,imagesize=(32,280)):\n",
    "    \"\"\"Infinite batch generator for CTC training.\n",
    "\n",
    "    trainfile: label file mapping image path -> 1-based character ids.\n",
    "    Yields (inputs, outputs) where inputs matches the model's four input\n",
    "    layers and outputs['ctc'] is a dummy target (the real loss is computed\n",
    "    by the Lambda layer inside the model).\n",
    "    \"\"\"\n",
    "    image_label = readtrainfile(trainfile)\n",
    "    _imagefile = [i for i,j in image_label.items()]\n",
    "    # np.float was removed from numpy; plain float (= float64) is identical.\n",
    "    x = np.zeros((batchsize, imagesize[0], imagesize[1], 1), dtype=float)\n",
    "    # 10000 marks padding; entries beyond label_length are presumably ignored\n",
    "    # by K.ctc_batch_cost, though stale ids from a previous batch may remain.\n",
    "    labels = np.ones([batchsize,maxlabellength])*10000\n",
    "    input_length = np.zeros([batchsize,1])\n",
    "    label_length = np.zeros([batchsize,1])\n",
    "\n",
    "    r_n = random_uniform_num(len(_imagefile))\n",
    "    print('图片总量',len(_imagefile))\n",
    "    _imagefile = np.array(_imagefile)\n",
    "    while 1:\n",
    "        shufimagefile = _imagefile[r_n.get(batchsize)]\n",
    "        for i,j in enumerate(shufimagefile):\n",
    "            img1 = Image.open(j).convert('L')\n",
    "            # Normalize grayscale pixels to [-0.5, 0.5].\n",
    "            img = np.array(img1,'f')/255.0-0.5\n",
    "\n",
    "            x[i] = np.expand_dims(img,axis=2)\n",
    "            label_ids = image_label[j]  # renamed from `str` (shadowed builtin)\n",
    "            label_length[i] = len(label_ids)\n",
    "\n",
    "            if(len(label_ids)<=0):\n",
    "                print(\"len<0\",j)\n",
    "            # The CNN downsamples width by 8, giving the output sequence length.\n",
    "            input_length[i] = imagesize[1]//8\n",
    "            # caffe_ocr uses 0 as the CTC blank, but TF's CTC reserves the LAST\n",
    "            # class for blank, so shift every id down by one.\n",
    "            # https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/util/ctc/ctc_loss_calculator.h\n",
    "            labels[i,:len(label_ids)] = [int(k)-1 for k in label_ids]\n",
    "\n",
    "        inputs = {'the_input': x,\n",
    "                'the_labels': labels,\n",
    "                'input_length': input_length,\n",
    "                'label_length': label_length,\n",
    "                }\n",
    "        outputs = {'ctc': np.zeros([batchsize])}\n",
    "        yield (inputs,outputs)\n",
    "\n",
    "\n",
    "\n",
    "        \n",
    "\n",
    "# Recognition network: DenseNet backbone -> per-timestep class probabilities.\n",
    "# NOTE(review): `input` shadows the Python builtin; consider renaming.\n",
    "input = Input(shape=(img_h,None,1),name='the_input')\n",
    "\n",
    "y_pred= densenet.dense_cnn(input,nclass)\n",
    "\n",
    "# basemodel is the inference-only model (image -> character probabilities).\n",
    "basemodel = Model(inputs=input,outputs=y_pred)\n",
    "basemodel.summary()\n",
    "\n",
    "\n",
    "# Extra inputs that feed the CTC loss computed inside the graph.\n",
    "labels = Input(name='the_labels',shape=[maxlabellength],dtype='float32')\n",
    "input_length = Input(name='input_length', shape=[1], dtype='int64')\n",
    "label_length = Input(name='label_length', shape=[1], dtype='int64')\n",
    "\n",
    "# The Lambda layer's output IS the CTC loss (see ctc_lambda_func).\n",
    "loss_out = Lambda(ctc_lambda_func, output_shape=(1,), name='ctc')([y_pred, labels, input_length, label_length]) \n",
    "\n",
    "model = Model(inputs=[input, labels, input_length, label_length], outputs=loss_out)\n",
    "\n",
    "adam = Adam()\n",
    "\n",
    "# Since the model's output is already the loss, the 'loss function' just\n",
    "# passes y_pred through unchanged.\n",
    "model.compile(loss={'ctc': lambda y_true, y_pred: y_pred}, optimizer=adam,metrics=['accuracy'])\n",
    "\n",
    "\n",
    "# Save weights every epoch; stop after 10 epochs with no improvement.\n",
    "checkpoint = ModelCheckpoint(r'E:\\deeplearn\\OCR\\Sample\\model\\weights-densent-{epoch:02d}.hdf5',\n",
    "                           save_weights_only=True)\n",
    "earlystop = EarlyStopping(patience=10)\n",
    "tensorboard = TensorBoard(r'E:\\deeplearn\\OCR\\Sample\\model\\tflog-densent',write_graph=True)\n",
    "                           "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "-----------beginfit--\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# Build the infinite train/validation batch generators (see gen3).\n",
    "print('-----------beginfit--')\n",
    "cc1=gen3(r'D:\\train1.txt',batchsize=batch_size,maxlabellength=maxlabellength,imagesize=(img_h,img_w))\n",
    "cc2=gen3(r'D:\\test1.txt',batchsize=batch_size,maxlabellength=maxlabellength,imagesize=(img_h,img_w))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/100\n",
      "图片总量 3279601\n",
      "51242/51243 [============================>.] - ETA: 0s - loss: 0.8914 - acc: 0.8893图片总量 364400\n",
      "51243/51243 [==============================] - 17585s - loss: 0.8913 - acc: 0.8893 - val_loss: 1.3684 - val_acc: 0.9543\n",
      "Epoch 2/100\n",
      "24431/51243 [=============>................] - ETA: 8426s - loss: 0.2350 - acc: 0.9623"
     ]
    }
   ],
   "source": [
    "# 3279601 training / 364400 validation images (the totals gen3 prints at\n",
    "# startup); steps = images // batch_size so each epoch covers the data once.\n",
    "res = model.fit_generator(cc1,\n",
    "                    steps_per_epoch =3279601// batch_size,\n",
    "                    epochs = 100,\n",
    "                    validation_data =cc2 ,\n",
    "                    validation_steps = 364400// batch_size,\n",
    "                    callbacks =[earlystop,checkpoint,tensorboard],\n",
    "                    verbose=1\n",
    "                    )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "loss = 0.2353 acc = 0.9623"
   ]
  }
 ],
 "metadata": {
  "anaconda-cloud": {},
  "kernelspec": {
   "display_name": "Python [conda root]",
   "language": "python",
   "name": "conda-root-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
