from __future__ import print_function
import numpy as np
np.random.seed(1337)  # for reproducibility

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
from keras.models import model_from_json
import os,pickle
from PIL import Image

# Directory of labelled training crops. Filenames are expected to look like
# "<label>_<anything>"; the text before the first '_' is looked up in `table`.
train_data = "E:\\data\\验证码\\12306验证码\\traintext\\"

batch_size = 30
nb_epoch = 12           # NOTE(review): fit() below hardcodes nb_epoch=100 — confirm which is intended
nb_filters = 10         # unused by create_model(), which hardcodes its filter counts
img_row, img_col = 29, 60   # glyphs are 29 px high, 60 px wide
pool_size = (2, 2)
kernel_size = (3, 3)
# 80 captcha classes. (The original assigned 10 first — an MNIST leftover —
# then immediately overwrote it; the dead assignment is removed here.)
nb_classes = 80

# label-string -> class-index mapping produced by an earlier preprocessing step.
# NOTE(review): pickle.load executes arbitrary code if type.pk is untrusted.
with open("type.pk", "rb") as pk_file:
    table = pickle.load(pk_file)


def load_data():
    """Load every training image in `train_data` with its class label.

    Each file is opened, converted to grayscale ("L"), and its label is taken
    from the filename prefix before the first '_', mapped to a class index via
    the global `table`.

    Returns:
        (data, label): data is float32 of shape (N, 60, 29, 1); label is a
        one-hot matrix of shape (N, nb_classes).
    """
    imgs = os.listdir(train_data)
    num = len(imgs)
    data = np.empty((num, 1, 29, 60), dtype="float32")
    label = np.empty((num,), dtype="int")

    for i in range(num):
        img = Image.open(train_data + imgs[i]).convert("L")
        data[i, :, :, :] = np.asarray(img, dtype="float32")
        label[i] = table[imgs[i].split('_')[0]]

    # Preprocessing: channels-last layout expected by the model.
    # NOTE(review): this reshape reinterprets the (29, 60) pixel buffer as
    # (60, 29) rather than transposing it, so images are spatially scrambled —
    # but training and prediction both use the same layout, so the pipeline is
    # self-consistent. Kept as-is to match the trained model.
    data = data.reshape(data.shape[0], 60, 29, 1)
    # was a hardcoded 80; use the module constant so the two cannot drift apart
    label = np_utils.to_categorical(label, nb_classes)
    return data, label

def create_model():
    """Build the 80-way captcha CNN: three conv stages plus a dense softmax head.

    Input is a single-channel (60, 29, 1) image; output is an 80-class
    softmax distribution.
    """
    layers = [
        # stage 1: two 64-filter 3x3 convs, 2x2 pool, dropout
        Convolution2D(64, 3, 3, border_mode='same', input_shape=(60, 29, 1)),
        Activation('relu'),
        Convolution2D(64, 3, 3, border_mode='same'),
        Activation('relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Dropout(0.5),
        # stage 2: two 128-filter 3x3 convs, 2x2 pool
        Convolution2D(128, 3, 3, border_mode='same'),
        Activation('relu'),
        Convolution2D(128, 3, 3, border_mode='same'),
        Activation('relu'),
        MaxPooling2D(pool_size=(2, 2)),
        # stage 3: one 256-filter 3x3 conv (no padding), 2x2 pool
        Convolution2D(256, 3, 3, border_mode='valid'),
        Activation('relu'),
        MaxPooling2D(pool_size=(2, 2)),
        # classifier head: flatten 3D feature maps to a vector, then dense layers
        Flatten(),
        Dense(256),
        Activation('relu'),
        Dropout(0.5),
        Dense(80),
        Activation('softmax'),
    ]
    model = Sequential()
    for layer in layers:
        model.add(layer)
    return model

# Module-level training run: load the full labelled set (no validation split),
# build the network, and fit it.
X_train, Y_train=load_data()
model = create_model()

# adadelta adapts its learning rate, so no manual schedule is configured.
model.compile(loss='categorical_crossentropy',optimizer='adadelta',metrics=['accuracy'])

# NOTE(review): nb_epoch=100 here ignores the nb_epoch = 12 constant defined
# above — confirm which epoch count is intended.
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=100,verbose=1)

def datapredict(img, model):
    """Classify one captcha glyph image and return its label string.

    img: a PIL image (any mode; converted to grayscale here), assumed to be
         29 px high by 60 px wide — TODO confirm against the training data.
    model: trained Keras model producing an 80-way softmax.

    Returns the key of the global `table` whose class index won.
    """
    data = np.empty((1, 1, 29, 60), dtype="float32")
    data[0] = np.asarray(img.convert("L"), dtype="float32")
    # Same channels-last reshape used at training time.
    data = data.reshape(data.shape[0], 60, 29, 1)
    result = model.predict(data)
    # argmax instead of argwhere(... >= max) + list.index(array): the old form
    # passed a numpy array to list.index and broke on ties; argmax yields a
    # plain int and picks the same (first) winner in the normal case.
    idx = int(np.argmax(result[0]))
    return list(table.keys())[list(table.values()).index(idx)]

def datapredictstr(file, model):
    """Open the image at path `file` and classify it.

    Thin wrapper over datapredict(); the previous body duplicated that
    function line-for-line, so delegate instead of maintaining two copies.
    """
    return datapredict(Image.open(file), model)

def test(path, model):
    """Classify every image under `path` and prefix each filename with its predicted label."""
    for fname in os.listdir(path):
        src = path + "/" + fname
        predicted = datapredictstr(src, model)
        os.rename(src, path + "/" + predicted + "_" + fname)

def savemodel(model, file):
    """Persist `model`: architecture to <file>.json, weights to <file>.h5.

    The original left the JSON file handle open (open(...).write(...));
    use a context manager so it is closed deterministically.
    """
    with open(file + ".json", 'w') as f:
        f.write(model.to_json())
    model.save_weights(file + ".h5")

def loadmodel(file):
    """Rebuild a model saved by savemodel(): architecture from <file>.json,
    weights from <file>.h5.

    The original leaked the JSON file handle (open(...).read() with no
    close); use a context manager instead.
    """
    with open(file + ".json") as f:
        model = model_from_json(f.read())
    model.load_weights(file + ".h5")
    return model

#savemodel(model,"model")
#model = loadmodel("model")
#test("./text",model)
