#coding=utf-8
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Conv2D
from keras.layers import MaxPooling2D
from keras.optimizers import SGD
from keras.utils import np_utils
#!/usr/bin/python
# Load the dataset into memory
import numpy as np

from load_dataset import load_dataset

# Read the raw training set from disk and convert it to numpy arrays.
train_images, train_labels, labels_name = load_dataset("train_dir")

train_images = np.asarray(train_images)
train_labels = np.asarray(train_labels)

# The model is trained with categorical_crossentropy, which expects one-hot
# encoded targets: count the distinct classes, then vectorize the integer
# labels into nb_classes-dimensional rows (two classes here -> 2-D labels).
nb_classes = len(set(train_labels))
train_labels = np_utils.to_categorical(train_labels, nb_classes)

# Cast pixel values to float and rescale each channel into the [0, 1] range.
train_images = train_images.astype('float32')
train_images /= 255.0

# Define the CNN as a Sequential (linear) stack of layers, passed to the
# constructor in order. Architecture: two conv/pool stages followed by a
# fully connected classifier head with a softmax output.
model = Sequential([
    # Stage 1: two 3x3 convolutions (32 filters), 2x2 max-pooling, dropout.
    Conv2D(32, (3, 3), padding='same', input_shape=(64, 64, 3)),
    Activation('relu'),
    Conv2D(32, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    # Stage 2: two 3x3 convolutions (64 filters), 2x2 max-pooling, dropout.
    Conv2D(64, (3, 3), padding='same'),
    Activation('relu'),
    Conv2D(64, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),

    # Classifier head: flatten feature maps, one hidden dense layer,
    # then an nb_classes-way softmax producing the final class scores.
    Flatten(),
    Dense(512),
    Activation('relu'),
    Dropout(0.25),
    Dense(nb_classes),
    Activation('softmax'),
])

# Print a summary of the network architecture to stdout.
model.summary()


# Train the model.
def train(dataset, labelset, nb_epoch=300):
    """Compile the module-level `model` and fit it on the given data.

    Args:
        dataset: training images; the model expects (n, 64, 64, 3) float
            inputs scaled to [0, 1].
        labelset: one-hot encoded class labels matching `dataset`.
        nb_epoch: number of training epochs (default 300).
    """
    # Plain SGD optimizer with a fixed learning rate of 0.002.
    # NOTE(review): no momentum is configured despite the original comment.
    optimizer = SGD(lr=0.002)

    # Configure the learning process: loss, optimizer and reported metrics.
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    # Run the actual optimization loop.
    model.fit(dataset, labelset, epochs=nb_epoch)


# Destination file for the trained weights (HDF5 format).
MODEL_PATH = './model/model.h5'


if __name__ == '__main__':
    import os  # local import: only needed for the save path handling below

    # Train on the prepared dataset, then persist the model to disk.
    train(train_images, train_labels)
    # model.save raises if the target directory is missing, so create it
    # first (exist_ok makes this a no-op when ./model already exists).
    os.makedirs(os.path.dirname(MODEL_PATH), exist_ok=True)
    model.save(MODEL_PATH)

