import os
import numpy as np
np.random.seed(1337)
from keras.datasets import mnist
from keras.utils import np_utils
from keras.models import Sequential
from keras.layers import Dropout, Dense, Activation, Convolution2D, MaxPooling2D, Flatten
from keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from PIL import Image
from keras.callbacks import ModelCheckpoint
from matplotlib import pyplot as plt

def read_image(img_name):
    """Load an image file and return it as a 2-D grayscale numpy array.

    The original left the file handle open; `with` ensures it is closed
    once the pixel data has been copied into the array.
    """
    with Image.open(img_name) as im:
        # convert('L') forces a full decode to 8-bit grayscale, so the
        # array is independent of the (now closed) file.
        return np.array(im.convert('L'))
images = []
imagespath = r'F:\underwater robots\神经网络\train\mypic'
labelpath = r'F:\underwater robots\神经网络\train\newtest.txt'

# os.listdir returns entries in arbitrary, OS-dependent order, while the
# labels below are read from newtest.txt in fixed row order. Sort the
# filenames so the pairing is at least deterministic.
# NOTE(review): assumes label rows correspond to the lexicographically
# sorted .jpg filenames — confirm against how newtest.txt was generated.
for fn in sorted(os.listdir(imagespath)):
    if fn.endswith('.jpg'):
        fd = os.path.join(imagespath, fn)
        images.append(read_image(fd))
print('load success!')

# Stack all images into one array: (num_images, height, width).
X = np.array(images)
print("X.shape:", X.shape)

# One numeric label per line, same count as the images.
y = np.loadtxt(labelpath)
print("y.shape:", y.shape)

# Hold out 20% of the samples for final evaluation.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=30)

# Flatten each H x W grayscale image into a single feature vector and
# scale pixel values from [0, 255] into [0, 1].
num_pixels = X_train.shape[1] * X_train.shape[2]
X_train = X_train.reshape(len(X_train), num_pixels).astype('float32') / 255
X_test = X_test.reshape(len(X_test), num_pixels).astype('float32') / 255
print("X_train.shape:", X_train.shape)

# One-hot encode the integer class labels.
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)

# Number of output classes, inferred from the one-hot width.
num_classes = y_test.shape[1]
print("num_classes:", num_classes)

def create_model():
    """Build and compile a one-hidden-layer MLP classifier.

    Reads the module-level ``num_pixels`` (input width) and
    ``num_classes`` (output width). Returns the compiled model.
    """
    mlp = Sequential()
    mlp.add(Dense(units=num_pixels, input_dim=num_pixels,
                  kernel_initializer='normal', activation='relu'))
    # Softmax output — one probability per class.
    mlp.add(Dense(units=num_classes, kernel_initializer='normal',
                  activation='softmax'))
    mlp.compile(loss='categorical_crossentropy',
                optimizer='adam',
                metrics=['accuracy'])
    return mlp
model = create_model()

# Checkpoint that keeps only the weights with the best validation accuracy.
# NOTE(review): the original monitored 'val_acc', which never exists here:
# (a) this Keras version logs the metric as 'val_accuracy' (the plotting
# code below reads history.history['accuracy'], the post-2.3 key), and
# (b) fit() was given no validation data at all — so the checkpoint could
# never fire and best.h5 was never written.
filepath = 'best.h5'
checkpoint = ModelCheckpoint(filepath=filepath, monitor='val_accuracy',
                             save_best_only=True, mode='max')
callback = [checkpoint]
# Carve 20% off the training data each epoch so val_accuracy is computed.
history = model.fit(X_train, y_train, epochs=4, batch_size=260,
                    validation_split=0.2, callbacks=callback)


# Final evaluation on the held-out test set; score[1] is the accuracy.
score = model.evaluate(X_test, y_test)
print('MLP: %.2f%%' % (score[1] * 100))
model.save('total.h5')

# Visualize the training history, one figure per metric.
print(history.history.keys())
for metric, title in (('accuracy', 'model accuracy'),
                      ('loss', 'model loss')):
    plt.plot(history.history[metric])
    plt.xlabel('epochs')
    plt.ylabel(metric)
    plt.title(title)
    plt.show()