import numpy as np
import tensorflow as tf
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Load the comma-separated dataset: each row holds 256 pixel values
# followed by an integer class label in the last column.
data = np.loadtxt('../img_16_10k.txt', delimiter=',')
x, y = data[:, :-1], data[:, [-1]]

# Reshape the flat pixel vectors into (N, 16, 16, 1) image tensors
# so they can feed a 2-D convolutional network.
x = x.reshape((-1, 16, 16, 1))

# Hold out 30% of the samples for validation.
train_x, test_x, train_y, test_y = train_test_split(x, y, test_size=0.3)

'''
Network plan:
3.1 conv1 (5x5, 6 filters):  16*16*1 -> 12*12*6
3.2 max-pool (2x2):          12*12*6 -> 6*6*6
3.3 conv2 (3x3, 12 filters): 6*6*6   -> 4*4*12
3.4 max-pool (2x2):          4*4*12  -> 2*2*12
3.5 fully connected layers with dropout regularization
3.6 ReLU activations throughout
'''
class Net(tf.keras.models.Model):
    """Small LeNet-style CNN for 16x16 single-channel images.

    Feature-map shapes through the stem (no padding, stride 1 convs):
        conv1 (5x5, 6 filters):  16x16x1 -> 12x12x6
        max-pool (2x2):          12x12x6 -> 6x6x6
        conv2 (3x3, 12 filters): 6x6x6   -> 4x4x12
        max-pool (2x2):          4x4x12  -> 2x2x12
        flatten -> dense(1024) -> dropout -> dense(100) -> dropout
        -> dense(10) -> softmax over the 10 classes
    """

    def __init__(self):
        super(Net, self).__init__()
        self.stem = tf.keras.Sequential([
            tf.keras.layers.Conv2D(6, kernel_size=(5, 5), strides=1),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=2),
            tf.keras.layers.Activation('relu'),
            tf.keras.layers.Conv2D(12, kernel_size=(3, 3), strides=1),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=2),
            tf.keras.layers.Activation('relu'),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(1024, activation='relu'),
            tf.keras.layers.Dropout(0.3),
            tf.keras.layers.Dense(100, activation='relu'),
            tf.keras.layers.Dropout(0.3),
            tf.keras.layers.Dense(10),
            tf.keras.layers.Activation('softmax'),
        ])

    def call(self, x, training=None):
        # BUG FIX: the original hardcoded training=False here, which
        # silently disabled both Dropout layers even during model.fit().
        # Forward the Keras-supplied training flag instead, so dropout is
        # active while training and off at inference. The added parameter
        # has a default, so existing callers are unaffected.
        return self.stem(x, training=training)

# Build and compile the network: Adam optimizer, sparse categorical
# cross-entropy (y holds integer class ids), accuracy as the metric.
model = Net()
model.compile(
    optimizer=tf.keras.optimizers.Adam(),
    loss=tf.keras.losses.SparseCategoricalCrossentropy(),
    metrics=['accuracy'],
)

# Train for 10 epochs, evaluating on the held-out split after each epoch.
history = model.fit(
    train_x,
    train_y,
    batch_size=64,
    epochs=10,
    validation_data=(test_x, test_y),
)

# history.history holds the per-epoch metric curves recorded by model.fit.
curves = history.history

# Accuracy: training vs. validation, one marker per epoch.
plt.plot(curves['accuracy'], 'o-', label='accuracy')
plt.plot(curves['val_accuracy'], 'o-', label='val_accuracy')
plt.legend()
plt.show()

# Loss: training vs. validation, one marker per epoch.
plt.plot(curves['loss'], 'o-', label='loss')
plt.plot(curves['val_loss'], 'o-', label='val_loss')
plt.legend()
plt.show()