import tensorflow as tf
from tensorflow.keras import layers
from tensorflow.keras import Model
import numpy as np
from matplotlib import pyplot as plt
import os
from tensorflow.keras.preprocessing.image import ImageDataGenerator
np.set_printoptions(threshold=np.inf)
import LeNet,AlexNet,VggNet,InceptionNet,ResNet

# Load CIFAR-10: 50k training / 10k test RGB images (32x32x3), 10 classes.
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()

# Scale pixel intensities from [0, 255] down to [0.0, 1.0].
x_train = x_train / 255.0
x_test = x_test / 255.0

# Optional on-the-fly data augmentation (currently disabled):
# image_gen_train = ImageDataGenerator(
#     rescale=1. / 1.,        # use 255 as the denominator to rescale images to 0-1
#     rotation_range=45,      # random rotation of up to 45 degrees
#     width_shift_range=.15,  # random horizontal shift
#     height_shift_range=.15, # random vertical shift
#     horizontal_flip=True,   # random horizontal flip
#     zoom_range=0.5          # random zoom of up to 50%
# )
# image_gen_train.fit(x_train)


class Baseline(Model):
    """Minimal CNN baseline for CIFAR-10.

    One Conv-BN-ReLU-MaxPool-Dropout feature stage followed by a
    two-layer dense classifier ending in a 10-way softmax.
    """

    def __init__(self):
        super(Baseline, self).__init__()
        # Convolutional feature extractor.
        self.c1 = layers.Conv2D(filters=6, kernel_size=(5, 5), padding='same')
        self.b1 = layers.BatchNormalization()
        self.a1 = layers.Activation('relu')
        self.p1 = layers.MaxPool2D(pool_size=(2, 2), strides=2, padding='same')
        self.d1 = layers.Dropout(0.2)

        # Fully-connected classifier head.
        self.flatten = layers.Flatten()
        self.f1 = layers.Dense(128, activation='relu')
        self.d2 = layers.Dropout(0.2)
        self.f2 = layers.Dense(10, activation='softmax')

    def call(self, x):
        """Forward pass: conv stage, flatten, dense head; returns class probabilities."""
        for stage in (self.c1, self.b1, self.a1, self.p1, self.d1,
                      self.flatten, self.f1, self.d2):
            x = stage(x)
        return self.f2(x)


# Candidate architectures (uncomment/select one; all take 32x32x3 inputs, 10 classes):
#   Baseline(), LeNet..., AlexNet..., VggNet...,
#   InceptionNet.Inception10(num_blocks=2, num_classes=10),
#   ResNet.ResNet18(block_list=[2, 2, 2, 2])
modelname = 'ResNet'
model = ResNet.ResNet18(block_list=[2, 2, 2, 2])

# Labels are integer class ids, so use the sparse loss/metric pair.
# The models end in a softmax layer, hence from_logits=False.
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
              metrics=['sparse_categorical_accuracy'])

checkpoint_savepath = './23 checkpoint/' + modelname + '.ckpt'
# TF weight checkpoints are stored as '<path>.index' plus data shards;
# the .index file's existence signals a previously saved checkpoint.
if os.path.exists(checkpoint_savepath + '.index'):
    print('————————————————————load model——————————————————————')
    model.load_weights(checkpoint_savepath)

# Save weights only, and only when validation performance improves.
cp_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath=checkpoint_savepath,
    save_weights_only=True,
    save_best_only=True
)

history = model.fit(x_train, y_train, batch_size=32, epochs=5,
                    validation_data=(x_test, y_test),
                    validation_freq=1, callbacks=[cp_callback])
model.summary()

# Dump every trainable variable (name, shape, full values — see
# np.set_printoptions at the top) for offline inspection.
# Context manager guarantees the file is closed even if a write fails,
# and avoids shadowing the builtin name `file`.
with open('./23' + modelname + 'weights.txt', 'w') as weights_file:
    for v in model.trainable_variables:
        weights_file.write(str(v.name) + '\n' + str(v.shape) + '\n' + str(v.numpy()) + '\n')

# Per-epoch curves recorded by model.fit.
acc = history.history['sparse_categorical_accuracy']
val_acc = history.history['val_sparse_categorical_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']

# Two panels side by side: accuracy on the left, loss on the right.
panels = [(acc, val_acc, 'Accuracy'), (loss, val_loss, 'Loss')]
for idx, (train_curve, val_curve, metric) in enumerate(panels, start=1):
    plt.subplot(1, 2, idx)
    plt.plot(train_curve, label='Training ' + metric)
    plt.plot(val_curve, label='Validation ' + metric)
    plt.title('Training and Validation ' + metric)
    plt.legend()
plt.show()
