
import os
import tensorflow as tf
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, MaxPool2D, BatchNormalization, Activation, GlobalAvgPool2D
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import SparseCategoricalCrossentropy
from tensorflow.keras.metrics import SparseCategoricalAccuracy
from tensorflow.keras.callbacks import ModelCheckpoint
import matplotlib.pyplot as plt

# Load CIFAR-10 and scale pixel intensities from [0, 255] into [0, 1].
(train_x, train_y), (test_x, test_y) = cifar10.load_data()

train_x = train_x / 255.0
test_x = test_x / 255.0
print(train_x.shape, train_y.shape, test_x.shape, test_y.shape)


class ConvBnRelu(Model):
    """Conv2D -> BatchNormalization -> ReLU composite layer.

    Args:
        ch: number of output channels (Conv2D filters).
        kernel_size: convolution kernel size (default 3).
        strides: convolution stride (default 1).
        padding: padding mode (default 'same'; preserves spatial size at stride 1).
    """

    def __init__(self, ch, kernel_size=3, strides=1, padding='same'):
        super(ConvBnRelu, self).__init__()
        self.conv = Conv2D(ch, kernel_size, strides, padding=padding)
        self.bn = BatchNormalization()
        self.ac = Activation('relu')

    def call(self, x, training=None):
        """Apply conv -> batch-norm -> ReLU.

        `training` is forwarded to BatchNormalization so it uses batch
        statistics while training and moving averages at inference time,
        instead of relying on implicit call-context propagation.
        """
        x = self.conv(x)
        x = self.bn(x, training=training)
        x = self.ac(x)
        return x


class InceptionBlock(Model):
    """GoogLeNet-style Inception block with four parallel branches.

    Each branch produces `ch` channels; the outputs are concatenated along
    the channel axis, so the block emits 4 * ch channels total:
      1. 1x1 convolution
      2. 1x1 convolution followed by 3x3 convolution
      3. 1x1 convolution followed by 5x5 convolution
      4. 3x3 max-pooling followed by 1x1 convolution

    Args:
        ch: channels produced by each branch.
        strides: stride of each branch's first convolution (the pooling
            branch applies it in its trailing 1x1 convolution instead).
    """

    def __init__(self, ch, strides=1):
        super(InceptionBlock, self).__init__()
        self.ch = ch
        self.strides = strides
        # Branch 1: 1x1 convolution.
        self.c1 = ConvBnRelu(ch, kernel_size=1, strides=strides)

        # Branch 2: 1x1 convolution, then 3x3 convolution.
        self.c2_1 = ConvBnRelu(ch, kernel_size=1, strides=strides, padding='same')
        self.c2_2 = ConvBnRelu(ch, kernel_size=3, strides=1, padding='same')

        # Branch 3: 1x1 convolution, then 5x5 convolution.
        self.c3_1 = ConvBnRelu(ch, kernel_size=1, strides=strides, padding='same')
        self.c3_2 = ConvBnRelu(ch, kernel_size=5, strides=1, padding='same')

        # Branch 4: 3x3 max-pooling (stride 1, 'same'), then 1x1 convolution.
        self.c4_1 = MaxPool2D(pool_size=3, strides=1, padding='same')
        self.c4_2 = ConvBnRelu(ch, kernel_size=1, strides=strides)

    def call(self, x):
        """Run all four branches on `x` and concatenate their outputs."""
        x1 = self.c1(x)

        x2 = self.c2_1(x)
        x2 = self.c2_2(x2)

        x3 = self.c3_1(x)
        x3 = self.c3_2(x3)

        x4 = self.c4_1(x)
        x4 = self.c4_2(x4)
        # Concatenate along the channel axis (NHWC layout, axis 3).
        return tf.concat([x1, x2, x3, x4], axis=3)


class InceptionNetModel(Model):
    """Small InceptionNet: stem conv, stacked Inception blocks, GAP + softmax head.

    Args:
        num_blocks: number of InceptionBlock stages stacked sequentially.
        num_classes: number of output classes for the softmax head.
        init_ch: channel count of the stem and the first block; the
            per-branch channel count doubles after every stage.
        **kwargs: forwarded to `tf.keras.Model.__init__` (e.g. `name`).
    """

    def __init__(self, num_blocks, num_classes, init_ch=16, **kwargs):
        # Forward kwargs so options like `name=` actually reach the base class.
        super(InceptionNetModel, self).__init__(**kwargs)
        self.in_channels = init_ch
        self.out_channels = init_ch
        self.num_blocks = num_blocks
        # Stem: 3x3 Conv-BN-ReLU producing init_ch channels.
        self.c1 = ConvBnRelu(init_ch)

        self.blocks = Sequential()
        for block_id in range(num_blocks):
            self.blocks.add(InceptionBlock(self.out_channels, strides=1))
            # Double the per-branch channel count for the next stage.
            self.out_channels *= 2
        self.gap = GlobalAvgPool2D()
        self.fc1 = Dense(num_classes, activation='softmax')

    def call(self, x):
        """Forward pass: stem -> inception stages -> global pool -> softmax."""
        x = self.c1(x)
        x = self.blocks(x)
        x = self.gap(x)
        x = self.fc1(x)
        return x


# Build and compile the network: Adam optimizer, sparse cross-entropy on
# softmax probabilities (from_logits=False), tracked by sparse accuracy.
model = InceptionNetModel(1, 10)
model.compile(
    optimizer=Adam(),
    loss=SparseCategoricalCrossentropy(from_logits=False),
    metrics=[SparseCategoricalAccuracy()],
)

# Checkpoint callback: keep only the best weights (lowest validation loss),
# saved once per epoch as a weights-only TF checkpoint.
model_save_path = './checkpoint/baseline.ckpt'
model_save_callback = ModelCheckpoint(
    model_save_path,
    monitor='val_loss',
    verbose=0,
    save_best_only=True,
    save_weights_only=True,
    mode='auto',
    save_freq='epoch',
)

# Resume from a previous checkpoint when one exists on disk
# (TF checkpoints are detected via their '.index' sidecar file).
if os.path.exists(model_save_path + '.index'):
    print('*************** load model *********************************')
    model.load_weights(model_save_path)

history = model.fit(
    train_x,
    train_y,
    batch_size=32,
    epochs=5,
    validation_data=(test_x, test_y),
    validation_freq=1,
    callbacks=[model_save_callback],
)

model.summary()

# ************************ Dump trainable weights ************************
weight_txt_path = './weight.txt'
with open(weight_txt_path, 'w') as f:
    for var in model.trainable_variables:
        f.write(str(var.name) + '\n')
        f.write(str(var.shape) + '\n')
        f.write(str(var.numpy()) + '\n')

# ************************ Plot loss/accuracy curves *********************
loss = history.history['loss']
val_loss = history.history['val_loss']
acc = history.history['sparse_categorical_accuracy']
val_acc = history.history['val_sparse_categorical_accuracy']

plt.subplot(2, 1, 1)
plt.plot(loss, label='train_loss')
plt.plot(val_loss, label='val_loss')
plt.title('loss')
plt.legend()

plt.subplot(2, 1, 2)
plt.plot(acc, label='train_acc')
plt.plot(val_acc, label='val_acc')
plt.title('accuracy')
plt.legend()

plt.show()
