"""
Tensorflow 2.x
"""
import os

import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, activations, losses, optimizers, metrics, callbacks
from tensorflow.keras.datasets import cifar10

from python_ai.common.xcommon import *


class ConvBnRelu(keras.Model):
    """Conv2D -> BatchNormalization -> ReLU, packaged as one reusable unit.

    Args:
        filters: number of convolution output channels.
        ksize: convolution kernel size, e.g. ``(3, 3)``.
        strides: convolution strides; defaults to no downsampling.
        padding: convolution padding mode; defaults to ``'same'``.
    """

    def __init__(self, filters, ksize, strides=(1, 1), padding='same', **kwargs):
        super().__init__(**kwargs)
        # Sub-layers are applied in the order listed in call().
        self.conv = layers.Conv2D(filters, ksize, strides, padding)
        self.bn = layers.BatchNormalization()
        self.relu = layers.ReLU()

    def call(self, inputs, training=None):
        # Thread the training flag through every stage so BatchNormalization
        # switches between batch statistics (training) and moving averages
        # (inference) correctly.
        out = self.conv(inputs, training=training)
        out = self.bn(out, training=training)
        return self.relu(out, training=training)


class InceptionNetBlock(keras.Model):
    """One Inception-style block with four parallel branches.

    Branches (outputs concatenated along the channel axis):
      1. 1x1 conv (strided)
      2. 1x1 conv (strided) -> 3x3 conv
      3. 1x1 conv (strided) -> 5x5 conv
      4. 3x3 avg-pool (stride 1) -> 1x1 conv (strided)

    All branches downsample by the same ``strides`` factor, so their spatial
    shapes match at the concat.

    Args:
        filters: output channels of every branch (block output has 4x this).
        strides: spatial strides applied once per branch.
    """

    def __init__(self, filters, strides, **kwargs):
        super().__init__(**kwargs)
        self.group01 = ConvBnRelu(filters, (1, 1), strides)
        self.group0201 = ConvBnRelu(filters, (1, 1), strides)
        self.group0202 = ConvBnRelu(filters, (3, 3))
        self.group0301 = ConvBnRelu(filters, (1, 1), strides)
        self.group0302 = ConvBnRelu(filters, (5, 5))
        self.group0401 = layers.AvgPool2D((3, 3), strides=(1, 1), padding='same')
        self.group0402 = ConvBnRelu(filters, (1, 1), strides)

    def call(self, inputs, training=None):
        # FIX: forward `training` explicitly so the BatchNormalization inside
        # each ConvBnRelu reliably sees the right mode — consistent with
        # ConvBnRelu.call, and robust even when call() is invoked directly
        # (Keras auto-propagation only covers __call__ contexts).
        x1 = self.group01(inputs, training=training)
        x2 = self.group0201(inputs, training=training)
        x2 = self.group0202(x2, training=training)
        x3 = self.group0301(inputs, training=training)
        x3 = self.group0302(x3, training=training)
        x4 = self.group0401(inputs)  # pooling has no train/infer distinction
        x4 = self.group0402(x4, training=training)
        # NHWC layout: concatenate the branch outputs along channels.
        return tf.concat([x1, x2, x3, x4], axis=3)


class InceptionNet(keras.Model):
    """Small Inception-style classifier: stem conv, stacked blocks, GAP head.

    Args:
        filters: channel count of the stem; doubled after every pair of blocks.
        n_cls: number of output classes (the head emits raw logits).
        n_blocks: number of block *pairs*; in each pair the first block
            downsamples with stride 2 and the second keeps the resolution.
    """

    def __init__(self, filters, n_cls, n_blocks, **kwargs):
        super().__init__(**kwargs)
        self.conv = ConvBnRelu(filters, (3, 3))
        self.blocks = keras.Sequential()

        # FIX: the original inner `layer_id` loop was flagged as indent-error
        # prone; adding the two blocks of each pair explicitly removes that
        # hazard while building the identical layer sequence.
        ch = filters
        for _ in range(n_blocks):
            self.blocks.add(InceptionNetBlock(ch, (2, 2)))  # downsample
            self.blocks.add(InceptionNetBlock(ch, (1, 1)))  # keep resolution
            ch *= 2

        self.glbAvg = layers.GlobalAvgPool2D()
        # NOTE(review): GlobalAvgPool2D already yields (batch, channels), so
        # Flatten is a no-op; kept so the layer list and any external
        # references to `self.flt` stay unchanged.
        self.flt = layers.Flatten()
        self.fc = layers.Dense(n_cls)

    def call(self, inputs, training=None):
        # FIX: forward `training` so BatchNorm inside the stem and the blocks
        # sees the correct mode even on direct call() invocation.
        x = self.conv(inputs, training=training)
        x = self.blocks(x, training=training)
        x = self.glbAvg(x)
        x = self.flt(x)
        return self.fc(x)


if '__main__' == __name__:
    # Fix seeds so runs are reproducible.
    tf.random.set_seed(1)
    np.random.seed(1)

    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    print('x_train', np.shape(x_train), x_train.dtype)
    print('y_train', np.shape(y_train), y_train.dtype)
    print('x_test', np.shape(x_test))
    print('y_test', np.shape(y_test))
    # Scale pixel values from uint8 [0, 255] to float32 [0, 1].
    x_train = x_train.astype(np.float32) / 255.
    x_test = x_test.astype(np.float32) / 255.
    print('x_train', np.shape(x_train), x_train.dtype)
    print('y_train', np.shape(y_train), y_train.dtype)
    print('x_test', np.shape(x_test))
    print('y_test', np.shape(y_test))

    SHUFFLE_BUFFER = 2000
    BATCH_SIZE = 64
    N_EPOCHS = 2
    ALPHA = 0.001  # Adam learning rate
    VER = 'v1.0'

    N_CLS = len(np.unique(y_train))
    FILE_NAME = os.path.basename(__file__)  # requires a top-level `import os`
    LOG_DIR = os.path.join('_log', FILE_NAME, VER)

    ds = tf.data.Dataset.from_tensor_slices((x_train, y_train))\
        .shuffle(SHUFFLE_BUFFER)\
        .batch(batch_size=BATCH_SIZE, drop_remainder=True)\
        .prefetch(buffer_size=tf.data.experimental.AUTOTUNE)

    # FIX: do not shuffle the evaluation set and do not drop the remainder —
    # otherwise up to BATCH_SIZE-1 test samples are silently excluded and the
    # reported metrics depend on shuffle order.
    ds_test = tf.data.Dataset.from_tensor_slices((x_test, y_test))\
        .batch(batch_size=BATCH_SIZE)\
        .prefetch(buffer_size=tf.data.experimental.AUTOTUNE)

    model = InceptionNet(32, N_CLS, 2)
    # model = InceptionNet(32, N_CLS, 4)  # ATTENTION deeper stacks may hit vanishing gradients
    model.build(input_shape=(None, 32, 32, 3))
    model.summary()
    model.blocks.summary()
    model.compile(
        loss=losses.SparseCategoricalCrossentropy(from_logits=True),
        optimizer=optimizers.Adam(learning_rate=ALPHA),
        # FIX: Model.compile expects `metrics` as a list.
        metrics=[metrics.sparse_categorical_accuracy]
    )

    model.fit(ds, validation_data=ds_test,
              epochs=N_EPOCHS,
              # FIX: `callbacks` is documented as a list of callbacks.
              callbacks=[callbacks.TensorBoard(log_dir=LOG_DIR, update_freq='batch', profile_batch=0)],
              )

    print('Testing ...')
    model.evaluate(ds_test)
    print('Tested')
    print('Over')
