import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import layers, activations, optimizers, losses
from python_ai.common.xcommon import *


def ConvBnRelu(x, filters, size, strides=(1, 1), name='NNm', padding='same'):
    """Conv2D -> BatchNormalization -> ReLU building block.

    Args:
        x: input 4-D feature-map tensor.
        filters: number of output channels of the convolution.
        size: convolution kernel size, e.g. (3, 3).
        strides: convolution strides; (2, 2) halves the spatial size.
        name: prefix used to build unique, self-describing layer names.
        padding: convolution padding mode ('same' preserves spatial size
            for stride 1).

    Returns:
        The activated output tensor.
    """
    # Encode kernel size / strides / padding into the conv layer name so
    # every layer in the model gets a unique, readable identifier.
    conv_name = '_'.join((
        name,
        'conv',
        repr2valid_name(size),
        repr2valid_name(strides),
        repr2valid_name(padding),
    ))
    conv_name = shrink_underscore(conv_name)
    x = layers.Conv2D(
        filters,
        size,
        strides=strides,
        padding=padding,
        # NOTE(review): the conv bias could be dropped (use_bias=False)
        # since the following BatchNorm absorbs it, but that would change
        # the variable set of existing checkpoints — left as-is.
        name=conv_name,
    )(x)
    # scale=False: the following ReLU is invariant to a positive per-channel
    # scale, so BatchNorm's gamma would be redundant here.
    x = layers.BatchNormalization(name=name + '_bn', scale=False)(x)
    x = layers.ReLU(name=name + '_rl')(x)
    return x


def InceptionBlock(x, branch_out_ch, shrink=False, name='NNm'):
    """Inception-style block with four parallel branches.

    Branches (each producing `branch_out_ch` channels):
      1. 1x1 conv
      2. 1x1 conv -> 3x3 conv
      3. 1x1 conv -> 3x3 conv -> 3x3 conv (5x5 receptive field, cheaper)
      4. 3x3 average pool -> 1x1 conv

    Args:
        x: input 4-D feature-map tensor (NHWC).
        branch_out_ch: channels per branch; the block emits
            4 * branch_out_ch channels in total.
        shrink: if True, halve the spatial resolution (stride 2 in the
            first conv of every branch).
        name: prefix used to build unique layer names.

    Returns:
        The channel-axis concatenation of the four branch outputs.
    """
    strides = (2, 2) if shrink else (1, 1)

    branch01 = ConvBnRelu(x, branch_out_ch, (1, 1), strides, name + '_b01')

    branch02 = ConvBnRelu(x, branch_out_ch, (1, 1), strides, name + '_b02_1')
    branch02 = ConvBnRelu(branch02, branch_out_ch, (3, 3), (1, 1), name + '_b02_2')

    # Two stacked 3x3 convs cover the same receptive field as one 5x5
    # at lower parameter cost (Inception v2/v3 factorization).
    branch03 = ConvBnRelu(x, branch_out_ch, (1, 1), strides, name + '_b03_1')
    branch03 = ConvBnRelu(branch03, branch_out_ch, (3, 3), (1, 1), name + '_b03_2')
    branch03 = ConvBnRelu(branch03, branch_out_ch, (3, 3), (1, 1), name + '_b03_3')

    # Pooling itself keeps stride 1; the strided 1x1 conv that follows
    # performs the downsampling when shrink is requested.
    branch04 = layers.AvgPool2D((3, 3), (1, 1), padding='same', name=name + '_b04_1')(x)
    branch04 = ConvBnRelu(branch04, branch_out_ch, (1, 1), strides, name + '_b04_2')

    # Use the Keras Concatenate layer instead of raw tf.concat so the whole
    # functional graph is built from Keras layers (cleaner summary/serialization).
    output = layers.Concatenate(axis=3, name=name + '_cat')(
        [branch01, branch02, branch03, branch04])
    return output


# Build the network: a stem conv followed by two pairs of inception blocks,
# where the first block of each pair halves the spatial resolution.
inputs = keras.Input((32, 32, 3))
x = ConvBnRelu(inputs, 16, (3, 3), (1, 1), '01')

for width, (down_name, keep_name) in ((32, ('B1', 'B2')), (64, ('B3', 'B4'))):
    x = InceptionBlock(x, width, shrink=True, name=down_name)
    x = InceptionBlock(x, width, shrink=False, name=keep_name)

# Global average pooling + softmax classifier over the 10 CIFAR-10 classes.
x = layers.GlobalAveragePooling2D(name='GlbAvgPl')(x)
x = layers.Dense(10, activation='softmax', name='FC')(x)

model = keras.Model(inputs, x)
model.summary()

if '__main__' == __name__:
    # Fixed seed for reproducible weight init and shuffling.
    tf.random.set_seed(777)

    # CIFAR-10: 50k train / 10k test RGB images already shaped (N, 32, 32, 3),
    # so no reshape is needed — only dtype conversion and scaling.
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()

    # Scale uint8 pixel values into [0, 1].
    x_train = x_train.astype('float32') / 255
    x_test = x_test.astype('float32') / 255

    model.compile(
        loss=losses.SparseCategoricalCrossentropy(),
        # 'learning_rate' is the supported keyword; the 'lr' alias was
        # deprecated and removed in recent Keras releases.
        optimizer=optimizers.Adam(learning_rate=0.001),
        metrics=['accuracy'],
    )

    # Hold out 10% of the training set for validation.
    history = model.fit(x_train, y_train, batch_size=64, epochs=3,
                        validation_split=0.1)

    score = model.evaluate(x_test, y_test, batch_size=64)
    print('accuracy', score[1])
    print('loss', score[0])
