import os
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import warnings
# warnings.filterwarnings('ignore')
import tensorflow as tf
# tf.compat.v1.logging.set_verbosity(40)
from python_ai.common.xcommon import *
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, BatchNormalization, Activation, AveragePooling2D, GlobalAveragePooling2D

# Load CIFAR-10: 50k train / 10k test RGB images of shape (32, 32, 3), labels 0-9.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
print_numpy_ndarray_info(x_train, 'x_train')
print_numpy_ndarray_info(x_test, 'x_test')
print_numpy_ndarray_info(y_train, 'y_train')
print_numpy_ndarray_info(y_test, 'y_test')

# Normalize pixels from uint8 [0, 255] to [0, 1].
# Cast to float32 explicitly: plain `/255` on a uint8 array yields float64,
# doubling host memory for no benefit since Keras computes in float32 anyway.
x_train = x_train.reshape([-1, 32, 32, 3]).astype('float32') / 255
x_test = x_test.reshape([-1, 32, 32, 3]).astype('float32') / 255
print_numpy_ndarray_info(x_train, 'x_train')
print_numpy_ndarray_info(x_test, 'x_test')


def convCell(x, filters, num_row, num_col, padding='same', strides=(1, 1)):
    """Apply a Conv2D with a (num_row, num_col) kernel, then batch normalization.

    No activation is applied here; callers add one where needed.
    """
    conv = Conv2D(filters, (num_row, num_col), strides=strides, padding=padding)
    return BatchNormalization()(conv(x))


def resnetBlock(input, filters, strides=(1, 1)):
    """Basic two-convolution residual block with identity/projection shortcut.

    When `strides` downsamples, the shortcut is a strided 1x1 convolution so
    its output shape matches the main path; otherwise the input is passed
    through unchanged. ReLU follows the first convolution and the addition.
    """
    main = Activation('relu')(convCell(input, filters, 3, 3, strides=strides))
    main = convCell(main, filters, 3, 3, strides=(1, 1))

    # Identity shortcut when spatial size is unchanged, projection otherwise.
    shortcut = input if strides == (1, 1) else convCell(input, filters, 1, 1, strides=strides)

    return Activation('relu')(main + shortcut)


if __name__ == '__main__':
    # Build a ResNet-18-style CIFAR-10 classifier: a stem convolution,
    # 4 stages of 2 residual blocks each, then GAP + softmax head.
    inputs = tf.keras.Input([32, 32, 3])
    x = convCell(inputs, 64, 3, 3, strides=(1, 1))  # out: (32, 32, 64)

    num_blocks = 4
    out_ch = 64  # channel count doubles each stage: 64, 128, 256, 512
    for blocks_id in range(num_blocks):
        for layers_id in range(2):
            # The first block of every stage after the first downsamples with
            # stride 2; resnetBlock then uses a 1x1 projection shortcut to
            # match the new spatial size and channel count.
            downsample = blocks_id > 0 and layers_id == 0
            x = resnetBlock(x, out_ch, strides=(2, 2) if downsample else (1, 1))
        out_ch *= 2

    x = GlobalAveragePooling2D()(x)
    outputs = Dense(10, activation='softmax')(x)

    model = tf.keras.Model(inputs, outputs)

    model.summary()

    # Labels are integer class ids, hence SparseCategoricalCrossentropy.
    # `learning_rate` replaces the deprecated `lr` keyword, which newer
    # Keras versions reject with a TypeError.
    model.compile(loss=tf.keras.losses.SparseCategoricalCrossentropy(),
                  optimizer=tf.keras.optimizers.Adam(learning_rate=0.0001),
                  metrics=['accuracy'])

    history = model.fit(x_train, y_train, batch_size=64, epochs=1, validation_split=0.3)