# import warnings
# warnings.filterwarnings('ignore')
import os
# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
# tf.compat.v1.logging.set_verbosity(40)

from tensorflow.keras.models import Sequential
from tensorflow.keras import utils, models, optimizers, losses, metrics, Input, Model
from tensorflow.keras.layers import Dense, Activation, Conv2D, MaxPooling2D, Flatten, Dropout, ReLU, BatchNormalization

# Fix the global TF random seed so weight initialization (and any other
# TF-level randomness) is reproducible across runs.
tf.random.set_seed(777)

def ConvBnRelu(x, out_ch):
    """Apply a 3x3 same-padded conv -> batch norm -> ReLU to tensor *x*.

    Args:
        x: input Keras tensor (NHWC feature map).
        out_ch: number of output channels for the convolution.

    Returns:
        The activated output tensor, same spatial size as *x*.
    """
    conv = Conv2D(out_ch, (3, 3), padding='same')
    bn = BatchNormalization()
    relu = Activation('relu')
    return relu(bn(conv(x)))


def MyMaxPooling(x):
    """Halve the spatial resolution of *x* with a 2x2, stride-2 max pool.

    'same' padding keeps odd-sized inputs from dropping their last row/column.
    """
    pool = MaxPooling2D([2, 2], [2, 2], padding='same')
    return pool(x)


# VGG-16 layer configuration: integers are conv output channels, 'M' marks
# a 2x2/stride-2 max-pooling step. Matches the classic 13-conv VGG-16 layout.
_VGG16_CFG = [
    64, 64, 'M',
    128, 128, 'M',
    256, 256, 256, 'M',
    512, 512, 512, 'M',
    512, 512, 512, 'M',
]

# 224x224 RGB input, as in the original VGG paper.
inputs = Input([224, 224, 3])

x = inputs
for spec in _VGG16_CFG:
    if spec == 'M':
        x = MyMaxPooling(x)
    else:
        x = ConvBnRelu(x, spec)

# Classifier head: two 4096-wide FC layers, then 1000-way softmax.
x = Flatten()(x)
for width in (4096, 4096):
    x = Dense(width, activation='relu')(x)
x = Dense(1000, activation='softmax')(x)

model = Model(inputs, x)
model.summary()
