import warnings
warnings.filterwarnings('ignore')
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf
tf.compat.v1.logging.set_verbosity(40)

from tensorflow.keras.models import Sequential
from tensorflow.keras import utils, models, optimizers, losses, metrics
from tensorflow.keras.layers import Dense, Activation, Conv2D, MaxPooling2D, Flatten, Dropout

# Load CIFAR-10: 50,000 train / 10,000 test RGB images, each (32, 32, 3)
# uint8, with integer class labels of shape (N, 1) in the range [0, 9].
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()

# load_data() already returns arrays shaped (N, 32, 32, 3), so the
# original reshape was a no-op; just cast to float32 and scale pixel
# values from [0, 255] to [0, 1].
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255

# VGG-16-style convolutional network for 10-class CIFAR-10 classification.
# Five conv stages (64 -> 128 -> 256 -> 512 -> 512 filters), each ending in
# a 2x2 max-pool that halves the spatial resolution: 32 -> 16 -> 8 -> 4 -> 2 -> 1.
model = Sequential([
    # input_shape builds the model eagerly, so model.summary() works
    # before the first call to fit().
    Conv2D(64, (3, 3), activation='relu', padding='same',
           input_shape=(32, 32, 3)),
    Conv2D(64, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),  # -> 16, 16, 64

    Conv2D(128, (3, 3), activation='relu', padding='same'),
    Conv2D(128, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),  # -> 8, 8, 128

    Conv2D(256, (3, 3), activation='relu', padding='same'),
    Conv2D(256, (3, 3), activation='relu', padding='same'),
    Conv2D(256, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),  # -> 4, 4, 256

    Conv2D(512, (3, 3), activation='relu', padding='same'),
    Conv2D(512, (3, 3), activation='relu', padding='same'),
    Conv2D(512, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),  # -> 2, 2, 512

    Conv2D(512, (3, 3), activation='relu', padding='same'),
    Conv2D(512, (3, 3), activation='relu', padding='same'),
    Conv2D(512, (3, 3), activation='relu', padding='same'),
    MaxPooling2D((2, 2)),  # -> 1, 1, 512

    Flatten(),  # -> 512-dim feature vector

    # Classifier head: two dropout-regularized dense layers, then a
    # softmax over the 10 CIFAR-10 classes.
    Dense(64, activation='relu'),
    Dropout(0.5),
    Dense(64, activation='relu'),
    Dropout(0.5),
    Dense(10, activation='softmax')
])

# Labels are integer class indices (shape (N, 1)), so the sparse variant
# of categorical cross-entropy is the correct loss (no one-hot encoding).
model.compile(loss=losses.SparseCategoricalCrossentropy(),
              # `lr` was deprecated and then removed from Keras optimizers;
              # the supported keyword is `learning_rate`.
              optimizer=optimizers.Adam(learning_rate=0.001),
              metrics=['accuracy'])

# Train for a single epoch, holding out 30% of the training data for
# validation; `history` records the per-epoch loss/accuracy curves.
history = model.fit(x_train, y_train,
                    batch_size=64,
                    epochs=1,
                    validation_split=0.3)

# Final evaluation on the untouched test split; evaluate() returns
# [loss, accuracy] matching the compiled loss and metrics.
test_loss, test_accuracy = model.evaluate(x_test, y_test)
print('accuracy', test_accuracy)
print('loss', test_loss)
