import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
from keras import Model, Sequential, models, layers, activations, optimizers, losses

# Stream training images from ./cifar2/train, rescaling pixels from [0, 255] to [0, 1].
# class_mode='binary' yields scalar 0/1 labels, so the directory is expected to
# contain exactly two class subfolders — TODO confirm against the dataset layout.
# Batch size is the flow_from_directory default; images are resized to 32x32.
img_train = ImageDataGenerator(rescale=1.0 / 255).flow_from_directory(directory='./cifar2/train', target_size=(32, 32),
                                                                      class_mode='binary')
# Same pipeline for the held-out split in ./cifar2/test.
img_test = ImageDataGenerator(rescale=1.0 / 255).flow_from_directory(directory='./cifar2/test', target_size=(32, 32),
                                                                     class_mode='binary')

class Alexnet(Model):
    """A small AlexNet-style CNN for binary image classification.

    Two Conv2D + max-pooling stages extract features, which are flattened
    and fed through two ReLU dense layers to a single sigmoid output
    (probability of the positive class).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Feature extractor. BUG FIX: the original Conv2D layers had no
        # activation, so the stacked convolutions collapsed into linear maps
        # separated only by pooling; AlexNet-style conv blocks apply ReLU
        # after every convolution.
        self.conv = Sequential([
            layers.Conv2D(filters=96, kernel_size=(3, 3), activation=activations.relu),
            layers.MaxPooling2D(),
            layers.Conv2D(filters=256, kernel_size=(3, 3), activation=activations.relu),
            layers.MaxPooling2D(),
        ])
        # Flatten the feature maps into a vector for the dense head.
        self.flat = Sequential([layers.Flatten()])
        # Classifier head: two hidden ReLU layers, sigmoid output in [0, 1].
        self.fc = Sequential([
            layers.Dense(units=128, activation=activations.relu),
            layers.Dense(units=128, activation=activations.relu),
            layers.Dense(units=1, activation=activations.sigmoid)
        ])

    def call(self, inputs, training=None, mask=None):
        """Forward pass: conv features -> flatten -> dense head.

        Args:
            inputs: batch of images; built below with shape (None, 32, 32, 3)
                — confirm before reusing with other input sizes.
            training: accepted for Keras API compatibility; unused here.
            mask: accepted for Keras API compatibility; unused here.

        Returns:
            Tensor of shape (batch, 1) with sigmoid probabilities.
        """
        out = self.conv(inputs)
        out = self.flat(out)
        out = self.fc(out)
        return out


model = Alexnet()
# Build with the expected input shape so summary() can report parameter counts.
model.build(input_shape=(None, 32, 32, 3))
model.summary()
# Sigmoid output + binary cross-entropy for the two-class problem.
# metrics as a list is the documented form (a bare string also works in
# recent Keras, but the list is unambiguous).
model.compile(optimizer=optimizers.Adam(), loss=losses.binary_crossentropy, metrics=['acc'])
# BUG FIX: batch_size must NOT be passed to fit() when the input is a
# generator — the batch size comes from the generator itself, and Keras
# raises a ValueError if both are supplied.
log = model.fit(img_train, epochs=5, validation_data=img_test)
# BUG FIX (naming): these history entries are accuracies, not losses — the
# original bound history['acc'] to a variable called train_loss.
train_acc = log.history['acc']
val_acc = log.history['val_acc']
plt.plot(train_acc, c='r', label='train acc')
plt.plot(val_acc, c='g', label='val acc')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()
