import tensorflow as tf
from tensorflow.python.keras import regularizers, Input, Model

from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Reshape, Flatten, GlobalMaxPooling2D, \
    MaxPooling2D, Conv2D

import tensorflow as tf
from tensorflow.python.keras import Input, Model
from tensorflow.python.keras.applications.inception_resnet_v2 import InceptionResNetV2
from tensorflow.python.keras.callbacks import EarlyStopping
from tensorflow.python.keras.applications.resnet import ResNet152

from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, GlobalMaxPooling2D, Concatenate, Dropout
from tensorflow.python.keras.layers.normalization import BatchNormalization
from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Rescaling
from util import read_data, get_data, get_data_new

AUTOTUNE = tf.data.experimental.AUTOTUNE

if __name__ == '__main__':
    # Load the train/validation datasets (project helper from `util`;
    # presumably yields batched (image, label) pairs — confirm against util.get_data).
    train_ds, val_ds = get_data()

    # Standard tf.data input-pipeline tuning: cache decoded images, shuffle
    # the training stream, and overlap preprocessing with training.
    train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size = AUTOTUNE)
    val_ds = val_ds.cache().prefetch(buffer_size = AUTOTUNE)

    img_input = Input(shape = (300, 300, 3))
    # FIX: normalization is now part of the model graph. The original code
    # built a `normalized_ds` with Rescaling but then trained on the raw
    # `train_ds`, so the network never saw normalized pixels.
    x = Rescaling(1. / 255)(img_input)

    # ImageNet-pretrained ResNet152 backbone used as a frozen feature extractor.
    # FIX: the original wrote `h1.trainable = False`, which only sets an
    # attribute on the output *tensor* and freezes nothing; the flag must be
    # set on the model object itself. Also renamed the instance so it no
    # longer shadows the imported ResNet152 class.
    base_model = ResNet152(include_top = False,
                           input_shape = (300, 300, 3), weights = 'imagenet')
    base_model.trainable = False

    h1 = base_model(x)
    h1 = MaxPooling2D(pool_size = 2)(h1)
    h1 = BatchNormalization()(h1)

    # --- Squeeze-and-Excitation style channel attention -------------------
    # Squeeze branch 1: global average pooling, then a 1x1-conv bottleneck
    # (2048 -> 128 -> 2048) acting as the excitation MLP.
    hs = GlobalAveragePooling2D()(h1)
    hs = Reshape((1, 1, hs.shape[1]))(hs)
    hs = Conv2D(2048 // 16, kernel_size = 1, strides = 1, padding = "same",
                kernel_regularizer = regularizers.l2(1e-4),
                use_bias = True, activation = "relu")(hs)
    hs = Conv2D(2048, kernel_size = 1, strides = 1, padding = "same",
                kernel_regularizer = regularizers.l2(1e-4),
                use_bias = True)(hs)

    # Squeeze branch 2: same excitation applied to the global-max descriptor.
    hb = GlobalMaxPooling2D()(h1)
    hb = Reshape((1, 1, hb.shape[1]))(hb)
    hb = Conv2D(2048 // 16, kernel_size = 1, strides = 1, padding = "same",
                kernel_regularizer = regularizers.l2(1e-4),
                use_bias = True, activation = "relu")(hb)
    hb = Conv2D(2048, kernel_size = 1, strides = 1, padding = "same",
                kernel_regularizer = regularizers.l2(1e-4),
                use_bias = True)(hb)

    # Sum of max + average descriptors, squashed to a per-channel gate in
    # (0, 1), then used to re-weight the backbone feature map channel-wise.
    out = tf.nn.sigmoid(hs + hb)
    out = out * h1

    # Classifier head: two fully-connected layers with batch norm, then a
    # 2-way softmax (binary classification with sparse integer labels).
    out = Flatten()(out)
    out = Dense(1000, activation = "relu")(out)
    out = BatchNormalization()(out)
    out = Dense(200, activation = "relu")(out)
    out = BatchNormalization()(out)
    outputs = Dense(2, activation = "softmax")(out)
    model = Model(inputs = img_input, outputs = outputs)

    model.compile(optimizer = 'adam',
                  loss = 'sparse_categorical_crossentropy',
                  metrics = ['accuracy'])

    # Shrink the learning rate x0.2 when validation loss plateaus
    # (monitor='val_loss' is the Keras default, made explicit here).
    reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(monitor = 'val_loss',
                                                     factor = 0.2,
                                                     min_lr = 0.00001)
    early_stopping = EarlyStopping(
        monitor = 'val_accuracy',
        verbose = 1,
        patience = 40,
        restore_best_weights = True
    )
    # Up to 2000 epochs; in practice early stopping (patience 40 on
    # val_accuracy) ends training much sooner and restores the best weights.
    model.fit(train_ds, epochs = 2000, callbacks = [early_stopping, reduce_lr],
              validation_data = val_ds)
    # FIX: artifact named after the actual backbone — the original saved a
    # ResNet152-based model under the name 'InceptionResNetV2-SE.h5'.
    model.save('ResNet152-SE.h5')