import os

import tensorflow as tf
from tensorflow.python.keras.applications.densenet import DenseNet169
from tensorflow.python.keras.applications.mobilenet_v2 import MobileNetV2
from tensorflow.python.keras.applications.xception import Xception
from tensorflow.python.keras.applications.resnet import ResNet50
from tensorflow.python.keras.applications.resnet import ResNet152

from tensorflow.python.keras.callbacks import EarlyStopping

from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Rescaling, MaxPooling2D, BatchNormalization, \
    Flatten, \
    GlobalMaxPooling2D
from tensorflow.python.keras.models import Sequential

from util import read_data, get_data

AUTOTUNE = tf.data.experimental.AUTOTUNE

if __name__ == '__main__':
    # Load the (train, validation) tf.data pipelines from the project helper.
    train_ds, val_ds = get_data()

    # Standard input-pipeline tuning: cache decoded images, shuffle the
    # training stream, and prefetch to overlap preprocessing with training.
    train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size = AUTOTUNE)
    val_ds = val_ds.cache().prefetch(buffer_size = AUTOTUNE)

    # Initialize the Xception backbone (a convolutional network) without its
    # classification head. NOTE: an earlier comment claimed DenseNet169 — the
    # code has always instantiated Xception.
    mobile_net = Xception(input_shape = (300, 300, 3), include_top = False)
    # Freeze the pretrained weights (transfer learning: train the head only).
    mobile_net.trainable = False

    model = Sequential([
        # BUG FIX: the original built a `normalized_ds` via dataset.map() but
        # then trained on the raw datasets, so inputs were never scaled.
        # Putting the rescaling inside the model guarantees that training,
        # validation, and inference all see [0, 1] inputs.
        Rescaling(1. / 255),
        mobile_net,
        GlobalMaxPooling2D(),
        Flatten(),
        Dense(1000, activation = 'relu'),
        BatchNormalization(),
        Dense(200, activation = 'relu'),
        BatchNormalization(),
        # Two-way softmax head paired with sparse_categorical_crossentropy.
        Dense(2, activation = 'softmax')])

    # Shrink the learning rate when the monitored metric plateaus
    # (default monitor is 'val_loss'), down to a floor of 1e-5.
    reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(min_lr=0.00001,
                                                     factor=0.2)

    model.compile(optimizer = 'adam',
                  loss = 'sparse_categorical_crossentropy',
                  metrics = ['accuracy'])

    # Inverse-frequency class weights to counter class imbalance:
    # weight_i = total / (count_i * num_classes).
    num_0 = len(os.listdir('original_data/0'))
    num_1 = len(os.listdir('original_data/1'))
    total = num_0 + num_1
    weight_for_0 = total / num_0 / 2.0
    weight_for_1 = total / num_1 / 2.0
    class_weight = {0: weight_for_0, 1: weight_for_1}
    print(class_weight)

    # Stop when validation accuracy hasn't improved for 40 epochs and roll
    # back to the best weights seen so far.
    early_stopping = EarlyStopping(
        monitor = 'val_accuracy',
        verbose = 1,
        patience = 40,
        restore_best_weights = True
    )

    # Up to 2000 epochs; early stopping normally terminates much sooner.
    # BUG FIX: class_weight was computed above but never passed to fit(),
    # so the imbalance correction had no effect.
    history = model.fit(train_ds, epochs = 2000,
                        callbacks = [early_stopping, reduce_lr],
                        validation_data = val_ds,
                        class_weight = class_weight)

    # NOTE(review): the filename says ResNet50 but the backbone is Xception —
    # kept byte-identical for backward compatibility with downstream loaders;
    # confirm before renaming.
    model.save('ResNet50.h5')
