import math
import os

import tensorflow as tf
from tensorflow.python.keras import Input, Model
from tensorflow.python.keras.applications.densenet import DenseNet169, DenseNet121
from tensorflow.python.keras.applications.mobilenet_v2 import MobileNetV2
from tensorflow.python.keras.applications.xception import Xception
from tensorflow.python.keras.applications.inception_resnet_v2 import InceptionResNetV2
from tensorflow.python.keras.applications.resnet import ResNet50, ResNet152
from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Rescaling, BatchNormalization, Conv1D, \
    Activation, multiply
from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Reshape, Flatten, GlobalMaxPooling2D, \
    MaxPooling2D, Conv2D
from tensorflow.python.keras import regularizers, Input, Model

import tensorflow as tf
from tensorflow.python.keras import Input, Model
from tensorflow.python.keras.applications.inception_resnet_v2 import InceptionResNetV2
from tensorflow.python.keras.callbacks import EarlyStopping
from tensorflow.python.keras.applications.resnet import ResNet152

from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, GlobalMaxPooling2D, Concatenate, Dropout
from tensorflow.python.keras.layers.normalization import BatchNormalization
from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, Rescaling
from util import read_data, get_data, get_data_new
from tensorflow.python.keras.layers import GlobalAveragePooling2D, Dense, GlobalMaxPooling2D, Concatenate, Dropout
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.callbacks import EarlyStopping

from util import read_data, get_data

# Let tf.data tune prefetch buffer sizes dynamically at runtime.
AUTOTUNE = tf.data.experimental.AUTOTUNE
def eca_block(input_feature, b=1, gamma=2, name=""):
    """Apply Efficient Channel Attention (ECA) to a 4-D feature map.

    Implements the ECA-Net scheme: the 1-D convolution kernel size is
    derived adaptively from the channel count, the spatial dimensions are
    squeezed with global average pooling, and a channel-wise Conv1D +
    sigmoid produces per-channel weights that rescale the input.

    Args:
        input_feature: 4-D tensor (batch, H, W, C); C must be statically known.
        b: additive constant of the kernel-size heuristic k = |log2(C) + b| / gamma.
        gamma: divisor of the kernel-size heuristic.
        name: suffix appended to the Conv1D layer name.

    Returns:
        Tensor with the same shape as ``input_feature``, reweighted per channel.
    """
    channels = input_feature.shape[-1]
    # Adaptive kernel size from log2 of the channel count, forced to be odd.
    k = int(abs((math.log(channels, 2) + b) / gamma))
    if k % 2 == 0:
        k += 1

    squeezed = GlobalAveragePooling2D()(input_feature)  # (batch, C)
    weights = Reshape((-1, 1))(squeezed)                # (batch, C, 1)
    weights = Conv1D(1, kernel_size=k, padding="same",
                     name="eca_layer_" + str(name), use_bias=False)(weights)
    weights = Activation('sigmoid')(weights)
    weights = Reshape((1, 1, -1))(weights)              # (batch, 1, 1, C)

    return multiply([input_feature, weights])

if __name__ == '__main__':
    train_ds, val_ds = get_data()

    # Cache + shuffle + prefetch for input-pipeline throughput.
    train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
    val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)

    # BUG FIX: the original built a normalized dataset but then trained on the
    # raw one (the Rescaling result was only used to peek at a single batch).
    # Apply the 1/255 rescaling to BOTH the training and validation pipelines.
    normalization_layer = Rescaling(1. / 255)
    train_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
    val_ds = val_ds.map(lambda x, y: (normalization_layer(x), y))

    # Two ImageNet backbones share a single input; their pooled feature maps
    # are concatenated and re-weighted by the ECA attention block.
    input_layer = Input(shape=(300, 300, 3))
    resnet = ResNet152(include_top=False, input_tensor=input_layer,
                       input_shape=(300, 300, 3))
    xception = Xception(include_top=False, input_tensor=input_layer,
                        input_shape=(300, 300, 3))

    # BUG FIX: the original set `.trainable = False` on the Concatenate output
    # tensor, which is a silent no-op. Freeze the pretrained backbones
    # themselves so only the newly added head is trained.
    resnet.trainable = False
    xception.trainable = False

    # NOTE(review): the original passed input_shape=(7, 7, 1024) to these
    # layers; that kwarg is ignored in the functional API (and ResNet152
    # outputs 2048 channels anyway), so it is dropped here.
    top1_model = MaxPooling2D(data_format='channels_last')(resnet.output)
    top2_model = MaxPooling2D(data_format='channels_last')(xception.output)
    # NOTE(review): axis=1 concatenates along the spatial (height) axis;
    # channel-wise fusion would use axis=-1 — confirm the intent.
    concatenated = Concatenate(axis=1)([top1_model, top2_model])

    # Classification head: ECA attention, then two dense+BN stages and softmax.
    out = eca_block(concatenated)
    out = Flatten()(out)
    top_model = Dense(units=512, activation="relu")(out)
    top_model = BatchNormalization()(top_model)
    top_model = Dense(units=256, activation="relu")(top_model)
    top_model = BatchNormalization()(top_model)
    top_model = Dense(units=2, activation="softmax")(top_model)
    model = Model(inputs=input_layer, outputs=top_model)

    # Integer labels -> sparse categorical cross-entropy.
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    early_stopping = EarlyStopping(
        monitor='val_accuracy',
        verbose=1,
        patience=40,
        restore_best_weights=True
    )
    # Monitors val_loss by default; shrinks LR by 5x down to a floor of 1e-5.
    reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(min_lr=0.00001,
                                                     factor=0.2)

    # Inverse-frequency class weights to counter class imbalance.
    num_0 = len(os.listdir('original_data/0'))
    num_1 = len(os.listdir('original_data/1'))
    total = num_0 + num_1
    weight_for_0 = total / num_0 / 2.0
    weight_for_1 = total / num_1 / 2.0
    class_weight = {0: weight_for_0, 1: weight_for_1}
    print(class_weight)

    # 2000 epochs; accuracy is decent — be patient. Early stopping
    # (patience=40 on val_accuracy) will usually end training much sooner.
    history = model.fit(train_ds, epochs=2000, callbacks=[early_stopping, reduce_lr],
                        validation_data=val_ds, class_weight=class_weight)
    # NOTE(review): filename is misleading — the model is a ResNet152+Xception
    # ensemble, not ResNet50. Kept as-is in case downstream code expects it.
    model.save('ResNet50.h5')
