import os,shutil

# Data pre-processing: build a Keras-style directory tree ("d5/") from the
# raw image folder and the label file.
d5_original_dataset_dir = "5in_2"  # folder holding the raw <name>.dot.png images
d5_base_dir             = "d5"     # root of the generated train/validation/test tree
# makedirs(exist_ok=True) replaces the racy check-then-mkdir pattern.
os.makedirs(d5_base_dir, exist_ok=True)
d5_files                = "d5.txt"  # label file: one "<name> <label>" record per line
d5_file_names           = []        # sample names, parallel to d5_file_labels
d5_file_labels          = []        # "Secure" / "Attack" per sample

# Split directories: train / test / validation, each with a Secure and an
# Attack class subfolder (the layout flow_from_directory expects).
d5_train_dir      = os.path.join(d5_base_dir, 'train')
d5_validation_dir = os.path.join(d5_base_dir, 'validation')
d5_test_dir       = os.path.join(d5_base_dir, 'test')

# Per-class subfolders
d5_train_secu_dir      = os.path.join(d5_train_dir, 'Secure')
d5_train_atta_dir      = os.path.join(d5_train_dir, 'Attack')
d5_test_secu_dir       = os.path.join(d5_test_dir, 'Secure')
d5_test_atta_dir       = os.path.join(d5_test_dir, 'Attack')
d5_validation_secu_dir = os.path.join(d5_validation_dir, 'Secure')
d5_validation_atta_dir = os.path.join(d5_validation_dir, 'Attack')

# makedirs(exist_ok=True) is race-free and also creates the split parent
# directories, replacing nine copies of the check-then-mkdir stanza.
for _d in (d5_train_secu_dir, d5_train_atta_dir,
           d5_test_secu_dir, d5_test_atta_dir,
           d5_validation_secu_dir, d5_validation_atta_dir):
    os.makedirs(_d, exist_ok=True)


# Parse the label file: each line starts with "<name> <label>", where label
# "No" marks a benign ("Secure") sample and anything else marks "Attack".
with open(d5_files, 'r') as f:
    for line in f:
        # split() (not split(' ')) strips the trailing newline, so a
        # two-field line like "name No\n" compares equal to "No"; the
        # original split(' ')[1] kept the "\n" and mislabeled it "Attack".
        parts = line.split()
        if not parts:
            continue  # tolerate blank lines
        d5_file_names.append(parts[0])
        d5_file_labels.append("Secure" if parts[1] == "No" else "Attack")
# The original drops the final record — TODO(review): confirm whether the
# last line of d5.txt is a real sample or a sentinel/footer.
d5_file_names = d5_file_names[:-1]
d5_file_labels = d5_file_labels[:-1]

# Partition sample names by class.
d5_secu_names = []
d5_atta_names = []
for name, label in zip(d5_file_names, d5_file_labels):
    (d5_secu_names if label == "Secure" else d5_atta_names).append(name)
print(len(d5_secu_names), len(d5_atta_names))

def _copy_images(names, dst_dir):
    """Copy <name>.dot.png for every name from the raw dataset dir into dst_dir.

    Existing destination files are left untouched so the script is re-runnable.
    """
    for fname in names:
        png = fname + ".dot.png"
        src = os.path.join(d5_original_dataset_dir, png)
        dst = os.path.join(dst_dir, png)
        if not os.path.exists(dst):
            shutil.copyfile(src, dst)

# 80/20 split per class: the first four fifths go to train, the last fifth
# to validation (integer division may drop a few trailing samples).
step_train = len(d5_secu_names) // 5
step_attac = len(d5_atta_names) // 5

_copy_images(d5_secu_names[:step_train * 4], d5_train_secu_dir)                      # train / Secure
_copy_images(d5_secu_names[step_train * 4:step_train * 5], d5_validation_secu_dir)   # validation / Secure
_copy_images(d5_atta_names[:step_attac * 4], d5_train_atta_dir)                      # train / Attack
_copy_images(d5_atta_names[step_attac * 4:step_attac * 5], d5_validation_atta_dir)   # validation / Attack
# NOTE(review): the test/ directories are created above but never populated —
# confirm whether a test split was intended.



#模型
import tensorflow as tf 
from tensorflow import keras as tk
from tensorflow.keras import layers,models,optimizers
# from tensorflow.keras import models

def mlp_random(classes, number_of_samples, activation, neurons, layers, learning_rate):
    """Build and compile a randomly-parameterized MLP classifier.

    Args:
        classes: number of output classes (softmax width).
        number_of_samples: length of each input feature vector.
        activation: activation name for the hidden layers.
        neurons: units per hidden layer.
        layers: number of hidden Dense layers.
        learning_rate: RMSprop learning rate.

    Returns:
        A compiled tf.keras Sequential model (categorical cross-entropy loss).
    """
    # Function-scope imports: the file's top-level imports only bring in the
    # `layers`/`models`/`optimizers` modules, so Sequential, BatchNormalization,
    # Dense and RMSprop were undefined (NameError at call time). The `layers`
    # module name is also shadowed by the int parameter here, so the plain
    # class names must be imported directly.
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import BatchNormalization, Dense
    from tensorflow.keras.optimizers import RMSprop

    model = Sequential()
    # Normalize raw inputs instead of relying on pre-scaled features.
    model.add(BatchNormalization(input_shape=(number_of_samples,)))
    for _ in range(layers):
        model.add(Dense(neurons, activation=activation,
                        kernel_initializer='he_uniform', bias_initializer='zeros'))
    model.add(Dense(classes, activation='softmax'))
    model.summary()
    optimizer = RMSprop(learning_rate=learning_rate)
    model.compile(loss='categorical_crossentropy', optimizer=optimizer,
                  metrics=['accuracy'])
    return model

def run_mlp(X_profiling, Y_profiling, X_validation, Y_validation, classes):
    """Train one randomly-configured MLP and return its best validation accuracy.

    Draws a random hyper-parameter configuration, trains with early stopping
    on val_accuracy, clears the Keras session, and returns
    max(history['val_accuracy']).
    """
    # Function-scope imports: `random`, EarlyStopping and the backend `K`
    # were never imported at file level (NameError at call time in the
    # original; L1 only imports os and shutil).
    import random
    from tensorflow.keras import backend as K
    from tensorflow.keras.callbacks import EarlyStopping

    # Random hyper-parameter draw for one search trial.
    mini_batch = random.randrange(500, 1000, 100)
    learning_rate = random.uniform(0.0001, 0.001)
    activation = random.choice(['relu', 'tanh', 'elu', 'selu'])
    layers = random.randrange(2, 8, 1)
    neurons = random.randrange(100, 800, 100)

    model = mlp_random(classes, len(X_profiling[0]), activation, neurons,
                       layers, learning_rate)
    es = EarlyStopping(monitor='val_accuracy', mode='max', patience=30,
                       restore_best_weights=True)
    his = model.fit(
        x=X_profiling,
        y=Y_profiling,
        batch_size=mini_batch,
        verbose=2,
        epochs=200,
        shuffle=True,
        validation_data=(X_validation, Y_validation),
        callbacks=[es])

    # Free graph/session memory between random-search trials.
    K.clear_session()
    return max(his.history['val_accuracy'])


# Enable GPU memory growth on the first GPU, when one is present, so TF does
# not reserve all VRAM up front.
gpus = tf.config.list_physical_devices('GPU')
if gpus:
    tf.config.experimental.set_memory_growth(gpus[0], True)

# Binary CNN classifier: four conv/max-pool stages followed by a
# dropout-regularized dense head with a single sigmoid output.
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(224, 224, 3)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(128, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(128, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Flatten(),
    layers.Dropout(0.5),
    layers.Dense(128, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(learning_rate=1e-4),
              metrics=['acc'])
model.summary()

from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Training images get light geometric augmentation; validation images are
# only rescaled to [0, 1].
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
)
test_datagen = ImageDataGenerator(rescale=1. / 255)

# Class labels are inferred from the Secure/Attack subfolder names.
train_generator = train_datagen.flow_from_directory(
    d5_train_dir, target_size=(224, 224), batch_size=32, class_mode='binary')
validation_generator = test_datagen.flow_from_directory(
    d5_validation_dir, target_size=(224, 224), batch_size=32, class_mode='binary')

history = model.fit(
    train_generator,
    epochs=50,
    validation_data=validation_generator,
    validation_steps=10)

# Persist per-epoch training metrics to a whitespace-delimited text file.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)
with open('result_d5', 'w') as f:
    f.write("epoch acc val_acc loss val_loss\n")
    # zip/enumerate avoids indexing by epoch and the original's local
    # variable named `str`, which shadowed the builtin.
    for epoch, (a, va, l, vl) in enumerate(zip(acc, val_acc, loss, val_loss), start=1):
        f.write("{} {} {} {} {}\n".format(epoch, a, va, l, vl))