import os,sys
sys.path.append('../')
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import warnings
warnings.filterwarnings("ignore")
import time
import pickle

import tensorflow as tf 
from tensorflow import keras as tk
from tensorflow.keras import layers
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv1D, BatchNormalization
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam, RMSprop,SGD
from tensorflow.keras.utils import to_categorical
from tensorflow.keras import backend as K
# from one_cycle_lr import OneCycleLR
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.models import Sequential
from sklearn.preprocessing import StandardScaler
import random
import h5py
import numpy as np
from ascon_helper import *
from ga_helper import *

# Enable memory growth on every visible GPU so TensorFlow allocates device
# memory on demand instead of grabbing it all at startup.  The original code
# only configured gpus[0], which misses machines with more than one device.
gpus = tf.config.list_physical_devices('GPU')
for gpu in gpus:
    tf.config.experimental.set_memory_growth(gpu, True)

class partial_acc(tk.metrics.Metric):
    """Streaming element-wise binary accuracy.

    Predictions are thresholded at 0.5 and compared element-by-element
    against the labels; `result()` is the running fraction of matches.
    """

    def __init__(self, name="pac", **kwargs):
        super(partial_acc, self).__init__(name=name, **kwargs)
        # Running count of correct element-wise predictions.
        self.corr = self.add_weight(name="correct", initializer="zeros")
        # Running count of elements seen so far.
        self.count = self.add_weight(name="count", initializer="zeros")

    def update_state(self, y_true, y_pred, sample_weight=None):
        labels = tf.cast(y_true, dtype=tf.float32)
        probs = tf.cast(y_pred, dtype=tf.float32)
        # Hard 0/1 decision per element, then 1.0 wherever it matches the label.
        decisions = tf.where(probs > 0.5, 1., 0.)
        hits = tf.where(decisions == labels, 1.0, 0.0)
        # `hits` contains only 0.0/1.0, so its sum is exactly the match count.
        self.corr.assign_add(tf.reduce_sum(hits))
        self.count.assign_add(tf.cast(tf.size(hits), dtype=tf.float32))

    def result(self):
        return self.corr / self.count

    def reset_states(self):
        self.corr.assign(0.0)
        self.count.assign(0.0)
  
def mlp_random(classes, number_of_samples, activation, neurons, layers, learning_rate):
    """Build and compile an MLP classifier.

    Architecture: BatchNorm on the raw trace samples, `layers` dense hidden
    layers of `neurons` units each, and a softmax output over `classes`.
    """
    model = Sequential()
    # BatchNorm on the input takes the place of manual standardization.
    model.add(BatchNormalization(input_shape=(number_of_samples,)))
    for _ in range(layers):
        model.add(Dense(neurons, activation=activation, kernel_initializer='he_uniform', bias_initializer='zeros'))
    model.add(Dense(classes, activation='softmax'))
    model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer=RMSprop(learning_rate=learning_rate),
                  metrics=['accuracy'])
    return model

def mlp_random_r(classes, number_of_samples, activation, neurons, layers, learning_rate):
    """Build and compile an MLP regressor (linear output, MSE loss).

    Same body as `mlp_random` but the head has no softmax, so the network
    predicts `classes` unconstrained real values.
    """
    model = Sequential()
    # BatchNorm on the input takes the place of manual standardization.
    model.add(BatchNormalization(input_shape=(number_of_samples,)))
    for _ in range(layers):
        model.add(Dense(neurons, activation=activation, kernel_initializer='he_uniform', bias_initializer='zeros'))
    # Linear head: regression instead of classification.
    model.add(Dense(classes))
    model.summary()
    model.compile(loss='mse',
                  optimizer=RMSprop(learning_rate=learning_rate),
                  metrics=['mae'])
    return model

def run_mlp(X_profiling, Y_profiling, X_validation, Y_validation, classes):
    """Train the tuned MLP classifier and return its predictions on the
    validation traces.

    The hyper-parameters are the best configuration from a prior random
    search (see the "MLP 65" result log in the comments near the bottom of
    this file).

    Returns the model's softmax output for `X_validation`,
    shape (len(X_validation), classes).
    """
    mini_batch = 700
    learning_rate = 0.000180006094109679
    activation = 'tanh'
    layers = 2
    neurons = 100

    model = mlp_random(classes, len(X_profiling[0]), activation, neurons, layers, learning_rate)
    # Stop when validation accuracy plateaus and roll back to the best epoch.
    es = EarlyStopping(monitor='val_accuracy', mode='max', patience=30, restore_best_weights=True)
    model.fit(
        x=X_profiling,
        y=Y_profiling,
        batch_size=mini_batch,
        verbose=2,
        epochs=200,
        shuffle=True,
        validation_data=(X_validation, Y_validation),
        callbacks=[es])

    prediction = model.predict(X_validation)
    # Release graph/session memory so repeated training runs don't accumulate.
    K.clear_session()
    return prediction

def run_mlp_r(X_profiling, Y_profiling, X_validation, Y_validation, classes):
    """Train the tuned MLP regressor and return its predictions on the
    validation traces.

    The hyper-parameters are the best configuration from a prior random
    search (see the "MLP r" result log in the comments near the bottom of
    this file).

    Returns the model's raw (linear) output for `X_validation`,
    shape (len(X_validation), classes).
    """
    mini_batch = 800
    learning_rate = 0.0002769973772928945
    activation = 'elu'
    layers = 3
    neurons = 150

    model = mlp_random_r(classes, len(X_profiling[0]), activation, neurons, layers, learning_rate)
    # Stop when validation MAE plateaus and roll back to the best epoch.
    es = EarlyStopping(monitor='val_mae', mode='min', patience=20, restore_best_weights=True)
    model.fit(
        x=X_profiling,
        y=Y_profiling,
        batch_size=mini_batch,
        verbose=2,
        epochs=200,
        shuffle=True,
        validation_data=(X_validation, Y_validation),
        callbacks=[es])

    prediction = model.predict(X_validation)
    # Release graph/session memory so repeated training runs don't accumulate.
    K.clear_session()
    return prediction


def cnn_random(classes, number_of_samples, activation, neurons, conv_layers, filters, kernel_size, stride, layers, learning_rate):
    """Build and compile a 1-D CNN classifier.

    Architecture: `conv_layers` Conv1D stages (all with the same filter
    count/kernel/stride), a Flatten, `layers` dense hidden layers, and a
    softmax output over `classes`.
    """
    model = Sequential()
    conv_cfg = dict(filters=filters, kernel_size=kernel_size, strides=stride,
                    activation='relu', padding='valid')
    for stage in range(conv_layers):
        if stage == 0:
            # Only the first layer declares the (samples, 1) input shape.
            model.add(Conv1D(input_shape=(number_of_samples, 1), **conv_cfg))
        else:
            model.add(Conv1D(**conv_cfg))

    model.add(Flatten())
    for _ in range(layers):
        model.add(Dense(neurons, activation=activation,
                        kernel_initializer='random_uniform', bias_initializer='zeros'))

    model.add(Dense(classes, activation='softmax'))  # classify
    model.summary()
    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(learning_rate=learning_rate),
                  metrics=['accuracy'])
    return model


def cnn_random_r(classes, number_of_samples, activation, neurons, conv_layers, filters, kernel_size, stride, layers, learning_rate):
    """Build and compile a 1-D CNN regressor (linear head, MSE loss).

    Same body as `cnn_random` but without the softmax, so the network
    predicts `classes` unconstrained real values.
    """
    model = Sequential()
    conv_cfg = dict(filters=filters, kernel_size=kernel_size, strides=stride,
                    activation='relu', padding='valid')
    for stage in range(conv_layers):
        if stage == 0:
            # Only the first layer declares the (samples, 1) input shape.
            model.add(Conv1D(input_shape=(number_of_samples, 1), **conv_cfg))
        else:
            model.add(Conv1D(**conv_cfg))

    model.add(Flatten())
    for _ in range(layers):
        model.add(Dense(neurons, activation=activation,
                        kernel_initializer='random_uniform', bias_initializer='zeros'))

    model.add(Dense(classes))  # regression
    model.summary()
    model.compile(loss='mse',
                  optimizer=Adam(learning_rate=learning_rate),
                  metrics=['mae'])
    return model

def run_cnn(X_profiling, Y_profiling, X_validation, Y_validation, classes):
    """Train the tuned CNN classifier and return its predictions on the
    validation traces.

    Inputs are reshaped to (n_traces, n_samples, 1) as Conv1D expects a
    channel axis.  The hyper-parameters are the best configuration from a
    prior random search (see the "CNN 65" result log in the comments near
    the bottom of this file).

    Returns the model's softmax output for `X_validation`,
    shape (len(X_validation), classes).
    """
    X_profiling = X_profiling.reshape((X_profiling.shape[0], X_profiling.shape[1], 1))
    X_validation = X_validation.reshape((X_validation.shape[0], X_validation.shape[1], 1))

    mini_batch = 500
    learning_rate = 0.00021731202151463133
    activation = "relu"
    dense_layers = 5
    neurons = 200
    conv_layers = 2
    filters = 28
    kernel_size = 16
    stride = 5

    model = cnn_random(classes, len(X_profiling[0]), activation, neurons, conv_layers,
                       filters, kernel_size, stride, dense_layers, learning_rate)
    # Stop when validation accuracy plateaus and roll back to the best epoch.
    es = EarlyStopping(monitor='val_accuracy', mode='max', patience=20, restore_best_weights=True)
    model.fit(
        x=X_profiling,
        y=Y_profiling,
        batch_size=mini_batch,
        verbose=2,
        epochs=200,
        shuffle=True,
        validation_data=(X_validation, Y_validation),
        callbacks=[es])
    prediction = model.predict(X_validation)
    # Release graph/session memory so repeated training runs don't accumulate.
    K.clear_session()
    return prediction

def run_cnn_r(X_profiling, Y_profiling, X_validation, Y_validation, classes):
    """Train the tuned CNN regressor and return its predictions on the
    validation traces.

    Inputs are reshaped to (n_traces, n_samples, 1) as Conv1D expects a
    channel axis.  The hyper-parameters are the best configuration from a
    prior random search (see the "CNN r" result log in the comments near
    the bottom of this file).

    Returns the model's raw (linear) output for `X_validation`,
    shape (len(X_validation), classes).
    """
    X_profiling = X_profiling.reshape((X_profiling.shape[0], X_profiling.shape[1], 1))
    X_validation = X_validation.reshape((X_validation.shape[0], X_validation.shape[1], 1))

    mini_batch = 500
    learning_rate = 0.000277000557246052
    activation = 'elu'
    dense_layers = 7
    neurons = 400
    conv_layers = 2
    filters = 16
    kernel_size = 18
    stride = 5

    model = cnn_random_r(classes, len(X_profiling[0]), activation, neurons, conv_layers,
                         filters, kernel_size, stride, dense_layers, learning_rate)
    # Stop when validation MAE plateaus and roll back to the best epoch.
    es = EarlyStopping(monitor='val_mae', mode='min', patience=30, restore_best_weights=True)
    model.fit(
        x=X_profiling,
        y=Y_profiling,
        batch_size=mini_batch,
        verbose=2,
        epochs=300,
        shuffle=True,
        validation_data=(X_validation, Y_validation),
        callbacks=[es])
    prediction = model.predict(X_validation)
    # Release graph/session memory so repeated training runs don't accumulate.
    K.clear_session()
    return prediction

# % MLP 65 [0.27070000767707825, 700, 0.000180006094109679, "tanh", 2, 100]
# % MLP 2 [0.8870000243186951, 900, 0.0003783336090725856, "elu", 2, 100]
# % MLP r [1.1298818588256836, 800, 0.0002769973772928945, "elu", 3, 150]

# % CNN r [1.1940339803695679, 500, 0.000277000557246052, "elu", 7, 400, 2, 16, 18, 5]
# % CNN 65 [0.26600000262260437, 500, 0.00021731202151463133, "relu", 5, 200, 2, 28, 16, 5]
# % CNN 2[0.8776000142097473, 500, 0.00035872323990838794, "relu", 3, 250, 2, 28, 12, 5]


# ---- experiment configuration -------------------------------------------
train_num_traces = 50000
valid_num_traces = 10000
# attack_num_traces = 80000
num_classes = 65  # Hamming weight of a 64-bit word: 0..64 inclusive

# datasets
ss = StandardScaler()  # NOTE(review): unused below — confirm before removing
with h5py.File('../ascon_cw_unprotected.h5', 'r') as f:
    # Profiling set: traces captured under random keys.
    profiling_traces = f['random_keys']['traces'][:train_num_traces]
    print(profiling_traces.shape)
    # Validation set: fixed-key traces taken after the profiling index range.
    testing_traces = f['fixed_keys']['traces'][train_num_traces:train_num_traces + valid_num_traces]

    # Nonces are split into two 8-byte halves (nonce0 / nonce1).
    profiling_nonce = f['random_keys']['metadata']['nonce'][:train_num_traces]
    profiling_nonce0 = profiling_nonce[:, 0:8]
    profiling_nonce1 = profiling_nonce[:, 8:16]

    testing_nonce = f['fixed_keys']['metadata']['nonce'][train_num_traces:train_num_traces + valid_num_traces]
    testing_nonce0 = testing_nonce[:, 0:8]
    testing_nonce1 = testing_nonce[:, 8:16]

    # Only the first 8 key bytes are targeted.
    profiling_key = f['random_keys']['metadata']['key'][:train_num_traces]
    profiling_key_c = profiling_key[:, 0:8]

    testing_key = f['fixed_keys']['metadata']['key'][train_num_traces:train_num_traces + valid_num_traces]
    testing_key_c = testing_key[:, 0:8]

    # Per-bit S-box labels -> Hamming weight -> one-hot over num_classes.
    profiling_labels = computer_Sbox_labels_for_randomkey(keys=profiling_key_c, nonce0s=profiling_nonce0, nonce1s=profiling_nonce1, n_bit=64, dec=False)
    profiling_labels_hw = np.count_nonzero(profiling_labels, axis=1)
    profiling_labels_hw = to_categorical(profiling_labels_hw, num_classes=num_classes)

    testing_labels = computer_Sbox_labels_for_randomkey(keys=testing_key_c, nonce0s=testing_nonce0, nonce1s=testing_nonce1, n_bit=64, dec=False)
    testing_labels_hw = np.count_nonzero(testing_labels, axis=1)
    testing_labels_hw = to_categorical(testing_labels_hw, num_classes=num_classes)

# Bootstrap-resample the profiling set (sampling with replacement), train a
# single MLP, and record its validation predictions.  The commented-out code
# this replaces looped over several models to build an ensemble.
train_indices = np.random.choice(np.arange(len(profiling_traces)), size=len(profiling_traces), replace=True)
X_train = profiling_traces[train_indices]
Y_train = profiling_labels_hw[train_indices]
model_record = run_mlp(X_train, Y_train, testing_traces, testing_labels_hw, num_classes)
# model_record = run_cnn(X_train, Y_train, testing_traces, testing_labels_hw, num_classes)

# np.save raises FileNotFoundError when the target directory is missing,
# so create it first.
# NOTE(review): folder name says "cnn" but run_mlp is trained above — confirm.
os.makedirs('prediction_cnn_2', exist_ok=True)
np.save('prediction_cnn_2/predict.npy', model_record)

# with open('./check_point_mask/model_params.pkl','wb') as f:
    # mode_params = pickle.dump(model_params,f)



# with open('./check_point_mask/model_params_cnn.pkl','wb') as f:
    # mode_params = pickle.dump(model_params,f)

# print(mode_params)
# model0 = mlp_random(classes=1,number_of_samples=len(profiling_traces[0]),activation=mode_params[0]['activation'],neurons=mode_params[0]['neurons'],layers=mode_params[0]['layers'],learning_rate=mode_params[0]['learning_rate'])


#  callback = OneCycleLR(len(profiling_traces),64,5e-3,end_percentage=0.2,scale_percentage=0.1,maximum_momentum=None,minimum_momentum=None,verbose=True)
# checkpoint_path = "./check_point_mask/cpd-{epoch:04d}.weights.h5"
# checkpoint_dir = os.path.dirname(checkpoint_path)
# cp_callback = tf.keras.callbacks.ModelCheckpoint(
#     filepath=checkpoint_path, 
#     verbose=1, 
#     monitor='val_loss',
#     mode='min',
#     save_best_only=True,
#     save_weights_only=True)

# es = EarlyStopping(monitor='loss',patience=30)
# metric = ['mae']
# model = get_cnn_best(length=len(profiling_traces[0]),loss='mse',metrics=metric,n_class=1)
# history = model.fit(profiling_traces,profiling_labels_hw,epochs=400,batch_size=64,verbose=1,validation_data=(testing_traces,testing_labels_hw),callbacks=[cp_callback])

