import argparse
from keras.callbacks import ModelCheckpoint
import numpy as np
import pickle
import random

from model import model
from utils import get_kdd_data, generate_batch

# Seed the RNG sources used by this script so runs are reproducible.
# NOTE: the original code also called `np.random.RandomState(2018)`, which
# constructs a throwaway generator object and seeds nothing — it was a no-op
# and has been removed.
np.random.seed(2018)
random.seed(2018)

# --- default CLI arguments (overridable via argparse flags below) ---
DATASET = 'kddcup99'
SAMPLES = 50000  # nb of rows drawn for the training batch
# Columns to keep, passed through argparse as the str() of a Python list;
# run() strips the brackets/quotes and splits it back into a list of names.
COLS = str(['srv_count','serror_rate','srv_serror_rate','rerror_rate','srv_rerror_rate',
            'same_srv_rate','diff_srv_rate','srv_diff_host_rate','dst_host_count','dst_host_srv_count',
            'dst_host_same_srv_rate','dst_host_diff_srv_rate','dst_host_same_src_port_rate',
            'dst_host_srv_diff_host_rate','dst_host_serror_rate','dst_host_srv_serror_rate',
            'dst_host_rerror_rate','dst_host_srv_rerror_rate','target'])
# VAE architecture
HIDDEN_LAYERS = 2    # nb of hidden layers on each side of the latent space
LATENT_DIM = 2       # dimensionality of the latent space
HIDDEN_DIM = [15,7]  # units per hidden layer (one entry per hidden layer)
# VAE training
EPOCHS = 20
BATCH_SIZE = 32
SAVE = True               # persist weights/architecture/standardization params
SAVE_PATH = './models/'
PRINT_PROGRESS = False    # verbose Keras output + model summary
STANDARDIZED = True       # standardize inputs before training

def train(model, X, args):
    """Optionally standardize the data, then fit the VAE.

    Side effects (when enabled via args): writes mu/sigma standardization
    parameters, best model weights, and architecture hyperparameters to
    args.save_path.
    """
    if args.standardized:
        # Fit standardization parameters on the training batch and persist
        # them so the same transform can be applied at inference time.
        mu, sigma = np.mean(X, axis=0), np.std(X, axis=0)
        with open(args.save_path + 'mu_sigma.pickle', 'wb') as f:
            pickle.dump([mu, sigma], f)
        X = (X - mu) / (sigma + 1e-10)  # eps guards against zero-variance columns

    # Training configuration; validation is run on the training batch itself.
    fit_kwargs = {
        'epochs': args.epochs,
        'batch_size': args.batch_size,
        'shuffle': True,
        'validation_data': (X, None),
        'verbose': 1 if args.print_progress else 0,
    }

    if args.save:
        # Checkpoint only the best weights seen during training.
        checkpointer = ModelCheckpoint(filepath=args.save_path + 'vae_weights.h5', verbose=0,
                                       save_best_only=True, save_weights_only=True)
        fit_kwargs['callbacks'] = [checkpointer]

        # Persist the hyperparameters needed to rebuild this architecture.
        with open(args.save_path + 'model.pickle', 'wb') as f:
            pickle.dump([X.shape[1], args.hidden_layers, args.latent_dim, args.hidden_dim], f)

    model.fit(X, **fit_kwargs)

def run(args):
    """Load data, build a training batch, construct the VAE and train it."""

    print('\nLoad dataset')
    if args.dataset != 'kddcup99':
        raise ValueError('Only "kddcup99" dataset supported.')
    # args.keep_cols arrives as the str() of a Python list; strip the
    # surrounding brackets, quotes and spaces to recover the column names.
    cols = args.keep_cols[1:-1].replace("'", "").replace(" ", "").split(",")
    data = get_kdd_data(keep_cols=cols)

    print('\nGenerate training batch')
    X, _ = generate_batch(data, args.samples, 0.)

    print('\nInitiate outlier detector model')
    # One column is excluded from the feature count — presumably the
    # 'target' label column; verify against get_kdd_data.
    n_features = data.shape[1] - 1
    vae = model(n_features,
                hidden_layers=args.hidden_layers,
                latent_dim=args.latent_dim,
                hidden_dim=args.hidden_dim)
    if args.print_progress:
        vae.summary()

    print('\nTrain outlier detector')
    train(vae, X, args)
    
if __name__ == '__main__':

    parser = argparse.ArgumentParser(description="Train VAE outlier detector.")
    # BUG FIX: `choices` must be an iterable of whole valid values. Passing
    # the bare string DATASET made argparse treat each *character* as a
    # choice, so an explicit `--dataset kddcup99` was rejected.
    parser.add_argument('--dataset', type=str, choices=[DATASET], default=DATASET)
    parser.add_argument('--samples', type=int, default=SAMPLES)
    parser.add_argument('--keep_cols', type=str, default=COLS)
    parser.add_argument('--hidden_layers', type=int, default=HIDDEN_LAYERS)
    parser.add_argument('--latent_dim', type=int, default=LATENT_DIM)
    parser.add_argument('--hidden_dim', type=int, nargs='+', default=HIDDEN_DIM)
    parser.add_argument('--epochs', type=int, default=EPOCHS)
    parser.add_argument('--batch_size', type=int, default=BATCH_SIZE)
    # NOTE: --standardized and --save default to True and use store_false,
    # so supplying the flag *disables* the behavior. Kept as-is to preserve
    # the existing CLI contract.
    parser.add_argument('--standardized', default=STANDARDIZED, action='store_false')
    parser.add_argument('--print_progress', default=PRINT_PROGRESS, action='store_true')
    parser.add_argument('--save', default=SAVE, action='store_false')
    parser.add_argument('--save_path', type=str, default=SAVE_PATH)
    args = parser.parse_args()

    run(args)