import json
import os
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten, Reshape, Layer
from Xg_Utils.params_run_submit import *
import numpy as np
from sklearn.metrics import roc_auc_score

def nn_weights_optimizer(nb_features):
    """Build a one-unit sigmoid network that learns blending weights.

    The single Dense(1) layer maps `nb_features` base-model predictions to
    one blended score in (0, 1).
    """
    model = Sequential()
    model.add(Dense(1, input_shape=(nb_features,), activation='sigmoid'))
    return model

def optimize_weights_model_based(X, y, val_X, val_y):
    """Learn NN blending weights over individual XGBoost learners.

    Reads the learner parameter lists saved by the
    'bagging_forward_selection_of_learners' experiment (only the file
    '4_meta_data.json'), retrains each learner, stacks their train/val/test
    predictions column-wise, and fits a single sigmoid unit to weight them.
    Writes blended test scores, a metadata JSON, and the NN weights under
    Xg_metadata/optimize_weights_model_based/.

    Parameters: X, y - training features/labels; val_X, val_y - validation
    features/labels.

    NOTE(review): `run_XGBoost`, `get_test_data`, `xgb`, and `pd` come from
    the star-import of Xg_Utils.params_run_submit - confirm against that
    module.
    """
    experiment_name = 'optimize_weights_model_based'
    experiment_path = os.path.join('Xg_metadata', experiment_name).replace('\\', '/')
    #os.mkdir(experiment_path)

    test_uid, test_x = get_test_data(
        file_path='/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/DCP2P/test_x.csv')
    dtest = xgb.DMatrix(test_x)
    dval = xgb.DMatrix(val_X, label=val_y)
    dtrain = xgb.DMatrix(X, label=y)

    nn_params = {
        'optimizer': 'adam',
        'loss': 'mse',
        'nb_epoch': 50,
        'shuffle': False,
    }

    name = 'bagging_forward_selection_of_learners'
    folder_path = os.path.join('Xg_metadata', name).replace('\\', '/')
    for file in os.listdir(folder_path):
        # Only the 4th bagging experiment's metadata is processed here.
        if not file.endswith('json') or file != '4_meta_data.json':
            continue
        file_path = folder_path + '/' + file
        # Close the handle deterministically instead of leaking it.
        with open(file_path) as f:
            json_obj = json.load(f)
        params_list = json_obj['individual_model_performance']

        preds = []
        preds_test = []
        preds_train = []
        for p in range(len(params_list)):
            params = params_list[p]['params']
            run = run_XGBoost(X, y, val_X, val_y, params=params, rounds=1000)
            preds.append(run.predict(dval))
            preds_test.append(run.predict(dtest))
            preds_train.append(run.predict(dtrain))

        nb_features = len(preds)
        # Transpose so rows = samples, columns = individual learners.
        preds = np.asarray(preds).transpose()
        preds_test = np.asarray(preds_test).transpose()
        preds_train = np.asarray(preds_train).transpose()
        print(preds.shape)
        print(preds_test.shape)
        print(preds_train.shape)

        nn_model = nn_weights_optimizer(nb_features)
        nn_model.compile(optimizer=nn_params['optimizer'], loss=nn_params['loss'])
        nn_model.fit(X=preds_train, y=np.asarray(y),
                     nb_epoch=nn_params['nb_epoch'], shuffle=nn_params['shuffle'])

        # Keras returns (n, 1) arrays; flatten to 1-D for scoring/saving.
        # Predict on the validation stack ONCE (the original predicted twice).
        nn_test_pred = nn_model.predict(preds_test).reshape(-1)
        nn_train_pred = nn_model.predict(preds_train).reshape(-1)
        nn_pred = nn_model.predict(preds).reshape(-1)

        nn_val_AUC = roc_auc_score(np.asarray(val_y), nn_pred)
        nn_train_AUC = roc_auc_score(np.asarray(y), nn_train_pred)
        json_obj_op = {
            "bagging_performance": json_obj,
            'nn_params': nn_params,
            'optimize_val_AUC': nn_val_AUC,
            'optimize_train_AUC': nn_train_AUC
        }

        # e.g. '4_meta_data.json' -> '4'; computed once instead of three times.
        file_stem = file[0:2].replace('_', '')
        with open(experiment_path + '/' + file_stem + '_meta_data.json', 'wb') as f:
            f.write(json.dumps(json_obj_op, default=lambda o: o.__dict__, indent=4))

        weights_path = experiment_path + '/' + file_stem + 'nn_weights'
        nn_model.save_weights(weights_path, overwrite=True)

        test_result = pd.DataFrame(columns=['uid', 'score'])
        test_result.uid = test_uid
        test_result.score = nn_test_pred
        # Fixed 'utf - 8' -> 'utf-8' (the original only worked via codec-name
        # normalization).
        test_result.to_csv(experiment_path + '/' + file_stem + '_result.csv',
                           index=None, encoding='utf-8')


def optimize_weights_ensemble_based(X, y, val_X, val_y):
    """Blend whole bagging ensembles (rather than single learners) with a NN.

    For up to three bagging metadata files, rebuilds every learner of each
    ensemble and averages their predictions into one column per ensemble.
    Then (a) plain-averages the ensemble columns ("bagging") and (b) trains
    a single sigmoid unit to weight them ("optimize"). Test scores and a
    metadata JSON are written under Xg_metadata/optimize_weights_ensemble_based/.

    Parameters: X, y - training features/labels; val_X, val_y - validation
    features/labels.

    BUG FIX (this was the TODO about "troubles"): the per-ensemble average
    must divide by the number of models summed (len(params_list)); the old
    code divided by len(preds), which is the number of SAMPLES once `preds`
    becomes a numpy array.

    NOTE(review): `run_XGBoost`, `get_test_data`, `xgb`, and `pd` come from
    the star-import of Xg_Utils.params_run_submit - confirm against that
    module.
    """
    experiment_name = 'optimize_weights_ensemble_based'
    experiment_path = os.path.join('Xg_metadata', experiment_name).replace('\\', '/')
    #os.mkdir(experiment_path)

    test_uid, test_x = get_test_data(
        file_path='/media/dell/cb552bf1-c649-4cca-8aca-3c24afca817b/dell/wxm/Data/DCP2P/test_x.csv')
    dtest = xgb.DMatrix(test_x)
    dval = xgb.DMatrix(val_X, label=val_y)
    dtrain = xgb.DMatrix(X, label=y)

    nn_params = {
        'optimizer': 'adam',
        'loss': 'mse',
        'nb_epoch': 10,
        'shuffle': False
    }

    name = 'bagging_forward_selection_of_learners'
    folder_path = os.path.join('Xg_metadata', name).replace('\\', '/')
    preds_ensemble = []
    preds_test_ensemble = []
    preds_train_ensemble = []
    flag = 0
    for file in os.listdir(folder_path):
        if not file.endswith('json'):
            continue
        # Close the handle deterministically instead of leaking it.
        with open(folder_path + '/' + file) as f:
            json_obj = json.load(f)
        params_list = json_obj['individual_model_performance']
        if not params_list:
            # Nothing to average; skip rather than divide by zero below.
            continue

        preds = 0
        preds_test = 0
        preds_train = 0
        for p in range(len(params_list)):
            params = params_list[p]['params']
            run = run_XGBoost(X, y, val_X, val_y, params=params, rounds=10)
            preds += run.predict(dval)
            preds_test += run.predict(dtest)
            preds_train += run.predict(dtrain)

        # Average over the number of MODELS, not the number of samples.
        n_models = float(len(params_list))
        preds = preds / n_models
        preds_test = preds_test / n_models
        preds_train = preds_train / n_models

        preds_test_ensemble.append(preds_test)
        preds_train_ensemble.append(preds_train)
        preds_ensemble.append(preds)
        flag += 1
        if flag >= 3:
            break

    nb_features = len(preds_train_ensemble)
    preds_ensemble = np.asarray(preds_ensemble)
    preds_test_ensemble = np.asarray(preds_test_ensemble)
    preds_train_ensemble = np.asarray(preds_train_ensemble)

    print('bagging all ensemble')
    # Plain average across ensembles (rows = ensembles at this point).
    bagging_all_ensemble_pred = np.mean(preds_ensemble, axis=0)
    bagging_all_ensemble_pred_test = np.mean(preds_test_ensemble, axis=0)

    bagging_all_ensemble_AUC = roc_auc_score(np.asarray(val_y), bagging_all_ensemble_pred)

    test_result = pd.DataFrame(columns=['uid', 'score'])
    test_result.uid = test_uid
    test_result.score = bagging_all_ensemble_pred_test
    # Fixed 'utf - 8' -> 'utf-8' (the original only worked via codec-name
    # normalization).
    test_result.to_csv(experiment_path + '/' + 'bagging_result.csv',
                       index=None, encoding='utf-8')

    # Transpose so rows = samples, columns = ensembles, for the NN.
    preds_ensemble = preds_ensemble.transpose()
    preds_test_ensemble = preds_test_ensemble.transpose()
    preds_train_ensemble = preds_train_ensemble.transpose()

    nn_model = nn_weights_optimizer(nb_features)
    nn_model.compile(optimizer=nn_params['optimizer'], loss=nn_params['loss'])
    nn_model.fit(X=preds_train_ensemble, y=np.asarray(y),
                 nb_epoch=nn_params['nb_epoch'], shuffle=nn_params['shuffle'])

    # Keras returns (n, 1) arrays; flatten to 1-D before scoring and before
    # assigning as a DataFrame column.
    optimize_all_ensemble_pred = nn_model.predict(preds_ensemble).reshape(-1)
    optimize_all_ensemble_pred_train = nn_model.predict(preds_train_ensemble).reshape(-1)
    optimize_all_ensemble_pred_test = nn_model.predict(preds_test_ensemble).reshape(-1)

    test_result = pd.DataFrame(columns=['uid', 'score'])
    test_result.uid = test_uid
    test_result.score = optimize_all_ensemble_pred_test
    test_result.to_csv(experiment_path + '/' + 'optimize_result.csv',
                       index=None, encoding='utf-8')

    optimize_all_ensemble_AUC = roc_auc_score(np.asarray(val_y), optimize_all_ensemble_pred)
    optimize_all_ensemble_AUC_train = roc_auc_score(np.asarray(y), optimize_all_ensemble_pred_train)

    json_obj_op = {
        'nn_params': nn_params,
        # Key spelling 'ensembel' kept as-is: downstream readers may depend
        # on it.
        'bagging_ensembel_val_AUC': bagging_all_ensemble_AUC,
        'optimize_ensemble_val_AUC': optimize_all_ensemble_AUC,
        'optimize_ensemble_train_AUC': optimize_all_ensemble_AUC_train
    }
    with open(experiment_path + '/' + 'meta_data.json', 'wb') as f:
        f.write(json.dumps(json_obj_op, default=lambda o: o.__dict__, indent=4))