#!/usr/bin/env python
# -*- coding:utf-8 _*-

from __future__ import print_function

import os
import sys
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from xgboost import XGBRegressor
import matplotlib.pyplot as plt

# Base directory: all data/ and result/ paths below are resolved relative to
# the current working directory, so the script must be launched from the
# project root.
analyPath = os.getcwd()
# Global matplotlib styling for the generated importance bar charts.
plt.rcParams["font.family"] = "Times New Roman"
plt.rcParams["font.size"] = 10.5


class Train_CV_SGBRT(object):
    """Compute, persist, and plot per-feature importances for each dataset.

    For every ``<algorithm_name>.csv`` under ``data/`` (last column = target,
    remaining columns = features) this fits five regressors — linear,
    per-feature quadratic linear, decision tree, random forest, and XGBoost —
    normalises their importances, writes the resulting matrix to
    ``result/importance/<algorithm_name>`` and saves a grouped bar chart to
    ``result/importance/<algorithm_name>.pdf``.
    """

    def __init__(self):
        # Directory holding one "<algorithm_name>.csv" per workload.
        self.data_path = os.path.join(analyPath, "data")
        # Set by build()/build_loop() before analy() is invoked; initialised
        # here so analy() fails with a clear file error rather than an
        # AttributeError if called out of order.
        self.algorithm_name = None

    def analy(self):
        """Fit all models on ``self.algorithm_name``'s data; emit CSV + PDF.

        Raises:
            FileNotFoundError: if ``data/<algorithm_name>.csv`` is missing.
        """
        # Read data: last column is the regression target, the rest features.
        datafile = os.path.join(self.data_path, str(self.algorithm_name) + ".csv")
        data = pd.read_csv(datafile)
        configNum = data.shape[1] - 1
        X = data.iloc[0:, 0:configNum]
        y = data.iloc[0:, configNum]
        events_name = X.columns

        # Linear regression: signed coefficients and their absolute values,
        # each sum-normalised (vectorised; replaces the element-wise loop).
        modelLinear = LinearRegression()
        modelLinear.fit(X, y)
        coefs = modelLinear.coef_
        importanceLinear = coefs / coefs.sum()
        importanceabsLinear = np.abs(coefs) / np.abs(coefs).sum()

        # Isolated quadratic fits: R^2 of a degree-2 polynomial fit on each
        # single feature, kept both raw and sum-normalised.
        importanceisolateLinear = []
        quadratic_featurizer = PolynomialFeatures(degree=2)
        isolated_model = LinearRegression()
        for name in events_name:
            Xi = quadratic_featurizer.fit_transform(X[[name]])
            isolated_model.fit(Xi, y)
            # abs() guards against the (rare) negative R^2 of a bad fit.
            importanceisolateLinear.append(abs(isolated_model.score(Xi, y)))
        sumisolateLinear = sum(importanceisolateLinear)
        importanceisolateLinearnorm = [v / sumisolateLinear for v in importanceisolateLinear]

        # Tree-based models expose normalised feature_importances_ directly.
        modelDecisionTree = DecisionTreeRegressor()
        modelDecisionTree.fit(X, y)
        importanceDecisionTree = modelDecisionTree.feature_importances_

        modelRandomForest = RandomForestRegressor()
        modelRandomForest.fit(X, y)
        importanceRandomForest = modelRandomForest.feature_importances_

        modelXGB = XGBRegressor()
        modelXGB.fit(X, y)
        importanceXGB = modelXGB.feature_importances_

        # Mean importance per feature across the five models.  BUGFIX: use
        # the sum-normalised isolated-linear scores so all five inputs share
        # the same scale (the previous code averaged the raw R^2 values,
        # which are not normalised like the other four vectors).
        mean = [
            np.mean([importanceabsLinear[i],
                     importanceisolateLinearnorm[i],
                     importanceDecisionTree[i],
                     importanceRandomForest[i],
                     importanceXGB[i]])
            for i in range(configNum)
        ]

        # Assemble the importance matrix: one row per feature/event.
        importance = pd.DataFrame([importanceLinear, importanceabsLinear,
                                   importanceisolateLinear, importanceisolateLinearnorm,
                                   importanceDecisionTree, importanceRandomForest,
                                   importanceXGB, mean]).T
        importance.index = events_name
        importance.columns = ['Linear', 'Linear_abs', 'Linear_iso', 'Linear_iso_norm', 'DecisionTree', 'RandomForest', 'XGBoost', 'mean']

        # Persist the full matrix (path kept extension-less for
        # backward compatibility with existing result consumers).
        importance.to_csv(analyPath + '/result/importance/' + str(self.algorithm_name))

        # Keep only features with non-negligible mean importance, sorted
        # descending, for plotting.
        importance = importance.sort_values(by='mean', ascending=False)
        importance = importance.loc[importance[:]['mean'] > 0.01]
        plotNum = len(importance)

        # Grouped bar chart: five bars (one per model) per retained feature.
        x = np.arange(plotNum)
        total_width, n = 0.8, 5
        width = total_width / n
        plt.figure(figsize=(plotNum * 0.2 + 3, 4), frameon=False)
        plt.bar(x+0*width, importance['Linear_abs'], width=width, label='Linear_abs', hatch='///')
        plt.bar(x+1*width, importance['Linear_iso_norm'], width=width, label='Linear_iso', hatch='...')
        plt.bar(x+2*width, importance['DecisionTree'], width=width, tick_label=importance.index, label='DecisionTree')
        plt.bar(x+3*width, importance['RandomForest'], width=width, label='RandomForest', hatch='xxx')
        plt.bar(x+4*width, importance['XGBoost'], width=width, label='XGBoost', hatch='---')
        plt.xticks(rotation=90)
        plt.legend()
        plt.tight_layout()
        plt.savefig(analyPath + '/result/importance/' + str(self.algorithm_name) + '.pdf')
        # BUGFIX: close the figure so repeated calls (build_loop) do not
        # accumulate open matplotlib figures and leak memory.
        plt.close()

    def build(self, item):
        """Analyse one dataset; *item* is a CSV filename or bare name."""
        self.algorithm_name = item.replace(".csv", "")
        self.analy()

    def build_loop(self):
        """Analyse every dataset found in data/ (skipping .gitignore)."""
        for entry in os.listdir(self.data_path):
            if entry != ".gitignore":
                print(entry.replace(".csv", ""))
                # Reuse build() instead of duplicating its logic.
                self.build(entry)


if __name__ == '__main__':
    # Constructing the object already runs __init__; the previous explicit
    # train_cv_sgbrt.__init__() call was redundant and has been removed.
    train_cv_sgbrt = Train_CV_SGBRT()
    if len(sys.argv) > 1:
        # Analyse the single dataset named on the command line.
        train_cv_sgbrt.build(sys.argv[1])
    else:
        # No argument: analyse every CSV in the data directory.
        train_cv_sgbrt.build_loop()
