#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# # author : cypro666
# # date   : 2015.08.01
# # wrapper of Boosting methods in sklearn
import sys, json, time
import numpy as np
from threading import Thread
from sklearn import ensemble
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.naive_bayes import MultinomialNB

from magic3.utils import Timer
from magic3.filesystem import *
# make the grandparent directory importable so the local `skt` package below resolves
add_sys_path(grand_dir(__file__))

from skt.base import MethodBase
from skt.utils import _normalize, make_options


class AdaBoostClassifier(MethodBase):
    ''' interface of AdaBoost classification in sklearn '''

    def __init__(self, parameters=None):
        # avoid a shared mutable default argument; None means "empty parameters"
        super().__init__({} if parameters is None else parameters)

    @property
    def name(self):
        ''' method name used for logging and output file naming '''
        return 'AdaBoost'

    def output(self, cls):
        ''' write extra fitted-model attributes of *cls* to a json file

            Collection is best-effort: depending on the sklearn version and
            fit state some attributes may be missing, so each one is wrapped
            individually.  NOTE: the original code had trailing commas after
            most assignments, which silently stored 1-tuples instead of the
            values; fixed here so every json value is the bare list/int.
        '''
        self.Log('output')
        super().output()
        extra = {}

        try:
            extra["estimators"] = [str(e).split('\n') for e in cls.estimators_]
        except Exception:
            pass
        try:
            extra["classes"] = [int(i) for i in cls.classes_]
        except Exception:
            pass
        try:
            extra["n_classes_"] = int(cls.n_classes_)
        except Exception:
            pass
        try:
            extra["feature_importances"] = [float(i) for i in cls.feature_importances_]
        except Exception:
            pass
        try:
            extra["estimator_weights"] = [float(i) for i in cls.estimator_weights_]
        except Exception:
            pass
        try:
            extra["estimator_errors"] = [float(i) for i in cls.estimator_errors_]
        except Exception:
            pass

        fn = self.json_name()
        # close the file deterministically instead of leaking the handle
        with open(fn, 'w') as fp:
            json.dump(extra, fp, indent=4)

    def execute(self):
        ''' sanitize parameters, fit the classifier and predict the testing set '''
        self.read_input()
        self._normalize(False, self._param['normalize'])

        # clamp learning_rate into [0, 1] and n_estimators into [1, 99]
        if self._param['learning_rate'] > 1.0 or self._param['learning_rate'] < 0.0:
            self._param['learning_rate'] = 1.0
        # range membership is O(1) for ints in py3 -- no need to build a set
        if self._param['n_estimators'] not in range(1, 100):
            self._param['n_estimators'] = 10

        self.save_parameters()

        # map the symbolic estimator name to a concrete weak learner
        if self._param['estimator'] == 'bayes':
            self._param['estimator'] = MultinomialNB(fit_prior=True)
        elif self._param['estimator'] == 'dtree':
            self._param['estimator'] = DecisionTreeClassifier(max_features='log2')

        # random_state expects an int seed; the original passed True (== 1)
        cls = ensemble.AdaBoostClassifier(base_estimator=self._param['estimator'],
                                          n_estimators=self._param['n_estimators'],
                                          learning_rate=self._param['learning_rate'],
                                          algorithm='SAMME.R',
                                          random_state=1)

        cls.fit(self._train, self._label)

        self._results = cls.predict(self._testing)
        self.output(cls)

    def run(self, timeout):
        ''' run execute() in a worker thread, waiting at most *timeout* seconds

            NOTE(review): on timeout the worker thread is only logged, not
            stopped -- it keeps running in the background.
        '''
        t = Thread(target=self.execute)
        t.start()
        t.join(timeout)

        if t.is_alive():
            self.Log('timeout!')

        self.Log('exit')


class AdaBoostRegressor(MethodBase):
    ''' interface of AdaBoost regression in sklearn '''

    def __init__(self, parameters=None):
        # avoid a shared mutable default argument; None means "empty parameters"
        super().__init__({} if parameters is None else parameters)

    @property
    def name(self):
        ''' method name used for logging and output file naming '''
        return 'AdaBoost'

    def output(self, cls):
        ''' write extra fitted-model attributes of *cls* to a json file

            Collection is best-effort: sklearn regressors never expose
            `classes_`/`n_classes_`, so those entries are expected to be
            skipped here.  NOTE: the original code had trailing commas after
            most assignments, which silently stored 1-tuples instead of the
            values; fixed here so every json value is the bare list/int.
        '''
        self.Log('output')
        super().output()
        extra = {}

        try:
            extra["estimators"] = [str(e).split('\n') for e in cls.estimators_]
        except Exception:
            pass
        try:
            extra["classes"] = [int(i) for i in cls.classes_]
        except Exception:
            pass
        try:
            extra["n_classes_"] = int(cls.n_classes_)
        except Exception:
            pass
        try:
            extra["feature_importances"] = [float(i) for i in cls.feature_importances_]
        except Exception:
            pass
        try:
            extra["estimator_weights"] = [float(i) for i in cls.estimator_weights_]
        except Exception:
            pass
        try:
            extra["estimator_errors"] = [float(i) for i in cls.estimator_errors_]
        except Exception:
            pass

        fn = self.json_name()
        # close the file deterministically instead of leaking the handle
        with open(fn, 'w') as fp:
            json.dump(extra, fp, indent=4)

    def execute(self):
        ''' sanitize parameters, fit the regressor and predict the testing set '''
        self.read_input()
        self._normalize(False, self._param['normalize'])

        # clamp learning_rate into [0, 1] and n_estimators into [1, 99]
        if self._param['learning_rate'] > 1.0 or self._param['learning_rate'] < 0.0:
            self._param['learning_rate'] = 1.0
        # range membership is O(1) for ints in py3 -- no need to build a set
        if self._param['n_estimators'] not in range(1, 100):
            self._param['n_estimators'] = 10

        self.save_parameters()

        # map the symbolic estimator name to a concrete weak learner
        if self._param['estimator'] == 'bayes':
            self._param['estimator'] = MultinomialNB(fit_prior=True)
        elif self._param['estimator'] == 'dtree':
            self._param['estimator'] = DecisionTreeRegressor(max_features='log2')

        # random_state expects an int seed; the original passed True (== 1)
        cls = ensemble.AdaBoostRegressor(base_estimator=self._param['estimator'],
                                         n_estimators=self._param['n_estimators'],
                                         learning_rate=self._param['learning_rate'],
                                         loss=self._param['losser'],
                                         random_state=1)

        cls.fit(self._train, self._label)
        self._results = cls.predict(self._testing)
        self.output(cls)

    def run(self, timeout):
        ''' run execute() in a worker thread, waiting at most *timeout* seconds

            NOTE(review): on timeout the worker thread is only logged, not
            stopped -- it keeps running in the background.
        '''
        t = Thread(target=self.execute)
        t.start()
        t.join(timeout)

        if t.is_alive():
            self.Log('timeout!')

        self.Log('exit')


if __name__ == '__main__':
    # CLI option schema: (name, type, choices) consumed by make_options
    opts = [('train_file', 'str', []),
            ('label_file', 'str', []),
            ('testing_file', 'str', []),
            ('results_file', 'str', []),
            ('log_file', 'str', []),
            ('normalize', 'choice', ['0', '1', '2']),
            ('target', 'choice', ['classify', 'regression']),
            ('losser', 'choice', ['linear', 'square', 'exponential']),
            ('estimator', 'choice', ['dtree', 'bayes']),
            ('learning_rate', 'float', []),
            ('n_estimators', 'int', [])]

    parameters = make_options(opts)
    timer = Timer()

    # dispatch on the requested target, with a hard 1000s timeout per run
    if parameters['target'] == 'classify':
        AdaBoostClassifier(parameters).run(1000)
    elif parameters['target'] == 'regression':
        AdaBoostRegressor(parameters).run(1000)
    else:
        # a bare `raise` outside an except block raises a confusing
        # "No active exception to re-raise" RuntimeError; raise explicitly
        raise ValueError('unknown target: %r' % parameters['target'])

    if __debug__: print('elapsed:', timer)



