#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# # author : cypro666
# # date   : 2015.08.01
# # wrapper of Lasso and Lars regression methods in sklearn
import sys, json, time
import numpy as np
from threading import Thread
from sklearn import linear_model

from magic3.utils import Timer
from magic3.filesystem import *
add_sys_path(grand_dir(__file__))

from skt.base import MethodBase
from skt.utils import _normalize, make_options


class Lasso(MethodBase):
    """Wrapper around scikit-learn's Lasso linear regression.

    Reads train/label/testing data via MethodBase, clamps user-supplied
    parameters to safe minimums, fits the model, stores predictions in
    ``self._results`` and dumps the fitted coefficients to a JSON file.
    """

    def __init__(self, parameters=None):
        # Original used a mutable default argument ({}), which is shared
        # across all calls; use None as the sentinel instead.
        super().__init__({} if parameters is None else parameters)

    @property
    def name(self):
        return 'Lasso'

    def output(self, lgr):
        """Write base outputs, then the fitted model attributes as JSON.

        :param lgr: a fitted ``linear_model.Lasso`` estimator.
        """
        self.Log('output')
        super().output()

        extra = {
            "coef" : [float(i) for i in lgr.coef_],
            "intercept"  : float(lgr.intercept_),
            "n_iter" : int(lgr.n_iter_)
        }

        # Context manager ensures the file handle is closed even on error
        # (original leaked the handle returned by open()).
        with open(self.json_name(), 'w') as fp:
            json.dump(extra, fp, indent=4)

    def execute(self):
        """Read input, clamp parameters, fit Lasso, predict the test set."""
        self.read_input()
        self._normalize(False, self._param['normalize'])

        # Clamp user-supplied parameters to sane minimums.
        self._param['tol'] = max(self._param['tol'], 1.0E-5)
        self._param['alpha'] = max(self._param['alpha'], 1.0E-5)
        self._param['max_iter'] = max(self._param['max_iter'], 1500)

        self.save_parameters()

        # random_state is unused with selection='cyclic' (original passed
        # the meaningless seed True); warm_start made an explicit bool.
        # NOTE(review): the 'normalize' kwarg was removed in sklearn 1.2;
        # drop it if upgrading — confirm against the installed version.
        lgr = linear_model.Lasso(copy_X=False,
                                 fit_intercept=True,
                                 alpha=self._param['alpha'],
                                 max_iter=self._param['max_iter'],
                                 tol=self._param['tol'],
                                 normalize=False,
                                 positive=False,
                                 random_state=None,
                                 selection='cyclic',
                                 warm_start=True)

        lgr.fit(self._train, self._label)

        self._results = lgr.predict(self._testing)
        self.output(lgr)

    def run(self, timeout):
        """Run execute() on a worker thread, logging if it exceeds timeout.

        :param timeout: seconds to wait for the fit before logging 'timeout!'.
        """
        worker = Thread(target=self.execute)
        worker.start()
        worker.join(timeout)
        if worker.is_alive():
            self.Log('timeout!')
        self.Log('exit')


class Lars(MethodBase):
    """Wrapper around scikit-learn's Least Angle Regression (Lars).

    Reads train/label/testing data via MethodBase, clamps user-supplied
    parameters to safe minimums, fits the model, stores predictions in
    ``self._results`` and dumps the fitted coefficients to a JSON file.
    """

    def __init__(self, parameters=None):
        # Original used a mutable default argument ({}), which is shared
        # across all calls; use None as the sentinel instead.
        super().__init__({} if parameters is None else parameters)

    @property
    def name(self):
        return 'Lars'

    def output(self, lgr):
        """Write base outputs, then the fitted model attributes as JSON.

        :param lgr: a fitted ``linear_model.Lars`` estimator.
        """
        self.Log('output')
        super().output()

        extra = {
            "alphas" :[float(i) for i in lgr.alphas_],
            "coef" : [float(i) for i in lgr.coef_],
            "intercept"  : float(lgr.intercept_),
            "n_iter" : int(lgr.n_iter_)
        }

        # Context manager ensures the file handle is closed even on error
        # (original leaked the handle returned by open()).
        with open(self.json_name(), 'w') as fp:
            json.dump(extra, fp, indent=4)

    def execute(self):
        """Read input, clamp parameters, fit Lars, predict the test set."""
        self.read_input()
        self._normalize(False, self._param['normalize'])

        # Clamp user-supplied parameters to sane minimums.
        self._param['eps'] = max(self._param['eps'], 1.0E-6)
        self._param['nonzero_coefs'] = max(self._param['nonzero_coefs'], 2)

        self.save_parameters()

        # NOTE(review): the 'normalize' kwarg was removed in sklearn 1.2;
        # drop it if upgrading — confirm against the installed version.
        lgr = linear_model.Lars(copy_X=False,
                                eps=self._param['eps'],
                                n_nonzero_coefs=self._param['nonzero_coefs'],
                                normalize=False,
                                fit_intercept=True,
                                fit_path=True,
                                verbose=False)

        lgr.fit(self._train, self._label)

        self._results = lgr.predict(self._testing)
        self.output(lgr)

    def run(self, timeout):
        """Run execute() on a worker thread, logging if it exceeds timeout.

        :param timeout: seconds to wait for the fit before logging 'timeout!'.
        """
        worker = Thread(target=self.execute)
        worker.start()
        worker.join(timeout)
        if worker.is_alive():
            self.Log('timeout!')
        self.Log('exit')


if __name__ == '__main__':
    # Command-line option schema: (name, type, allowed-choices).
    opts = [('train_file', 'str', []),
            ('label_file', 'str', []),
            ('testing_file', 'str', []),
            ('results_file', 'str', []),
            ('log_file', 'str', []),
            ('schema', 'choice', ['lasso', 'lars']),
            ('normalize', 'choice', ['0', '1', '2']),
            ('alpha', 'float', []),
            ('tol', 'float', []),
            ('eps', 'float', []),  # using for Lars only
            ('nonzero_coefs', 'int', []),  # using for Lars only
            ('max_iter', 'int', [])]

    parameters = make_options(opts)

    # Dispatch on the requested regression schema (case-insensitive).
    schema = parameters['schema'].lower()
    if schema == 'lasso':
        method_class = Lasso
    elif schema == 'lars':
        method_class = Lars
    else:
        print('schema should be Lasso or Lars')
        # Exit non-zero on a usage error (original exited with status 0).
        sys.exit(1)

    timer = Timer()
    # Run the selected method with a one-hour timeout.
    method_class(parameters).run(3600)

    if __debug__:
        print('elapsed:', timer)

