#coding=utf8
# Copyright (c) 2016 Tinydot. inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import time
from threading import Lock
import twisted.internet.reactor as reactor
import twisted.internet.threads as threads
import requests_twisted

# Module-level shared session for issuing HTTP requests through twisted's
# reactor instead of blocking sockets.
# NOTE(review): `session` is not referenced anywhere else in this chunk --
# presumably imported by client modules (`from ... import session`); confirm
# before removing.
session = requests_twisted.TwistedRequestsSession()
from .logger import get_logger, LOG

def seconds_to_hm(sec):
    """Format a duration given in seconds as a human-readable string.

    e.g. seconds_to_hm(3661) -> "1 hour 1 minute 1.000 second"

    :param sec: duration in seconds (int or float), assumed non-negative.
    :return: string such as "2 days 3 hours 5 minutes 12.500 seconds".
    """

    ret = ""
    remaining = sec
    for t, u, us in [(3600*24, "day", "days"),
                     (3600, "hour", "hours"),
                     (60, "minute", "minutes"),
                     ]:
        # Divide the *remaining* seconds, not the total: the original
        # divided `sec` by every unit, over-counting hours and minutes
        # (3661s came out as "1 hour 61 minutes ...").
        m = int(remaining // t)
        if m > 0:
            ret += "%s %s " % (m, u if m == 1 else us)
            remaining -= m * t

    s = remaining % 60
    # Singular only for exactly one second ("0.000 seconds", "1.000 second").
    ret += "%.3f %s" % (s, "second" if s == 1 else "seconds")

    return ret.strip()

def obj_id(_obj):
    """Return the identity of *_obj* as a string (used as a task key)."""
    return "{0}".format(id(_obj))

def __make_defer_call__(f, *args, **kwargs):
    """Schedule *f* on twisted's thread pool and return the Deferred."""
    deferred = threads.deferToThread(f, *args, **kwargs)
    return deferred

def __run_defer_fcs__():
    """Enter the twisted reactor loop; blocks until the reactor is stopped."""
    reactor.run()

def __stop_defer_fcs__():
    """Stop the twisted reactor, unblocking __run_defer_fcs__()."""
    reactor.stop()

def force_reinit_reactor():
    # HACK: a twisted reactor normally cannot be restarted after stop();
    # re-running __init__ on the reactor instance resets its state so a
    # later AsynWorker.start() can run it again.  Relies on reactor
    # internals -- confirm against the installed twisted version.
    reactor.__init__()

def on_gen_task_error(w, e):
    """Default handler for exceptions raised by the task generator.

    Logs the failure with a traceback, records the exception on the
    worker (so start() re-raises it) and asks the worker to stop.
    Does nothing when the worker is not currently running.
    """
    if w._run_stat != STAT_RUNNING:
        return

    import traceback

    LOG.debug("default Asynwork gen error handler: gentask fail: %s"%e)
    LOG.warn(traceback.format_exc())

    LOG.debug("try stop asywork")
    w._set_exception(e)
    w._try_stop()


# Worker life-cycle states (spelling kept as-is: values are part of the API).
STAT_RUNNING = "STAT_RUNNING"
STAT_STOPED = "STAT_STOPED"
STAT_STOPING = "STAT_STOPING"

class AsynWorker(object):
    """Concurrent task pump built on twisted's thread pool.

    Tasks are pulled one at a time from ``task_gen`` and handed to
    ``task_process`` on the reactor's thread pool, keeping at most
    ``pool_size`` tasks in flight.  ``request_handler`` receives each
    successful result, ``exception_handler`` each failure.  When the
    generator is exhausted and every task has completed, the reactor is
    stopped and :meth:`start` returns.
    """

    def __init__(self,
                 task_gen,
                 task_process,
                 request_handler,
                 exception_handler,
                 gen_task_exception_handler=on_gen_task_error,

                 task_gen_args=(),
                 task_gen_kwargs=None,

                 task_process_args=(),
                 task_process_kwargs=None,

                 request_handler_args=(),
                 request_handler_kwargs=None,

                 exception_handler_args=(),
                 exception_handler_kwargs=None,

                 gen_task_exception_args=(),
                 gen_task_exception_kwargs=None,

                 pool_size=10,
                 on_task_all_feed_callback=None,
                 ):
        """
        :param task_gen: callable returning an iterable/generator of tasks.
        :param task_process: callable(task, ...); runs in a worker thread.
        :param request_handler: callable(worker, task, result, ...);
            invoked when ``task_process`` succeeds.
        :param exception_handler: callable(worker, task, failure, ...);
            invoked when ``task_process`` raises (``failure`` is the value
            twisted passes to the errback).
        :param gen_task_exception_handler: callable(worker, exc, ...);
            invoked when ``task_gen`` itself raises.
        :param pool_size: maximum number of concurrently pending tasks;
            a floor of 5 is enforced.
        :param on_task_all_feed_callback: optional callable invoked once
            when the generator is exhausted and all tasks completed.
        """
        # ``None`` stands in for "no extra kwargs".  The original code used
        # literal ``{}`` defaults, which are shared across every instance
        # (classic mutable-default bug).
        if task_gen_kwargs is None:
            task_gen_kwargs = {}
        if task_process_kwargs is None:
            task_process_kwargs = {}
        if request_handler_kwargs is None:
            request_handler_kwargs = {}
        if exception_handler_kwargs is None:
            exception_handler_kwargs = {}
        if gen_task_exception_kwargs is None:
            gen_task_exception_kwargs = {}

        # Count of tasks fully processed (success or failure).
        self.__p_tasks_lock  = Lock()
        self.__p_tasks_count = 0

        # Count of deferToThread calls whose callback chain has not
        # finished yet.
        self._async_pending_lock = Lock()
        self._async_pending_count = 0

        # Object-ids of tasks currently in flight.
        self._pending_lock = Lock()
        self._pending_tasks = set()
        self._pool_size = max(5, pool_size)

        self._task_gen = task_gen
        self._task_gen_args = task_gen_args
        self._task_gen_kwargs = task_gen_kwargs

        self._task_process = task_process
        self._task_process_args = task_process_args
        self._task_process_kwargs = task_process_kwargs

        self._request_handler = request_handler
        self._request_handler_args = request_handler_args
        self._request_handler_kwargs = request_handler_kwargs

        self._exception_handler = exception_handler
        self._exception_handler_args = exception_handler_args
        self._exception_handler_kwargs = exception_handler_kwargs

        self._gen_task_exception_handler = gen_task_exception_handler
        self._gen_task_exception_args = gen_task_exception_args
        self._gen_task_exception_kwargs = gen_task_exception_kwargs

        self._on_task_all_feed_callback = on_task_all_feed_callback
        self.next_task = iter(self._task_gen(*self._task_gen_args, **self._task_gen_kwargs))
        self.logger = get_logger(self.__class__.__name__)

        self._feed_task = True
        self._run_stat = STAT_STOPED
        self.__exception = None

    def __on_async_process_complete(self, _):
        # Runs after both success and error handlers (addBoth); frees a
        # slot and tries to feed the next task.
        self.__dec_async_pending_count(1)
        self.schedule()

    def __inc_async_pending_count(self, deta):
        with self._async_pending_lock:
            self._async_pending_count += deta

    def async_pending_count(self):
        """Number of deferred calls still outstanding."""
        return self._async_pending_count

    def __dec_async_pending_count(self, deta):
        with self._async_pending_lock:
            self._async_pending_count -= deta

    def __on_success__(self, ret_data, task):
        # Deferred callback: mark the task done, then hand the result to
        # the user-supplied handler.
        self.__complete_task__(task)
        self._request_handler(self, task, ret_data,
                              *self._request_handler_args,
                              **self._request_handler_kwargs)

    def __on_error__(self, ret_data, task):
        # Deferred errback: ret_data is the failure object from twisted.
        self.__complete_task__(task)
        self._exception_handler(self, task, ret_data,
                                *self._exception_handler_args,
                                **self._exception_handler_kwargs)

    def add_task(self, task):
        """Submit *task* to the thread pool.

        :return: True if submitted, False if the same object is already
            pending (dedup is by object identity).
        """
        task_hash = obj_id(task)

        # Check-and-add under the lock: the original checked membership
        # without holding _pending_lock, racing against __pending_task__.
        with self._pending_lock:
            duplicated = task_hash in self._pending_tasks
            if not duplicated:
                self._pending_tasks.add(task_hash)

        if duplicated:
            self.logger.warn("Task(%s) is already pending"%str(task))
            return False

        self.__inc_async_pending_count(1)

        df = __make_defer_call__(self._task_process, task, *self._task_process_args, **self._task_process_kwargs)
        df.addCallback(self.__on_success__, task)
        df.addErrback(self.__on_error__, task)
        df.addBoth(self.__on_async_process_complete)
        return True

    def __inc_ptask_count__(self):
        with self.__p_tasks_lock:
            self.__p_tasks_count += 1

    def processed_task_count(self):
        """Total number of tasks completed so far (success or failure)."""
        return self.__p_tasks_count

    def __pending_task__(self, task):
        # Record *task* as in-flight (by object id).
        task_hash = obj_id(task)
        with self._pending_lock:
            self._pending_tasks.add(task_hash)

    def __pending_task_count__(self):
        with self._pending_lock:
            return len(self._pending_tasks)

    def __complete_task__(self, task):
        # Remove *task* from the in-flight set, bump the processed counter
        # and try to feed more work.
        task_hash = obj_id(task)
        with self._pending_lock:
            if task_hash in self._pending_tasks:
                self._pending_tasks.remove(task_hash)

        self.__inc_ptask_count__()
        self.schedule()

    def schedule(self):
        """Feed tasks from the generator until the pool is full.

        Iterative rewrite of the original self-recursive version: the
        ``not self._feed_task`` branch used to recurse once per second of
        sleep and every fed task added a stack frame, which eventually
        hits the recursion limit on long runs.
        """
        while True:
            if self.__pending_task_count__() >= self._pool_size:
                return

            if not self._feed_task:
                # Feeding is paused (stop_feed); poll until resumed or
                # the worker stops.
                time.sleep(1)
                continue

            try:
                task = next(self.next_task)
            except StopIteration:
                # Generator exhausted: once nothing is pending anywhere,
                # notify and shut down.
                if self.__pending_task_count__() == 0 and self.async_pending_count() == 0:
                    self.logger.debug("No mini-task to process! Stop")
                    if callable(self._on_task_all_feed_callback):
                        self._on_task_all_feed_callback()

                    self._try_stop()
                return
            except Exception as e:
                if callable(self._gen_task_exception_handler):
                    self._gen_task_exception_handler(self, e,
                                                     *self._gen_task_exception_args,
                                                     **self._gen_task_exception_kwargs)
                return

            # _feed_task may have been flipped by another thread while we
            # were generating; re-check before submitting.
            if self._feed_task:
                self.add_task(task)

    def stop_feed(self):
        """Stop pulling new tasks; already-submitted tasks keep running."""
        self._feed_task = False
        self.logger.debug("stop feed task, wait %s task process complete"%self.async_pending_count())

    def _try_stop(self):
        """Request a reactor shutdown (no-op unless currently running)."""
        if self._run_stat != STAT_RUNNING:
            return

        self.logger.debug("try stop")
        # BUGFIX: the original assigned ``self._is_running`` here, leaving
        # _run_stat at STAT_RUNNING so repeated calls queued multiple
        # reactor.stop() calls.
        self._run_stat = STAT_STOPING
        __make_defer_call__(self.stop)

    def stop(self):
        """Stop the reactor immediately."""
        __stop_defer_fcs__()

    def start(self):
        """Run the worker; blocks until all tasks finish or stop() is called.

        :raises: re-raises any exception recorded via _set_exception
            (e.g. by the generator error handler).
        """
        start_time = time.time()

        self._run_stat = STAT_RUNNING
        self.schedule()

        self.logger.debug("start async worker")
        __run_defer_fcs__()
        self.logger.debug("processed mini-tasks: %s cost: %s"%(self.processed_task_count(),
                                              seconds_to_hm(time.time() - start_time)))

        self._run_stat = STAT_STOPED
        # Allow a subsequent start() on the same process.
        force_reinit_reactor()

        if self.__exception is not None:
            self.logger.debug("exit with exception: %s"%self.__exception )
            raise self.__exception

    def _set_exception(self, e):
        """Record *e* to be re-raised when start() returns."""
        self.logger.debug("set exit exception: %s"%e)
        self.__exception = e