#coding=utf8
# Copyright (c) 2016 Tinydot. inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from threading import Lock
import multiprocessing as mtp
from _thread import start_new_thread
import time
import os
from .logger import get_logger, LOG
import pickle


def worker(work_id,
           task_queue,
           func_task_process,
           handler_task_cmp,
           handler_task_error,
           args_fc_tsk_process,
           kwargs_fc_task_process
          ):
    """Worker loop executed inside a child process.

    Repeatedly pulls pickled message dicts off ``task_queue`` and runs
    ``func_task_process(work_id, task, *args, **kwargs)`` for each
    ``{"type": "task", "task": ...}`` message, until a
    ``{"type": "exit"}`` sentinel arrives.

    :param work_id: integer id of this worker (used for logging and
        passed through to every callback).
    :param task_queue: queue of pickled message dicts (see
        ``MultiPTask.start``).
    :param func_task_process: callable that processes one task.
    :param handler_task_cmp: optional callable invoked as
        ``handler_task_cmp(work_id, msg, ret)`` after a task succeeds.
    :param handler_task_error: optional callable invoked as
        ``handler_task_error(work_id, msg, exc)`` when processing raises.
    :param args_fc_tsk_process: extra positional args for ``func_task_process``.
    :param kwargs_fc_task_process: extra keyword args for ``func_task_process``.
    """
    LOG.debug("start worker: %s" % work_id)
    while True:
        tsk = pickle.loads(task_queue.get())
        if tsk.get("type") == "exit":
            LOG.debug("[%s] worker exit" % work_id)
            break
        try:
            ret = func_task_process(work_id, tsk.get("task"), *args_fc_tsk_process, **kwargs_fc_task_process)
            # completion callback also runs inside the try so its failures
            # are routed to the error handler as well (original behavior)
            if callable(handler_task_cmp):
                handler_task_cmp(work_id, tsk, ret)

        except Exception as e:
            if callable(handler_task_error):
                handler_task_error(work_id, tsk, e)
            else:
                # Fix: previously the exception was silently discarded when
                # no error handler was supplied -- at least log the traceback
                # so failed tasks are not invisible.
                LOG.exception("[%s] task failed (no error handler)" % work_id)

        # loop back



class MultiPTask(object):
    """Fan a stream of generated tasks out to a pool of worker processes.

    ``func_task_gen(*args_fc_tsk_gen, **kwargs_fc_task_gen)`` must yield
    tasks; each task is pickled and handed to ``func_task_process`` inside
    a child process (see the module-level :func:`worker` loop).
    """

    def __init__(self,
                 func_task_gen,
                 func_task_process,
                 handler_task_cmp = None,
                 handler_task_error = None,
                 args_fc_tsk_gen = None,
                 kwargs_fc_task_gen = None,
                 args_fc_tsk_process = None,
                 kwargs_fc_task_process = None,
                 process_count = -1,
                 task_queue_size = 10,
                ):
        """
        :param func_task_gen: generator callable yielding tasks to process.
        :param func_task_process: callable run in workers for each task.
        :param handler_task_cmp: optional per-task completion callback.
        :param handler_task_error: optional per-task error callback.
        :param args_fc_tsk_gen: positional args for ``func_task_gen``.
        :param kwargs_fc_task_gen: keyword args for ``func_task_gen``.
        :param args_fc_tsk_process: positional args for ``func_task_process``.
        :param kwargs_fc_task_process: keyword args for ``func_task_process``.
        :param process_count: number of worker processes; any value <= 0
            means "half the CPU count" (at least 1).
        :param task_queue_size: upper bound on queued-but-unprocessed tasks.
        """
        self.logger = get_logger(self.__class__.__name__)
        self.func_task_gen = func_task_gen
        self.func_task_process = func_task_process
        self.handler_task_cmp = handler_task_cmp
        self.handler_task_error = handler_task_error
        self.args_fc_tsk_gen = args_fc_tsk_gen or ()
        self.kwargs_fc_task_gen = kwargs_fc_task_gen or {}
        self.args_fc_tsk_process = args_fc_tsk_process or ()
        self.kwargs_fc_task_process = kwargs_fc_task_process or {}
        # <=0 means auto: half the CPUs, but never fewer than one worker
        if process_count > 0:
            self.process_count = int(process_count)
        else:
            self.process_count = max(1, mtp.cpu_count() // 2)
        self.task_queue_size = max(1, task_queue_size)

        # runtime state
        self.process_info = []   # [{"id": int, "process": mtp.Process}, ...]
        self.run_stat = False    # True while start() should keep feeding tasks

    def stop(self):
        """Ask a running start() to stop feeding new tasks.

        Workers still drain tasks already queued before they exit.
        """
        self.run_stat = False

    def start(self):
        """Spawn the workers, feed every generated task, then shut down.

        Blocks until all worker processes have exited.
        """
        self.run_stat = True
        # Bounded queue: put() blocks when the queue is full, which replaces
        # the old qsize()-polling throttle. Queue.qsize() raises
        # NotImplementedError on macOS, so polling it crashed start() there.
        self.task_queue = mtp.Queue(self.task_queue_size)
        # Reset so a second start() call does not see stale dead processes.
        self.process_info = []
        self.logger.debug("process count: %s" % self.process_count)
        for i in range(self.process_count):
            p = mtp.Process(target=worker, args=(i, self.task_queue,
                                                 self.func_task_process,
                                                 self.handler_task_cmp,
                                                 self.handler_task_error,
                                                 self.args_fc_tsk_process,
                                                 self.kwargs_fc_task_process))
            self.process_info.append({"id": i, "process": p})
            p.start()

        # gen task -- blocking put() provides the back-pressure
        for tsk in self.func_task_gen(*self.args_fc_tsk_gen, **self.kwargs_fc_task_gen):
            self.task_queue.put(pickle.dumps({"type": "task", "task": tsk}))
            if not self.run_stat:
                self.logger.debug("try stop all")
                break

        # exit: exactly one sentinel per worker -- each worker consumes one
        # and breaks (previously an unbounded busy-poll flooded the queue
        # with sentinels). Skip the remainder if every worker already died,
        # so we never block on a queue nobody is draining.
        for _ in range(self.process_count):
            if not self.process_alive():
                break
            self.task_queue.put(pickle.dumps({"type": "exit"}))

        # complete
        for p in self.process_info:
            p["process"].join()

        self.logger.debug("process complete")


    def process_alive(self):
        """Return the ids of workers whose processes are still alive."""
        return [p["id"] for p in self.process_info if p["process"].is_alive()]


