# coding: utf-8
# author: t_wolf
#   date: 2016-06-28

# Framework entry point.
# Loads configuration variables and performs environment checks.

from web_utils import *


class EasyWeb:
    def __init__(self):
        # No instance state: every feature of EasyWeb is exposed as a
        # static method, so construction is a no-op.
        pass

    @staticmethod
    def start_server():
        # Placeholder hook: server startup, not implemented yet.
        pass

    @staticmethod
    def start_frame():
        # Placeholder hook: frame initialization, not implemented yet.
        pass

    @staticmethod
    def start_frame_session():
        # Placeholder hook: session-layer initialization, not implemented yet.
        pass

    @staticmethod
    def start_apps():
        # Placeholder hook: application loading, not implemented yet.
        pass

    @staticmethod
    def process_init():
        """Determine this process's role at startup.

        Under the UWSGI run mode several worker processes start the frame
        concurrently; each one drops a marker file into
        ``framework/data/processes`` and the worker that finds no fresh
        markers (created within the last 10 seconds) takes the
        main-process role.  In DEFAULT mode the single process is always
        the main one.

        :raises RuntimeError: more fresh markers exist than configured workers.
        :raises ValueError: the configured run mode is unknown.
        """
        run_mode = CONFIGS.FRAME.RUN_OPTIONS.MODE.upper()
        processes = CONFIGS.FRAME.RUN_OPTIONS.UWSGI.workers
        if run_mode == 'UWSGI':
            curr_time = time.time()
            process_list = []
            process_home = os.path.join(ROOT_PATH, 'framework/data/processes')
            # Collect the marker files belonging to this launch round,
            # i.e. created less than 10 seconds ago.
            for item in os.listdir(process_home):
                item_path = os.path.join(process_home, item)
                if os.path.isfile(item_path) and item.startswith(
                        'processes') and os.path.getctime(item_path) + 10 > curr_time:
                    process_list.append(item_path)

            process_num = len(process_list)
            if process_num == 0:
                # First worker of the round becomes the main process.
                FRAME_DEFINE.MAIN_PROCESS = True

            # Drop this worker's own marker.  Close the handle right away
            # instead of leaking it: only the file's existence matters.
            open(os.path.join(process_home, 'processes_%d' % process_num), 'w').close()
            if process_num + 1 == processes:
                # Last expected worker: clean up this round's markers.
                for _p in process_list:
                    system_remove(_p)
            elif process_num + 1 > processes:
                raise RuntimeError('Wrong Start Status for Run Mode [UWSGI].')

        elif run_mode == 'DEFAULT':
            FRAME_DEFINE.MAIN_PROCESS = True
        else:
            raise ValueError('UnKnown Run Mode [%s] for EasyWeb Frame.' % run_mode)

    @staticmethod
    def reg_background_worker(app, workers):
        """Register an application's scheduled background workers.

        :param app: application the scheduled tasks belong to
        :param workers: worker descriptors; each carries a 'module' dotted
                        path and a 'class' list of class names to instantiate
        :return: None
        """
        logger.debug("App [%s] has background_task: %s" % (app, workers))
        for spec in workers:
            module = importlib.import_module(spec['module'])
            for cls_name in spec['class']:
                # Instantiate the worker and remember where it came from so
                # log messages can point back at the class.
                instance = getattr(module, cls_name)()
                setattr(instance, 'class_path', "%s.%s" % (spec['module'], cls_name))
                back_workers.update({stalker_ids.next(): instance})

    @staticmethod
    def generate_background_threading():
        """Build the global worker schedule table and return the (not yet
        started) daemon thread that will dispatch the workers.

        :return: a daemon ``Thread`` targeting ``EasyWeb.task_threading``
        """
        for worker_id, worker in back_workers.items():
            s = getattr(worker, 's')
            ms = getattr(worker, 'ms')
            cron = getattr(worker, 'cron')
            counts = getattr(worker, 'counts')
            app = getattr(worker, 'belong_app')
            class_path = getattr(worker, 'class_path')

            if cron:
                work_type = " Type [Cron]"
                entry = {'type': 'cron', 'schedule': cron}
            else:
                work_type = ""
                # Plain interval schedule, stored in milliseconds.
                entry = {'type': 'comm', 'schedule': s * 1000 + ms}

            if counts:
                # Optional limit on how many times the worker may run.
                entry['counts'] = counts

            logger.debug("In App [%s], Register an Background Worker%s: [%s]." % (app, work_type, class_path))

            back_workers_schedule.update({worker_id: entry})

        th = Thread(target=EasyWeb.task_threading)
        th.setDaemon(True)
        return th

    @staticmethod
    def task_threading():
        """Background dispatch loop.

        Spawns the scheduler child process and, for every task id it sends
        back over the pipe, runs the matching registered worker in its own
        thread.  The scheduler sends 0 as an end-of-work sentinel, which
        terminates the loop.
        """
        parent_conn, child_conn = Pipe()
        p = Process(target=EasyWeb.plan_workers_schedule, args=(child_conn,))
        p.start()

        while True:
            execute_id = 0
            try:
                execute_id = int(parent_conn.recv())
                if not execute_id:
                    # 0 is the scheduler's end-of-work sentinel.
                    break
                curr_task = back_workers[execute_id]
                Thread(target=lambda x: x.run(), args=(curr_task,)).start()

            except EOFError:
                # The scheduler process died or closed the pipe: stop
                # dispatching instead of spinning on recv() errors forever.
                break
            except Exception as e:
                # Look the worker up defensively: if recv() itself raised,
                # execute_id may not be a registered id, and the original
                # back_workers[execute_id] lookup would mask the real error
                # with a KeyError.
                worker = back_workers.get(execute_id)
                logger.error(
                    "Background Worker [%s] Exception: %s" % (
                        getattr(worker, 'class_path', execute_id),
                        e
                    )
                )

        logger.debug("All Background Worker Ending, Background Threading Now Quit.")

    @staticmethod
    def plan_workers_schedule(conn):
        """Scheduler loop (runs in a child process).

        Computes the wait time of every scheduled task, repeatedly sleeps
        until the soonest one is due and sends its id through *conn*.
        Sends 0 when the plan is exhausted so the dispatcher can quit.

        :param conn: writable end of a ``multiprocessing.Pipe``
        """
        task_plan = []  # items: [task_id, wait_seconds, is_cron]
        last_wait = 0
        task_schedule = back_workers_schedule

        def calc_task_delay(t_id):
            """Append the next planned run of *t_id* (if any) to task_plan."""
            if t_id not in task_schedule:
                return

            if 'counts' in task_schedule[t_id]:
                task_schedule[t_id]['counts'] -= 1

            if task_schedule[t_id]['type'] == 'cron':
                # Lazily compile the cron expression once, then ask the
                # Crontab object (not the schedule dict, as the original
                # did) for the delay until the next matching time.
                if not isinstance(task_schedule[t_id]['schedule'], Crontab):
                    task_schedule[t_id]['schedule'] = Crontab(task_schedule[t_id]['schedule'])
                next_wait = task_schedule[t_id]['schedule'].delay_time()
                next_cron = True
            else:
                next_wait = task_schedule[t_id]['schedule'] / 1000
                next_cron = False

            if 'counts' in task_schedule[t_id]:
                if task_schedule[t_id]['counts'] <= 0:
                    # Run budget exhausted: drop the task after this plan.
                    del task_schedule[t_id]

            task_plan.append([t_id, next_wait, next_cron])

        # Iterate over a snapshot: calc_task_delay may delete entries.
        for _id in list(task_schedule.keys()):
            calc_task_delay(_id)

        while task_plan:
            # Re-base cron waits by the time already slept, clamp negatives
            # to zero, and keep the [id, wait, is_cron] triple intact (the
            # original dropped the cron flag here and crashed unpacking on
            # the next pass).
            task_plan = [
                [p, max(q - last_wait if o else q, 0), o]
                for p, q, o in task_plan
            ]
            # Sort in place: the original called sorted() and discarded the
            # result, so the soonest task was never actually picked first.
            task_plan.sort(key=lambda x: x[1])
            curr_turn = task_plan.pop(0)
            last_wait = curr_turn[1]

            # Plan the task's next run before executing the current one.
            calc_task_delay(curr_turn[0])

            time.sleep(curr_turn[1])
            conn.send(curr_turn[0])

        conn.send(0)  # send to end sub process

    @staticmethod
    def start_public_pool():
        """Create the cross-process share pool from configuration.

        Supported backends: redis, shelf, sqlite, files.  The created pool
        is published on ``SHARE_SPACE.process``.  Does nothing when
        ``share_pool_enabled`` is off.

        :raises ValueError: the configured share_pool_type is unknown.
        """
        if CONFIGS.FRAME.share_pool_enabled:

            logger.debug("Ready to Start Share Pool.")

            share_pool_type = CONFIGS.FRAME.share_pool_type.lower()

            if share_pool_type == 'redis':
                SHARE_SPACE.process = RedisSharePool(
                    DbUtil.redis_connector(
                        CONFIGS.FRAME.redis_options.host,
                        CONFIGS.FRAME.redis_options.port,
                        CONFIGS.FRAME.redis_options.db,
                    )
                )
            elif share_pool_type == 'shelf':
                # The shelf handle stays open for the process lifetime.
                SHARE_SPACE.process = ShelfSharePool(
                    shelve.open(os.path.join(ROOT_PATH, 'framework/data/shelf'))
                )

            elif share_pool_type == 'sqlite':
                SHARE_SPACE.process = DBSharePool(
                    web.database(dbn="sqlite", db=os.path.join(ROOT_PATH, 'framework/data/easy.db')),
                    "easy_web_share_pool"
                )

            elif share_pool_type == 'files':
                SHARE_SPACE.process = FileSharePool(
                    os.path.join(ROOT_PATH, 'framework/data/normal')
                )

            else:
                # Bug fix: report the offending pool type, not the boolean
                # share_pool_enabled flag the original interpolated.
                raise ValueError(
                    "UnKnown Type [%s], can not create Share Pool." % share_pool_type
                )

            logger.debug("Create Share [%s] Pool." % share_pool_type.capitalize())
