#!/usr/bin/env python

# encoding: utf-8

'''
 * Create File runner
 * Created by leixu on 2017/11/13
 * IDE PyCharm
'''
import importlib
from luobocrawler.selenium_script.crawler import Crawler
import inspect
from luobocrawler.script_runner import async_script_runner, this_process_script_runner
import os

from multiprocessing import Process
from multiprocessing.pool import Pool


class ScriptRunner(object):
    """Load a crawler script module by name and run the ``Crawler`` subclass
    it defines, dispatching to the runner selected by ``crawler_config["type"]``.
    """

    def __init__(self, name, crawler_config):
        # Crawler instance, created lazily by get_node() before a run starts.
        self._instance_script = None
        self._script_name = name
        self._crawler_config = crawler_config

    def _begin_script(self, rule_index, param_deliver):
        """Dispatch the loaded crawler instance to the configured runner.

        Returns the runner's result; an unrecognized ``type`` value falls
        through and returns None (original behavior, preserved).
        """
        print("开始执行爬取规则")
        runner_type = self._crawler_config["type"]
        if runner_type == "custom":
            return this_process_script_runner(self._instance_script, rule_index, param_deliver)
        elif runner_type == "async":
            return async_script_runner(self._instance_script, rule_index, param_deliver)

    def check_script(self):
        """Import ``script.<self._script_name>`` and return the first proper
        ``Crawler`` subclass defined in it.

        Raises:
            Exception: when the module defines no ``Crawler`` subclass.
        """
        print(f"开始加载:script.{self._script_name}")
        _script = importlib.import_module(f'script.{self._script_name}')

        for _name, obj in inspect.getmembers(_script, inspect.isclass):
            # The Crawler base class itself does not count -- only subclasses.
            if issubclass(obj, Crawler) and obj is not Crawler:
                return obj
        raise Exception("没有找到对应的模块")

    def get_node(self):
        """Instantiate the script's Crawler subclass.

        check_script() either returns a class or raises, so the previous
        None check here was unreachable and has been removed.
        """
        self._instance_script = self.check_script()()

    def start(self, rule_index, param_deliver):
        """Load the script, then execute rule *rule_index* with *param_deliver*."""
        self.get_node()
        return self._begin_script(rule_index, param_deliver)


def do_script(script_path, params_config, active_rule, crawler_config, result=None):
    """Run the crawler script located at *script_path*.

    Args:
        script_path: filesystem path of the script; its stem (filename
            without extension) selects ``script.<name>`` for import.
        params_config: parameters forwarded to the script run.
        active_rule: rule index to execute.
        crawler_config: config dict whose ``"type"`` key picks the runner.
        result: optional dict; when supplied, the run's outcome is stored
            under the key ``"result"``. NOTE(review): a plain dict does not
            propagate back across a Process boundary -- pass a
            multiprocessing.Manager().dict() for that.

    Returns:
        Whatever the selected script runner returns.
    """
    name = os.path.split(script_path)[-1].split(".")[0]
    script_runner = ScriptRunner(name, crawler_config)
    value = script_runner.start(active_rule, params_config)
    # The `result` parameter used to be accepted but silently overwritten;
    # populate it when provided so in-process callers can observe the outcome.
    if isinstance(result, dict):
        result["result"] = value
    return value


class RunnerResult:
    """Mutable holder for an asynchronous run's outcome.

    ``val`` carries the result value (None until available) and ``end_flag``
    flips to True once the run has completed. Both are plain read/write
    attributes: the original pass-through @property accessors added no
    validation or computation, so they have been removed.
    """

    def __init__(self, val=None):
        self.val = val          # result value; None until the run finishes
        self.end_flag = False   # set True by the completion callback


class ProcessRunner(object):
    """Launch ``do_script`` either on a single-worker pool (with an
    observable RunnerResult) or as a detached child Process."""

    def start_process_pool(self, script_path, params_config, active_rule, crawler_config):
        """Run the script on a one-worker Pool; return a RunnerResult that
        the pool's callback fills in on completion.

        The original code dropped its only reference to the Pool on return,
        so the pool could be garbage-collected (and its workers terminated)
        before the callback ever fired, and it was never close()d. The pool
        is now closed (no further tasks; the queued one still completes) and
        kept alive via a reference on the returned RunnerResult.
        """
        runner_result = RunnerResult()
        pool = Pool(processes=1)

        def _callback(value):
            # Runs in the parent process when the task finishes.
            runner_result.val = value
            runner_result.end_flag = True

        pool.apply_async(
            do_script,
            (script_path, params_config, active_rule, crawler_config),
            callback=_callback,
        )
        pool.close()  # accept no more tasks; do NOT join (would block caller)
        # Keep the pool alive as long as the result object is referenced.
        runner_result._pool = pool
        return runner_result

    def gen_process(self, script_path, params_config, active_rule, crawler_config, result=None):
        """Run ``do_script`` in a detached child process.

        Args:
            result: optional mapping forwarded to ``do_script``. NOTE(review):
                a plain dict does not propagate back across the process
                boundary; use a multiprocessing.Manager().dict() to observe
                the outcome from the parent.

        Returns:
            The started Process, so callers may join() or terminate() it.
        """
        if result is None:
            # Replaces the original mutable default argument `result={}`,
            # which would have been shared across all calls.
            result = {}
        p = Process(target=do_script, args=(script_path, params_config, active_rule, crawler_config, result))
        p.start()
        return p
