#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/5/16 14:40
# @Author  : Leixu
# @Site    : 
# @File    : base_spider.py
# @Software: PyCharm Community Edition
import traceback
from abc import ABCMeta, abstractmethod
from enum import Enum, unique
from asyncio import Queue
import asyncio
from celery.result import AsyncResult
from luobocrawler.selenium_script.runner import ScriptRunner, ProcessRunner

from luobo.component.celery_c.task import *


@unique
class SpiderState(Enum):
    """Lifecycle states tracked by ``LuoBoSpider.spider_state``."""
    INIT = 0        # freshly constructed (set in LuoBoSpider.__init__)
    PREPARING = 1   # presumably set while a run is being set up — not used in this file; confirm
    RUNNING = 2     # a launcher has dispatched work for this spider
    STOP = 3        # presumably a terminal/stopped state — not used in this file; confirm
    SUSPENDED = 4   # launcher queue drained; waiting for more items
    ERROR = 5       # presumably an unrecoverable failure — not used in this file; confirm


@unique
class ScriptRunMode(Enum):
    """Where a selenium script executes: in-process or via a remote worker."""
    LOCAL = 0   # run through ProcessRunner on this machine
    REMOTE = 1  # run through the celery `remote_script` task (BrowserSpider default)


@unique
class SpiderType(Enum):
    """Discriminates the two spider families defined below."""
    CUSTOMER = 0  # plain request/response spider (BaseSpider)
    BROWSER = 1   # browser/selenium-driven spider (BrowserSpider)


class LuoBoSpider(metaclass=ABCMeta):
    """Abstract root of the spider hierarchy; owns the lifecycle state.

    Subclasses (BaseSpider, BrowserSpider) inherit a validated
    ``spider_state`` property that only accepts ``SpiderState`` members.
    """

    def __init__(self):
        # Every spider starts in INIT until a launcher drives it.
        self._spider_state = SpiderState.INIT

    @property
    def spider_state(self):
        """Current :class:`SpiderState` of this spider."""
        return self._spider_state

    @spider_state.setter
    def spider_state(self, val):
        """Assign a new state.

        Raises:
            TypeError: if *val* is not a ``SpiderState`` member.  (The
                original raised a bare ``Exception``; ``TypeError`` is more
                precise and is still caught by any ``except Exception``.)
        """
        if not isinstance(val, SpiderState):
            raise TypeError("Set spider state failed")
        self._spider_state = val


class BaseSpider(LuoBoSpider):
    """Skeleton for plain request/response ("customer") spiders.

    Concrete subclasses must supply the seed URL(s) via ``start_url`` and
    the response handler ``process_data``; the remaining hooks are optional
    no-ops that subclasses may override.
    """

    def __init__(self):
        super().__init__()
        self._spider_type = SpiderType.CUSTOMER

    @abstractmethod
    def start_url(self):
        """Provide the starting URL(s) for the crawl (must override)."""

    @abstractmethod
    def process_data(self, response):
        """Handle a fetched response (must override)."""

    def start(self):
        """Optional hook invoked to start the spider; default does nothing."""

    def execute_fetch(self):
        """Optional hook for performing a fetch; default does nothing."""

    def process(self):
        """Optional hook for post-processing; default does nothing."""


class BrowserSpider(LuoBoSpider):
    """Skeleton for spiders that drive a real browser via selenium scripts.

    Defaults to remote script execution (``ScriptRunMode.REMOTE``).
    Concrete subclasses must implement ``start_crawler``; the other hooks
    are optional no-ops.
    """

    def __init__(self):
        super().__init__()
        self.RUN_MODE = ScriptRunMode.REMOTE
        self._spider_type = SpiderType.BROWSER

    @abstractmethod
    def start_crawler(self):
        """Kick off the browser-driven crawl (must override)."""

    def param_input(self):
        """Optional hook: feed parameters into the script run."""

    def response_output(self):
        """Optional hook: consume the script's output."""

    def error_handler(self):
        """Optional hook: react to script failures."""


class Launcher(object):
    """Feeds queued URL items to the remote celery fetch task.

    Queue items are dicts with ``"url"`` and ``"callback"`` keys; a ``None``
    item is the shutdown sentinel and the url string ``"pass"`` skips one
    item.
    """

    def __init__(self, setting, spider):
        self.spider = spider
        self.setting = setting
        # Per-instance queue.  The original class-level ``Queue()`` was
        # shared by every Launcher and, on Python < 3.10, bound itself to
        # whatever event loop existed at import time, causing
        # "attached to a different loop" failures.
        self.url_list = Queue()

    def load_url(self, url):
        """Schedule a queue item (dict with "url"/"callback") for insertion.

        Must be called while an event loop is running (uses ensure_future).
        """
        asyncio.ensure_future(self.url_list.put(url))

    async def get_url(self):
        """Pop the next item; returns ``(url, callback)`` or ``(None, None)``."""
        item = await self.url_list.get()
        if item is None:
            return None, None
        return item["url"], item["callback"]

    def get_queue_size(self):
        """Number of items currently waiting in the queue."""
        return self.url_list.qsize()

    # NOTE(review): declared @staticmethod yet takes ``self`` — callers must
    # pass the instance explicitly (e.g. ``Launcher.begin(launcher, s)``).
    # Kept as-is so existing call sites keep working; confirm all callers
    # before converting this into a normal method.
    @staticmethod
    async def begin(self, crawler_setting):
        """Consume URLs until a ``None`` sentinel arrives.

        Each URL is dispatched to the ``tornado_http_client`` celery task;
        the task result (or its traceback text on failure) is handed to the
        item's callback via the celery promise.
        """
        while True:
            if self.get_queue_size() == 0:
                # Nothing queued: mark the spider idle before blocking on get.
                self.spider.spider_state = SpiderState.SUSPENDED
            url, callback = await self.get_url()
            # Equality, not identity: ``url is "pass"`` relied on CPython
            # string interning and is a SyntaxWarning since Python 3.8.
            if url == "pass":
                continue
            if url is None:
                break
            try:
                print("开始任务")
                self.spider.spider_state = SpiderState.RUNNING
                res: AsyncResult = tornado_http_client.apply_async(
                    args=[url, crawler_setting],
                    queue='machine1', routing_key='machine1')

                def wait_for_result():
                    # Attach a completion hook to the celery promise.
                    def on_result_ready(result: AsyncResult):
                        if result.failed():
                            response = result.traceback
                        else:
                            response = result.result
                        callback(response)
                        result.forget()  # release the backend entry

                    res.then(on_result_ready)

                wait_for_result()
            except StopIteration:
                traceback.print_exc()
                break
            except UnicodeDecodeError:
                # Decode problems are logged but do not stop the loop.
                traceback.print_exc()
            except Exception:
                traceback.print_exc()
                break

    async def begin_local(self):
        """Placeholder for a local (non-celery) run mode."""
        pass


class WorkLauncher(object):
    """Feeds queued crawler jobs to celery (``begin``) or a local process
    pool (``begin_local``).

    Queue items are dicts with ``"crawler"`` and ``"callback"`` keys; a
    ``None`` item is the shutdown sentinel.
    """

    def __init__(self, setting, spider):
        self.spider = spider
        self.setting = setting
        # Per-instance queue.  The original class-level ``Queue()`` was
        # shared by every WorkLauncher and, on Python < 3.10, bound itself
        # to whatever event loop existed at import time.
        self.crawler_list = Queue()

    def load_crawler(self, crawler):
        """Schedule a job item (dict with "crawler"/"callback") for insertion.

        Must be called while an event loop is running (uses ensure_future).
        """
        asyncio.ensure_future(self.crawler_list.put(crawler))

    async def get_crawler(self):
        """Pop the next item; returns ``(crawler, callback)`` or ``(None, None)``."""
        item = await self.crawler_list.get()
        if item is None:
            return None, None
        return item["crawler"], item["callback"]

    # NOTE(review): declared @staticmethod yet takes ``self`` — callers must
    # pass the instance explicitly (e.g. ``WorkLauncher.begin(wl, spider)``).
    # Kept as-is so existing call sites keep working; confirm all callers
    # before converting this into a normal method.
    @staticmethod
    async def begin(self, spider):
        """Run one crawler job remotely through the ``remote_script`` task.

        The result (or traceback text on failure) is passed to the item's
        callback; the coroutine polls ``_e_flag`` so it only returns after
        the celery promise has fired.
        """
        try:
            print("开始任务")
            self.spider.spider_state = SpiderState.RUNNING
            crawler, callback = await self.get_crawler()
            _e_flag = False  # flipped by the promise callback when done

            print("获取数据")

            if crawler is None:
                return
            params_config = spider.params_deliver.get_config()
            script_context = crawler.get_script_context()
            active_rule = crawler.rule_index
            config = crawler.get_config()
            res = remote_script.apply_async(
                args=[script_context, params_config, active_rule, config],
                queue='machine1', routing_key='machine1')

            async def wait_for_result():

                def on_result_ready(result: AsyncResult):
                    # Deliver either the payload or the traceback text.
                    nonlocal _e_flag
                    if result.failed():
                        response = result.traceback
                    else:
                        response = result.result
                    print("获取到结果")
                    callback(response)
                    result.forget()  # release the backend entry
                    _e_flag = True

                def on_error(error):
                    print(error)

                res.then(on_result_ready, on_error=on_error)

            await wait_for_result()
            # Poll (10 s granularity) until the promise callback fires.
            while not _e_flag:
                await asyncio.sleep(10)
        except StopIteration:
            traceback.print_exc()
        except UnicodeDecodeError:
            traceback.print_exc()
        except Exception:
            traceback.print_exc()

    # NOTE(review): same @staticmethod-with-``self`` pattern as ``begin``.
    @staticmethod
    async def begin_local(self, spider):
        """Run one crawler job in a local process pool instead of celery."""
        try:
            print("开始任务")
            await asyncio.sleep(1)
            self.spider.spider_state = SpiderState.RUNNING
            crawler, callback = await self.get_crawler()

            if crawler is None:
                return

            print("获取数据")
            params_config = spider.params_deliver.get_config()
            active_rule = crawler.rule_index
            script_path = crawler.get_script_path()
            config = crawler.get_config()
            runner = ProcessRunner()
            result = runner.start_process_pool(script_path, params_config, active_rule, config)
            # Poll (3 s granularity) until the subprocess signals completion.
            while not result.end_flag:
                await asyncio.sleep(3)
            callback(result.val)
        except Exception:
            # The ``as e`` binding was unused; the traceback has the details.
            traceback.print_exc()


class Setting(object):
    """Tunable launcher parameters (neither constant is read in this file —
    presumably consumed by launcher call sites; confirm before changing)."""
    # How many URLs to take per fetch cycle — assumed; TODO confirm usage.
    URL_NUMBERS_ONE_TIME = 1
    # Degree of worker parallelism — assumed; TODO confirm usage.
    PARALLEL_WORKER_NUMBERS = 1