import asyncio
import collections
from datetime import datetime
from signal import SIGINT, SIGTERM
from types import AsyncGeneratorType
import typing
from typing import List

from aiohttp import ClientSession

from sai.utils import logger
from sai.exceptions import NotImplementedParseError, NothingMatchedError, DropItem
from sai.response import Response
from sai.item import Item
from sai.request import Request
from sai.pipelines import Pipeline


# Prefer uvloop's faster event-loop implementation when it is installed;
# silently fall back to the stock asyncio policy otherwise.
try:
    import uvloop

    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
    pass


class Spider:
    """
    Base class for an asyncio-driven spider.

    Subclasses set ``start_urls`` (or override :meth:`start_requests`)
    and implement :meth:`parse`.  A master coroutine enqueues request
    coroutines onto an :class:`asyncio.Queue`; a fixed pool of worker
    coroutines drains the queue, awaits the fetches, and feeds callback
    results (further requests, coroutines, or items) into the pipelines.
    """

    name = 'Sai'

    # Statistics fields.  ``self.x += 1`` shadows these class-level
    # defaults with per-instance counters on first increment.
    failed_counts: int = 0
    success_counts: int = 0

    # Concurrency control: number of worker coroutines and the size of
    # the fetch semaphore.
    worker_numbers: int = 2
    concurrency: int = 3

    # Spider entry.  May be left empty if start_requests is overridden.
    start_urls: list = None

    def __init__(self,
                 pipelines: List[Pipeline] = None,
                 loop=None,
                 cookies=None,
                 **spider_kwargs):
        """
        :param pipelines: item pipelines, opened/closed around the crawl
        :param loop: event loop to run on; a fresh one is created if omitted
        :param cookies: initial cookies for the shared aiohttp session
        :param spider_kwargs: extra keyword args stored on the instance
        """
        # start_urls may be [] — in that case start_requests must be
        # overridden (the original start_urls validation stays disabled).

        # Bug fix: Spider.start() forwards pipelines=None explicitly, which
        # previously left self.pipelines as None and crashed the iteration
        # in _start().  Normalizing here also removes the mutable default
        # argument (`pipelines=[]`) from the old signature.
        self.pipelines: List[Pipeline] = pipelines or []

        # Fall back to a fresh loop: set_event_loop(None) would otherwise
        # clear the current thread's event loop when the spider is
        # constructed directly without one.
        self.loop = loop or asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.request_config = {}
        self.headers = {}
        self.metadata = {}
        self.aiohttp_kwargs = {}
        self.spider_kwargs = spider_kwargs
        self.request_session = ClientSession(cookies=cookies)

        # Cookies captured from the latest successful response
        # (see _process_response).
        self.cookies: dict = None

        # Pending handle_request() coroutines collected by the workers.
        # Instance-level now (it used to be a shared class attribute,
        # which the old "Fixme" comments flagged) so concurrently running
        # spiders do not interfere with each other.
        self.worker_tasks: list = []

        # TODO: customize middleware

        # async queue acting as the producer/consumer channel
        self.request_queue = asyncio.Queue()

        # semaphore used for concurrency control of fetches
        self.sem = asyncio.Semaphore(self.concurrency)

    @classmethod
    def start(
        cls,
        pipelines=None,
        loop=None,
        cookies=None,
        **spider_kwargs,
    ):
        """
        Create a spider instance and run it to completion on its loop.

        :param pipelines: item pipelines forwarded to the constructor
        :param loop: event loop; a new one is created when omitted
        :param cookies: initial cookies for the aiohttp session
        :param spider_kwargs: additional keyword args to initialize spider
        :return: the finished instance of :cls:`Spider`
        """
        loop = loop or asyncio.new_event_loop()
        spider_ins = cls(pipelines=pipelines, loop=loop, cookies=cookies, **spider_kwargs)

        # Actually start crawling, then shut down async generators and
        # close the loop so repeated runs don't leak resources.
        spider_ins.loop.run_until_complete(spider_ins._start())
        spider_ins.loop.run_until_complete(spider_ins.loop.shutdown_asyncgens())
        spider_ins.loop.close()

        return spider_ins

    async def _start(self):
        """
        Run one crawl: install signal handlers, open pipelines, crawl,
        then always close the HTTP session / pipelines and log statistics.
        """
        start_time = datetime.now()

        # Install SIGINT/SIGTERM handlers where supported (the ProactorEventLoop
        # on Windows raises NotImplementedError).
        # NOTE(review): `self.stop` is not defined on this class — confirm
        # it is provided by a subclass or mixin before relying on it.
        for sig in (SIGINT, SIGTERM):
            try:
                # Bind `sig` as a default argument: a plain closure would
                # late-bind and make both handlers report the last signal.
                self.loop.add_signal_handler(
                    sig, lambda sig=sig: asyncio.ensure_future(self.stop(sig))
                )
            except NotImplementedError:
                logger.warning(
                    f"{self.name} tried to use loop.add_signal_handler "
                    "but it is not implemented on this platform."
                )

        # Actually run crawling
        try:
            for pipeline in self.pipelines:
                pipeline.open_spider(self)
            await self.start_master()
        finally:
            # Cleanup runs even when the crawl raised.
            await self.request_session.close()

            for pipeline in self.pipelines:
                pipeline.close_spider(self)

            # Display logs about this crawl task
            end_time = datetime.now()
            logger.info(
                f"Total requests: {self.failed_counts + self.success_counts}"
            )

            if self.failed_counts:
                logger.info(f"Failed requests: {self.failed_counts}")
            logger.info(f"Time usage: {end_time - start_time}")
            logger.info("Spider finished!")

    async def start_master(self):
        """
        Actually start crawling: enqueue the seed requests, spawn the
        worker pool, and wait until the queue is fully processed.
        """
        async for request_ins in self.start_requests():
            self.request_queue.put_nowait(self.handle_request(request_ins))
        workers = [
            asyncio.ensure_future(self.start_worker())
            for i in range(self.worker_numbers)
        ]
        for worker in workers:
            logger.info(f"Worker started: {id(worker)}")

        await self.request_queue.join()

        # The workers loop forever; cancel them once the queue is drained
        # so the event loop can close without pending tasks.
        for worker in workers:
            worker.cancel()

    async def start_worker(self):
        """
        Worker loop: pull request coroutines off the queue, batch them in
        ``self.worker_tasks``, and gather the batch whenever the queue
        momentarily runs empty.
        """
        while True:
            request_item = await self.request_queue.get()
            self.worker_tasks.append(request_item)
            if self.request_queue.empty():
                results = await asyncio.gather(
                    *self.worker_tasks, return_exceptions=True
                )
                for task_result in results:
                    # gather(return_exceptions=True) can hand back any
                    # exception type, not just RuntimeError — skip them all
                    # instead of trying to unpack them as (callback, response).
                    if not isinstance(task_result, Exception) and task_result:
                        callback_results, response = task_result
                        # if callback_results is not AsyncGeneratorType, just pass, all done
                        if isinstance(callback_results, AsyncGeneratorType):
                            await self._process_async_callback(
                                callback_results, response
                            )
                self.worker_tasks = []
            self.request_queue.task_done()

    async def start_requests(self):
        """
        Process the start URLs
        :return: An async iterator of :class:`Request` instances
        :raises NotImplementedError: when start_urls is empty and the
            subclass did not override this method
        """
        if self.start_urls:
            for url in self.start_urls:
                yield self.request(url=url, callback=self.parse, metadata=self.metadata)
        else:
            raise NotImplementedError

    def request(
        self,
        url: str,
        method: str = "GET",
        *,
        callback=None,
        encoding: typing.Optional[str] = None,
        headers: dict = None,
        metadata: dict = None,
        request_config: dict = None,
        request_session=None,
        **aiohttp_kwargs,
    ):
        """
        Init a Request class for crawling html
        :param url: target URL
        :param method: HTTP method, defaults to GET
        :param callback: coroutine invoked with the fetched response
        :param encoding: response text encoding override
        :param headers: per-request headers
        :param metadata: arbitrary data carried along with the request
        :param request_config: per-request configuration
        :param request_session: session override; defaults to the shared one
        :param aiohttp_kwargs: extra kwargs forwarded to aiohttp
        :return: a new :class:`Request`
        """
        headers = headers or {}
        metadata = metadata or {}
        request_config = request_config or {}
        request_session = request_session or self.request_session

        # NOTE(review): update order means spider-level settings override
        # the per-call values on key collisions — confirm this is intended.
        headers.update(self.headers.copy())
        request_config.update(self.request_config.copy())
        aiohttp_kwargs.update(self.aiohttp_kwargs.copy())

        return Request(
            url=url,
            method=method,
            callback=callback,
            encoding=encoding,
            headers=headers,
            metadata=metadata,
            request_config=request_config,
            request_session=request_session,
            **aiohttp_kwargs,
        )

    async def parse(self, response):
        """
        Used for subclasses, directly parse the responses corresponding with start_urls
        :param response: Response
        :raises NotImplementedParseError: always, unless overridden
        """
        raise NotImplementedParseError("<!!! parse function is expected !!!>")

    async def handle_request(
        self, request: Request
    ) -> typing.Tuple[AsyncGeneratorType, Response]:
        """
        Fetch a request (bounded by the semaphore) and run response
        bookkeeping.  All exceptions are logged, never propagated, so a
        single bad request cannot take down a worker.
        :param request: the request to execute
        :return: (callback result, response); both None on failure
        """
        callback_result, response = None, None

        try:
            callback_result, response = await request.fetch_callback(self.sem)
            await self._process_response(request=request, response=response)
        except NotImplementedParseError as e:
            logger.exception(e)
        except Exception as e:
            logger.exception(f"<Callback[{request.callback.__name__}]: {e}")

        return callback_result, response

    async def _process_response(self, request: Request, response: Response):
        """Update success/failure statistics and dispatch the response hooks."""
        if response:
            if response.ok:
                # Remember the cookies from the successful response
                self.cookies = response.cookies
                # Process succeed response
                self.success_counts += 1
                await self.process_succeed_response(request, response)
            else:
                # Process failed response
                self.failed_counts += 1
                await self.process_failed_response(request, response)

    async def process_succeed_response(self, request, response):
        """Hook for subclasses: called on every successful response."""
        pass

    async def process_failed_response(self, request, response):
        """Hook for subclasses: called on every failed response."""
        pass

    async def _process_async_callback(
        self, callback_results: AsyncGeneratorType, response: Response = None
    ):
        """
        Consume an async-generator callback, dispatching each yielded value:
        nested generators recurse, Requests/coroutines are re-enqueued,
        Items/dicts go through the pipelines, anything else is ignored.
        """
        try:
            async for callback_result in callback_results:
                if isinstance(callback_result, AsyncGeneratorType):
                    await self._process_async_callback(callback_result)
                elif isinstance(callback_result, Request):
                    self.request_queue.put_nowait(
                        self.handle_request(request=callback_result)
                    )
                elif isinstance(callback_result, typing.Coroutine):
                    self.request_queue.put_nowait(
                        self.handle_callback(
                            aws_callback=callback_result, response=response
                        )
                    )
                elif isinstance(callback_result, (Item, dict)):
                    # Process target item through each pipeline in order.
                    # Fixme: async?
                    new_callback_result = callback_result
                    for pipeline in self.pipelines:
                        try:
                            # A pipeline raises DropItem to discard the item.
                            new_callback_result = pipeline.process_item(new_callback_result, self)
                        except DropItem:
                            break
                else:
                    # Fixme: raise some error for unexpected yield types
                    pass
        except Exception as e:
            logger.exception(e)

    async def handle_callback(self, aws_callback: typing.Coroutine, response):
        """
        Process coroutine callback function
        :param aws_callback: the awaitable to run
        :param response: response forwarded alongside the result
        :return: (callback result or None, response)
        """
        callback_result = None

        try:
            callback_result = await aws_callback
        except NothingMatchedError as e:
            logger.exception(f"<Item: {str(e).lower()}>")
        except Exception as e:
            logger.exception(f"<Callback[{aws_callback.__name__}]: {e}")

        return callback_result, response

    # async def process_item(self, item: typing.Union[Item, dict]):
    #     raise NotImplemented
