import logging
from typing import Optional, Generator

from bald_spider.core.downloder import Downloader
from bald_spider.core.scheduler import Scheduler


class Engine:
    """Spider engine.

    Drives the crawl loop: requests produced by the spider's
    ``start_requests`` generator are enqueued on the scheduler, dequeued
    one at a time, and handed to the downloader.
    """

    def __init__(self):
        # Components are created lazily in start_spider(), not here.
        self.downloader: Optional[Downloader] = None
        self.start_requests: Optional[Generator] = None
        self.scheduler: Optional[Scheduler] = None

    async def start_spider(self, spider):
        """Set up the scheduler and downloader, then run the crawl loop.

        :param spider: spider instance exposing a ``start_requests()``
            iterable of initial requests.
        """
        self.scheduler = Scheduler()
        self.downloader = Downloader()
        # Give the scheduler a chance to initialize, if it supports it.
        if hasattr(self.scheduler, 'open'):
            self.scheduler.open()
        # iter() tolerates spiders whose start_requests returns a list
        # as well as a generator.
        self.start_requests = iter(spider.start_requests())
        await self.crawl()

    async def crawl(self):
        """Main crawl loop.

        Drains the scheduler queue; whenever the queue is empty, pulls the
        next start request from the spider and enqueues it. Exits once the
        start-request iterator is exhausted and the queue is empty.
        """
        while True:
            if (request := await self._get_next_request()) is not None:
                await self._crawl(request)
            else:
                if self.start_requests is None:
                    # Start requests exhausted and queue drained: we're done.
                    # (Previously termination relied on next(None) raising
                    # TypeError into the broad except below.)
                    break
                try:
                    start_request = next(self.start_requests)  # noqa
                except StopIteration:
                    # Mark the iterator exhausted; the next idle pass exits.
                    self.start_requests = None
                except Exception:
                    # A failing start_requests generator used to be swallowed
                    # silently; log it before stopping the crawl.
                    logging.getLogger(__name__).exception(
                        "error while advancing start_requests; stopping crawl"
                    )
                    break
                else:
                    await self.equeue_request(start_request)

    async def equeue_request(self, request):
        """Enqueue *request*; deduplication is the scheduler's concern.

        NOTE(review): the name keeps the historical 'equeue' typo for caller
        compatibility — consider renaming to enqueue_request project-wide.
        """
        await self._scheduler_request(request)

    async def _scheduler_request(self, request):
        # Delegate to the scheduler, which owns queueing/dedup policy.
        await self.scheduler.enqueue_request(request)

    async def _get_next_request(self):
        """Pop the next request from the scheduler (None when queue is empty)."""
        return await self.scheduler.next_request()

    async def _crawl(self, request):
        # NOTE(review): 'downloade' matches the Downloader API's spelling;
        # fix there first if it is ever renamed.
        await self.downloader.downloade(request)
