#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/2/1 10:18
# @Author  : Ramsey
# @Site    : zh
# @File    : crawler.py
# @Software: PyCharm
import asyncio
from datetime import datetime
import signal
from spider_framework.spider import Spider
from typing import Type, Set, final, Optional
from spider_framework.core.engine import Engine
from spider_framework.utils.log import spider_logger
from spider_framework.utils.status_collector import StatusCollector

# Module-level logger scoped to this module's import path.
logger = spider_logger(__name__)


class Crawler:
    """Drives a single spider run.

    Builds the spider instance, the engine and the status collector,
    starts the engine, and finalizes run statistics on close.
    """

    def __init__(self, spider_cls: "Type[Spider]", settings):
        # Spider *class*, not an instance; instantiated lazily in crawl().
        self.spider_cls = spider_cls
        # The collaborators below are populated by crawl(); None beforehand.
        self.spider: Optional["Spider"] = None
        self.engine: Optional["Engine"] = None
        self.status_collector: Optional["StatusCollector"] = None
        # Copy so per-spider custom_settings merged later do not leak into
        # the settings object shared with other crawlers.
        self.settings = settings.copy()

    async def crawl(self):
        """Instantiate the collaborators and hand control to the engine."""
        self.spider = self._create_spider()
        self.engine = self._create_engine()
        self.status_collector = self._create_status_collector()
        await self.engine.start_spider(spider_instance=self.spider)

    def _create_status_collector(self) -> "StatusCollector":
        """Create the status collector and stamp the run's start time."""
        status_collector = StatusCollector(crawler=self)
        status_collector.status_dict["start_time"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        return status_collector

    def _create_engine(self) -> "Engine":
        """Create the engine bound to this crawler."""
        return Engine(crawler=self)

    def _create_spider(self) -> "Spider":
        # NOTE(review): instantiating directly like this is questionable
        # (original comment flagged the same) — consider letting the spider
        # class own its construction policy.
        spider_instance = self.spider_cls.create_instance(self)
        self._set_spider_setting(spider_instance)
        return spider_instance

    def _set_spider_setting(self, spider_instance):
        """Apply spider-specific configuration onto the settings copy."""
        self.merge_setting(spider_instance)

    def merge_setting(self, spider_instance):
        """Overlay the spider's optional ``custom_settings`` onto settings."""
        custom = getattr(spider_instance, "custom_settings", None)
        if custom is not None:
            self.settings.update_settings(custom)

    def close(self, reason: str = "finished"):
        """Stamp the end time and close the spider via the status collector.

        Safe to call even if crawl() never ran: without a status collector
        there is nothing to finalize, so this is a no-op (previously this
        raised AttributeError on ``None``).
        """
        if self.status_collector is None:
            return
        self.status_collector.status_dict["end_time"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        self.status_collector.close_spider(spider=self.spider, reason=reason)


class StartUpCrawler:
    """Entry point: creates one Crawler per spider class and runs them
    concurrently on the asyncio event loop."""

    def __init__(self, settings=None):
        # BUG FIX: these were annotated ``final[Set]``. ``typing.final`` is
        # a decorator, not a generic; attribute annotations are evaluated at
        # runtime (PEP 526), so subscripting it raised TypeError on every
        # instantiation. Use proper Set annotations instead.
        self.crawlers: Set["Crawler"] = set()
        self._active: Set[asyncio.Task] = set()
        self.settings = settings
        # Install a SIGINT handler so ctrl+c stops the engines gracefully.
        signal.signal(signal.SIGINT, self._shutdown)

    async def crawl(self, spider: Type["Spider"]):
        """Create a crawler for *spider* and schedule its run as a task."""
        crawler = self._create_crawler(spider_cls=spider)
        self.crawlers.add(crawler)
        task = await self._crawl(crawler)
        self._active.add(task)

    @staticmethod
    async def _crawl(crawler) -> asyncio.Task:
        """Wrap the crawler's crawl() coroutine in a scheduled task."""
        return asyncio.create_task(crawler.crawl())

    async def start(self):
        """Block until every scheduled crawl task has finished."""
        await asyncio.gather(*self._active)

    def _create_crawler(self, spider_cls) -> "Crawler":
        """Build a Crawler sharing this runner's settings."""
        return Crawler(spider_cls, self.settings)

    def _shutdown(self, _signum, _frame):
        """SIGINT handler: ask every running engine to stop."""
        for crawler in self.crawlers:
            # Engine is None for crawlers whose crawl() never started.
            if crawler.engine is not None:
                crawler.engine.running = False
        logger.warning("爬虫程序通过 `ctrl + c` 关闭")


if __name__ == "__main__":
    # Module is intended to be imported by the framework; no standalone
    # behavior is defined yet.
    pass
