import asyncio
import time

from items import RequestItem, ResponseItem
from crawler.async_crawler import AsyncCrawler
from utils import logger, heartbeat
from utils.database import Kafka
from utils.async_context import AsyncContext
from config import settings


class Spider:
    """Kafka-driven spider.

    Consumes crawl-task messages from a Kafka topic, runs each task through
    ``AsyncCrawler`` on an asyncio event loop, and publishes successful
    responses to the ``<topic>_result`` Kafka topic.
    """

    def __callback(self, fut: asyncio.Future):
        """Completion callback for a single crawl future.

        On success, sends the response (with its ``errors`` field excluded)
        to the ``<consumer topic>_result`` Kafka topic; on failure, logs the
        errors.

        NOTE(review): ``fut.result()`` re-raises if the crawl coroutine itself
        raised — this assumes ``AsyncCrawler.request`` captures failures in
        ``ResponseItem.errors`` rather than raising; confirm.
        """
        result: ResponseItem = fut.result()
        if not result.errors:
            logger.success(result.url)
            Kafka.producer.send(
                result.dict(exclude={"errors"}),
                "key",
                f"{settings.kafka.consumer.topic}_result"
            )
            # TODO: delete the locally saved Kafka task
        else:
            # str(...) over .__str__() — identical result, idiomatic form.
            logger.error(str(result.errors))

    async def __make_request(self, tasks):
        """Schedule one crawl per task inside a shared ``AsyncContext``.

        :param tasks: iterable of dicts matching the ``RequestItem`` schema;
                      each is validated/converted via ``RequestItem(**task)``.
        """
        async with AsyncContext() as ac:
            for task in tasks:
                item = RequestItem(**task)
                ac.run(AsyncCrawler.request, self.__callback, args=(item,))

    @Kafka.consumer.decorator
    def __get_task(self, loop, **kwargs):
        """Consumer body invoked by the Kafka decorator with a message batch.

        Extracts the message values from ``kwargs['cache']`` and, when the
        batch is non-empty, drives the crawl batch to completion on ``loop``.

        :param loop: the asyncio event loop created by :meth:`run`.
        """
        # ``or ()`` guards against a missing/None cache so an empty poll
        # cannot crash the consumer loop.
        cache = kwargs.get('cache') or ()
        task_list = [msg.value for msg in cache]
        if task_list:
            loop.run_until_complete(self.__make_request(task_list))

    @classmethod
    def run(cls):
        """Entry point: create a dedicated event loop and start consuming.

        Blocks inside the Kafka consumer decorator wrapped around
        ``__get_task`` for the lifetime of the process.
        """
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        consumer = settings.kafka.consumer  # hoist the repeated settings chain
        cls().__get_task(
            loop,
            topic=consumer.topic,
            group_id=consumer.group_id,
            max_count=consumer.max_count,
            # presumably configured in seconds, converted to ms for the
            # consumer API — TODO confirm against Kafka.consumer.decorator
            timeout=consumer.timeout * 1000
        )


def _main() -> None:
    """Script entry point: start the liveness heartbeat, then run the spider."""
    heartbeat()
    Spider.run()


if __name__ == '__main__':
    _main()
