import json
from typing import Iterable
import six
from scrapy.http.request import Request

from scrapy_redis import spiders


class CustomSpider(spiders.RedisSpider):
    """Distributed spider that supports multiple redis keys.

    Overrides ``next_requests`` to pull at most ``redis_batch_size`` data
    items per scheduling cycle from the keys listed in ``redis_keys`` and
    turn each item into one or more requests.
    """

    # Redis keys to poll for serialized task data; must be a non-empty list
    # (set by a subclass or spider configuration) before the spider runs.
    redis_keys = []
    # Upper bound on the number of data items consumed per scheduling cycle.
    redis_batch_size = 1

    def next_requests(self):
        """Yield requests built from data popped off the configured redis keys.

        Raises:
            ValueError: if ``redis_keys`` is not a non-empty list.
        """
        if not isinstance(self.redis_keys, list) or not self.redis_keys:
            raise ValueError("redis_keys must be a non-empty list")
        found = 0
        datas = []
        for redis_key in self.redis_keys:
            # Only fetch what is still needed, so one cycle never exceeds
            # redis_batch_size items in total across all keys.
            remaining = self.redis_batch_size - len(datas)
            if remaining <= 0:
                break
            datas += self.fetch_data(redis_key, remaining)
        for data in datas:
            reqs = self.make_request_from_data(data)
            if isinstance(reqs, Iterable):
                for req in reqs:
                    yield req
                    found += 1
                    # Lazy %-formatting: only rendered if INFO is enabled.
                    self.logger.info('start req url:%s', req.url)
            elif reqs:
                yield reqs
                found += 1
            else:
                self.logger.debug("Request not made from data: %r", data)

        if found:
            # Was self.redis_key (singular, inherited attribute) — log the
            # keys this spider actually read from.
            self.logger.debug("Read %s requests from %r", found, self.redis_keys)

    def make_request_from_data(self, data):
        """Decode one raw redis payload (JSON-encoded bytes) into a request.

        The decoded payload is expected to be a dict understood by
        :meth:`make_requests_from_url`.
        """
        # The file already uses f-strings (Python 3 only), so the former
        # six.PY3 guard was always true and has been dropped.
        if isinstance(data, bytes):
            data = json.loads(data.decode(self.redis_encoding))
        return self.make_requests_from_url(data)

    def make_requests_from_url(self, url):
        """Build an unfiltered Request from a task dict.

        Args:
            url: dict with a 'url' key (request target) and a 'task_id' key
                 carried through in the request meta.
        """
        return Request(url.get('url'), dont_filter=True, meta={'task_id': url.get('task_id')})
