import json
from collections.abc import Iterable
from copy import deepcopy

from redis import ResponseError
from scrapy import signals, Request
from scrapy.exceptions import DontCloseSpider
from scrapy.spiders import Spider, CrawlSpider

from . import connection, defaults
from .utils import bytes_to_str
from crawler.connection import filter_pop_redis
from crawler.monitor import IdleLogRecorder


class RedisMixin(object):
    """Mixin class to implement reading urls from a redis queue.

    Messages are popped via ``crawler.connection.filter_pop_redis`` and,
    when the concrete spider defines a ``start_task(data)`` hook, turned
    into requests that are fed to the engine whenever the spider goes idle.
    """

    # Placeholder; unconditionally rebuilt in setup_redis() from the spider
    # name and PROJECT_TYPE, so REDIS_START_URLS_KEY only feeds the
    # intermediate value (see note in setup_redis).
    redis_key = None
    # Max number of redis messages consumed per next_requests() call.
    redis_batch_size = None
    # Encoding used when decoding messages from the redis queue.
    redis_encoding = None
    # Project namespace embedded in the queue key (settings PROJECT_TYPE).
    project_type = None

    # Redis client placeholder; set by setup_redis().
    server = None
    # Counts start_task() invocations; every 50th records an idle log entry.
    idle_times = 0
    # NOTE: class attribute, shared by every spider instance in the process.
    idle_logger = IdleLogRecorder()

    def start_requests(self):
        """Return a batch of start requests from redis."""
        return self.next_requests()

    def setup_redis(self, crawler=None):
        """Setup redis connection and idle signal.

        This should be called after the spider has set its crawler object.

        Parameters
        ----------
        crawler : scrapy.crawler.Crawler, optional
            Falls back to ``self.crawler`` for backwards compatibility.

        Raises
        ------
        ValueError
            If no crawler is available, the computed redis key is blank,
            or ``redis_batch_size`` cannot be coerced to an int.
        """
        if self.server is not None:
            # Already configured; make repeated calls a no-op.
            return

        if crawler is None:
            # We allow optional crawler argument to keep backwards
            # compatibility.
            # XXX: Raise a deprecation warning.
            crawler = getattr(self, "crawler", None)

        if crawler is None:
            raise ValueError("crawler is required")

        settings = crawler.settings

        if self.redis_key is None:
            self.redis_key = settings.get("REDIS_START_URLS_KEY", defaults.START_URLS_KEY)

        if self.project_type is None:
            self.project_type = settings.get("PROJECT_TYPE", "victor")

        # NOTE(review): this overwrites whatever REDIS_START_URLS_KEY produced
        # above -- the settings-derived key is effectively dead. The braces
        # around the spider name look like a redis-cluster hash tag; confirm
        # before simplifying.
        self.redis_key = '{%(slot_key)s}_%(project_type)s_queue' % {
            'project_type': self.project_type,
            'slot_key': self.name,
        }

        if not self.redis_key.strip():
            raise ValueError("redis_key must not be empty")

        if self.redis_batch_size is None:
            # TODO: Deprecate this setting (REDIS_START_URLS_BATCH_SIZE).
            self.redis_batch_size = settings.getint(
                "REDIS_START_URLS_BATCH_SIZE", settings.getint("CONCURRENT_REQUESTS")
            )

        try:
            self.redis_batch_size = int(self.redis_batch_size)
        except (TypeError, ValueError):
            raise ValueError("redis_batch_size must be an integer")

        if self.redis_encoding is None:
            self.redis_encoding = settings.get("REDIS_ENCODING", defaults.REDIS_ENCODING)

        self.logger.info(
            "Reading start URLs from redis key '%(redis_key)s' "
            # Fixed: the closing parenthesis was missing from the message.
            "(batch size: %(redis_batch_size)s, encoding: %(redis_encoding)s)",
            self.__dict__,
        )

        self.server = connection.from_settings(crawler.settings)
        # The idle signal is called when the spider has no requests left,
        # that's when we will schedule new requests from redis queue
        crawler.signals.connect(self.spider_idle, signal=signals.spider_idle)

    def next_requests(self):
        """Yield up to ``redis_batch_size`` requests built from redis messages.

        Each popped message is JSON-decoded (when it arrives as bytes) and
        handed to the spider's ``start_task`` hook, which may return a single
        request or an iterable of requests.
        """
        # use_set = self.settings.getbool("REDIS_START_URLS_AS_SET", defaults.START_URLS_AS_SET)
        # fetch_one = self.server.spop if use_set else self.server.lpop

        try:
            found = 0
            while found < self.redis_batch_size:
                try:
                    data = filter_pop_redis(self.server, self.name, self.project_type)
                except ResponseError:
                    # Treat redis protocol errors as "queue empty" (best
                    # effort), but leave a trace instead of swallowing them.
                    self.logger.warning("filter_pop_redis raised ResponseError", exc_info=True)
                    data = None
                if isinstance(data, bytes):
                    # Messages are expected to be JSON-encoded payloads.
                    # NOTE(review): str payloads are passed through undecoded
                    # -- confirm producers always write bytes.
                    data = json.loads(bytes_to_str(data))

                reqs = None
                if not data:
                    # Queue drained (or error above): stop this batch.
                    break

                if hasattr(self, 'start_task'):
                    reqs = self.start_task(data)
                    self.idle_times += 1
                    if self.idle_times == 50:
                        self.idle_logger.record_idle_log(self, self.project_type, step="start_task")
                        self.idle_times = 0
                # NOTE(review): without a start_task hook the popped message
                # is silently dropped -- confirm that is intended.

                if reqs:
                    if isinstance(reqs, Iterable):
                        for req in reqs:
                            yield req
                    else:
                        yield reqs
                    # Counts messages (not individual requests) per batch.
                    found += 1
        except Exception as e:
            # Best-effort: never let a bad message kill scheduling.
            self.logger.error('Read Next Requests Error: ' + str(e))

    def make_requests_from_url(self, url, meta=None, callback=None, errback=None, headers=None):
        """Build a non-deduplicated Request for ``url``.

        ``meta`` and ``headers`` previously used mutable default arguments
        (``{}``); they now default to ``None`` and are deep-copied so callers
        may safely reuse and mutate the dicts they pass in.
        """
        return Request(
            url,
            headers={} if headers is None else deepcopy(headers),
            meta={} if meta is None else deepcopy(meta),
            callback=callback,
            errback=errback,
            dont_filter=True,
        )

    def schedule_next_request(self):
        """Pull the next batch from redis and hand it to the engine."""
        for req in self.next_requests():
            self.crawler.engine.crawl(req, spider=self)

    def spider_idle(self):
        """Signal handler: refill from redis and keep the spider alive."""
        self.logger.info('Spider Idle')

        self.schedule_next_request()
        # Always raised so scrapy never closes the spider on idle.
        raise DontCloseSpider


class RedisSpider(RedisMixin, Spider):
    """Spider that reads urls from redis queue when idle.

    Attributes
    ----------
    redis_key : str (default: REDIS_START_URLS_KEY)
        Redis key where to fetch start URLs from.
    redis_batch_size : int (default: CONCURRENT_REQUESTS)
        Number of messages to fetch from redis on each attempt.
    redis_encoding : str (default: REDIS_ENCODING)
        Encoding to use when decoding messages from redis queue.

    Settings
    --------
    REDIS_START_URLS_KEY : str (default: "<spider.name>:start_urls")
        Default Redis key where to fetch start URLs from.
    REDIS_START_URLS_BATCH_SIZE : int (deprecated by CONCURRENT_REQUESTS)
        Default number of messages to fetch from redis on each attempt.
    REDIS_START_URLS_AS_SET : bool (default: False)
        Use SET operations to retrieve messages from the redis queue. If False,
        the messages are retrieved using the LPOP command.
        NOTE: currently unused -- this fork pops messages through
        ``filter_pop_redis`` instead of spop/lpop.
    REDIS_ENCODING : str (default: "utf-8")
        Default encoding to use when decoding messages from redis queue.

    """

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Fixed: the first parameter of a classmethod receives the class and
        # was misleadingly named ``self``.
        obj = super(RedisSpider, cls).from_crawler(crawler, *args, **kwargs)
        obj.setup_redis(crawler)
        return obj


class RedisCrawlSpider(RedisMixin, CrawlSpider):
    """Spider that reads urls from redis queue when idle.

    Attributes
    ----------
    redis_key : str (default: REDIS_START_URLS_KEY)
        Redis key where to fetch start URLs from.
    redis_batch_size : int (default: CONCURRENT_REQUESTS)
        Number of messages to fetch from redis on each attempt.
    redis_encoding : str (default: REDIS_ENCODING)
        Encoding to use when decoding messages from redis queue.

    Settings
    --------
    REDIS_START_URLS_KEY : str (default: "<spider.name>:start_urls")
        Default Redis key where to fetch start URLs from.
    REDIS_START_URLS_BATCH_SIZE : int (deprecated by CONCURRENT_REQUESTS)
        Default number of messages to fetch from redis on each attempt.
    REDIS_START_URLS_AS_SET : bool (default: True)
        Use SET operations to retrieve messages from the redis queue.
        NOTE: currently unused -- this fork pops messages through
        ``filter_pop_redis`` instead of spop/lpop.
    REDIS_ENCODING : str (default: "utf-8")
        Default encoding to use when decoding messages from redis queue.

    """

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Fixed: the first parameter of a classmethod receives the class and
        # was misleadingly named ``self``.
        obj = super(RedisCrawlSpider, cls).from_crawler(crawler, *args, **kwargs)
        obj.setup_redis(crawler)
        return obj
