#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/2/28 09:38
# @Author  : 王凯
# @File    : spiders.py
# @Project : scrapy_spider
import json
import os
from collections.abc import Iterable
from queue import Queue, Empty

from scrapy import signals, FormRequest
from scrapy.exceptions import DontCloseSpider
from scrapy.spiders import Spider, CrawlSpider
import time

from . import defaults
from ..scrapy_redis_custom import connection
from ..scrapy_redis_custom.utils import bytes_to_str, is_dict, TextColor


class RedisTaskMixin(object):
    """Mixin class to implement reading tasks (start urls) from a redis queue.

    One task at a time is popped from redis, buffered in a local blocking
    queue, and turned into a request via :meth:`make_request_from_data`.
    The task currently in flight is kept in ``task_obj`` so it can be pushed
    back into redis if the spider closes before finishing it.
    """

    # Redis key to read tasks from; may contain a ``%(name)s`` placeholder
    # that is filled with the spider name in ``setup_redis``.
    redis_key = None
    # How many tasks to pop per fetch (forced to 1 in ``setup_redis``).
    redis_batch_size = None
    # Encoding used when decoding raw bytes popped from redis.
    redis_encoding = None

    # Redis client placeholder.
    server = None

    # Idle start time (unix seconds); reset whenever redis still holds tasks.
    spider_idle_start_time = int(time.time())
    # Seconds of idleness after which the spider may close (0 = never close).
    max_idle_time = None

    # In-process single-slot blocking queue buffering the next task to run.
    local_block_queue = None

    # Task currently being processed; recovered into redis on close.
    task_obj = None
    # Millisecond timestamp of when the current task started (for timing logs).
    spider_new_request_start_time = None

    def start_requests(self):
        """Returns a batch of start requests from redis."""
        return self.next_requests()

    def setup_redis(self, crawler=None):
        """Setup redis connection and idle signal.

        This should be called after the spider has set its crawler object.

        :param crawler: the ``Crawler`` this spider is bound to; falls back to
            ``self.crawler`` for backwards compatibility.
        :raises ValueError: if no crawler is available, the resolved
            ``redis_key`` is blank, or ``max_idle_time`` is not an integer.
        """
        if self.server is not None:
            # Already configured -- keep the call idempotent.
            return

        if crawler is None:
            # We allow optional crawler argument to keep backwards
            # compatibility.
            # XXX: Raise a deprecation warning.
            crawler = getattr(self, "crawler", None)

        if crawler is None:
            raise ValueError("crawler is required")

        settings = crawler.settings

        log_level = settings.get("LOG_LEVEL", "DEBUG")
        if log_level:
            # Propagate the scrapy log level to plain/loguru environments.
            os.environ.setdefault("LOG_LEVEL", log_level)
            os.environ.setdefault("LOGURU_LEVEL", log_level)

        if self.redis_key is None:
            self.redis_key = settings.get(
                "REDIS_START_URLS_KEY",
                defaults.START_URLS_KEY,
            )

        self.redis_key = self.redis_key % {"name": self.name}

        if not self.redis_key.strip():
            raise ValueError("redis_key must not be empty")

        if self.redis_encoding is None:
            self.redis_encoding = settings.get("REDIS_ENCODING", defaults.REDIS_ENCODING)

        # One task at a time: the local queue acts as a single-slot buffer.
        self.redis_batch_size = 1
        self.local_block_queue = Queue(maxsize=self.redis_batch_size)

        self.logger.info(
            "Reading start URLs from redis key '%(redis_key)s' "
            "(batch size: %(redis_batch_size)s, encoding: %(redis_encoding)s)",
            self.__dict__,
        )

        self.server = connection.from_settings(crawler.settings)

        # Pick pop/push/count primitives matching the configured redis
        # collection type (set, sorted set, or plain list).
        if settings.getbool("REDIS_START_URLS_AS_SET", defaults.START_URLS_AS_SET):
            self.fetch_data = self.server.spop
            self.recovery_running_data = self.server.sadd
            self.count_size = self.server.scard
        elif settings.getbool("REDIS_START_URLS_AS_ZSET", defaults.START_URLS_AS_ZSET):
            self.fetch_data = self.pop_priority_queue
            self.recovery_running_data = self.server.zadd
            self.count_size = self.server.zcard
        else:
            self.fetch_data = self.pop_list_queue
            self.recovery_running_data = self.server.lpush
            self.count_size = self.server.llen

        if self.max_idle_time is None:
            self.max_idle_time = settings.get("MAX_IDLE_TIME_BEFORE_CLOSE", defaults.MAX_IDLE_TIME)

        try:
            self.max_idle_time = int(self.max_idle_time)
        except (TypeError, ValueError):
            raise ValueError("max_idle_time must be an integer")

        # The idle signal is called when the spider has no requests left,
        # that's when we will schedule new requests from redis queue
        crawler.signals.connect(self.spider_idle, signal=signals.spider_idle)
        # Recover the in-flight task when the spider stops abnormally.
        crawler.signals.connect(self.spider_close, signal=signals.spider_closed)

    def spider_close(self):
        """Push the in-flight task back into redis so it is not lost.

        Connected to the ``spider_closed`` signal.  A no-op when no task was
        being processed (previously this pushed a bare ``None`` into redis,
        which raises ``redis.DataError`` for list/set backends).
        """
        if self.task_obj is None:
            return
        if isinstance(self.task_obj, dict):
            payload = json.dumps(self.task_obj, ensure_ascii=False)
        else:
            payload = self.task_obj
        if self.server.zadd == self.recovery_running_data:
            # zadd expects a mapping of member -> score.
            data = {payload: 0}
        else:
            data = payload
        self.recovery_running_data(self.redis_key, data)
        self.logger.warning(f"爬虫非正常结束，回收任务 {self.task_obj}")

    def pop_list_queue(self, redis_key, batch_size):
        """Atomically pop up to ``batch_size`` items from the head of a list."""
        with self.server.pipeline() as pipe:
            pipe.lrange(redis_key, 0, batch_size - 1)
            pipe.ltrim(redis_key, batch_size, -1)
            datas, _ = pipe.execute()
        return datas

    def pop_priority_queue(self, redis_key, batch_size):
        """Atomically pop the ``batch_size`` highest-scored members of a zset."""
        with self.server.pipeline() as pipe:
            pipe.zrevrange(redis_key, 0, batch_size - 1)
            pipe.zremrangebyrank(redis_key, -batch_size, -1)
            datas, _ = pipe.execute()
        return datas

    def next_requests(self):
        """Yield requests for the next task pulled from the local queue.

        Logs completion of the previous task (with timing), blocks until a
        new task is available, then builds request(s) from it.
        """
        found = 0
        cost_time = 0
        if self.task_obj:
            if self.spider_new_request_start_time:
                cost_time = int(time.time() * 1000) - self.spider_new_request_start_time
            self.logger.info(
                f"************ spider end [{self.task_obj}] 完成采集"
                f"{f',耗时 {cost_time / 1000}s' if cost_time else ''} "
                f"************"
            )
            self.end_callback(self.task_obj)
        else:
            self.logger.info("************ spider wait task ************")

        if self.local_block_queue.empty():
            self.put_task_to_queue()

        # Blocks until a task is available (put_task_to_queue may have found
        # nothing if redis was empty and add_task produced no work).
        data = self.local_block_queue.get()

        self.logger.info(f"************ spider start 任务 [{data}] 开始采集 ************")
        self.spider_new_request_start_time = int(time.time() * 1000)
        self.task_obj = data
        self.start_callback(data)

        reqs = self.make_request_from_data(bytes_to_str(data, self.redis_encoding))
        if isinstance(reqs, Iterable):
            for req in reqs:
                yield req
                found += 1
                self.logger.debug(f"start req url:{req.url}")
        elif reqs:
            yield reqs
            found += 1
        else:
            self.logger.debug(f"Request not made from data: {data}")

        if found:
            self.logger.debug(f"Read {found} requests from '{self.redis_key}'")

        # Prefetch the next task so the spider does not stall between tasks.
        self.put_task_to_queue()

    def start_callback(self, task_obj, *args, **kwargs):
        """Hook invoked right before a task starts; override in subclasses."""
        ...

    def end_callback(self, task_obj, *args, **kwargs):
        """Hook invoked after a task finishes; override in subclasses."""
        ...

    def make_request_from_data(self, formatted_data: str):
        """
        make a request form redis_task data
        :return scrapy.Request
        """
        if is_dict(formatted_data):
            parameter = json.loads(formatted_data)
        else:
            self.logger.warning(
                f"{TextColor.WARNING}WARNING: String request is deprecated, please use JSON data format. \
                Detail information, please check https://github.com/rmax/scrapy-redis#features{TextColor.ENDC}"
            )
            return FormRequest(formatted_data, dont_filter=True)

        if parameter.get("url", None) is None:
            self.logger.warning(f"{TextColor.WARNING}The data from Redis has no url key in push data{TextColor.ENDC}")
            return []

        url = parameter.pop("url")
        method = parameter.pop("method").upper() if "method" in parameter else "GET"
        metadata = parameter.pop("meta") if "meta" in parameter else {}

        # Remaining keys become form data / query parameters.
        return FormRequest(url, dont_filter=True, method=method, formdata=parameter, meta=metadata)

    def put_task_to_queue(self):
        """Move one task from redis into the local blocking queue.

        When redis is empty, :meth:`add_task` gets a chance to produce work
        first (override it to generate tasks on demand).
        """
        if not self.count_size(self.redis_key):
            self.add_task()

        if self.local_block_queue.empty():
            datas = self.fetch_data(self.redis_key, 1)
            for data in datas:
                if is_dict(data):
                    self.local_block_queue.put_nowait(json.loads(data))
                else:
                    self.logger.warning(
                        f"{TextColor.WARNING}WARNING: String request is deprecated, please use JSON data format. \
                        Detail information, please check https://github.com/rmax/scrapy-redis#features{TextColor.ENDC}"
                    )
                    self.local_block_queue.put_nowait(data)

    def add_task(self):
        """Hook to generate tasks when redis is empty; override in subclasses."""
        ...

    def schedule_next_requests(self):
        """Schedules a request if available"""
        for req in self.next_requests():
            if req:
                self.crawler.engine.crawl(req)

    def spider_idle(self):
        """
        Schedules a request if available, otherwise waits.
        or close spider when waiting seconds > MAX_IDLE_TIME_BEFORE_CLOSE.
        MAX_IDLE_TIME_BEFORE_CLOSE will not affect SCHEDULER_IDLE_BEFORE_CLOSE.
        """
        if self.server is not None and self.count_size(self.redis_key) > 0:
            # Work remains in redis -- restart the idle clock.
            self.spider_idle_start_time = int(time.time())

        self.schedule_next_requests()

        idle_time = int(time.time()) - self.spider_idle_start_time
        if self.max_idle_time != 0 and idle_time >= self.max_idle_time:
            # Idle too long: let scrapy close the spider.
            return
        raise DontCloseSpider


class RedisTaskSpider(RedisTaskMixin, Spider):
    """Spider that pulls its start tasks from a redis queue while idle.

    Attributes
    ----------
    redis_key : str (default: REDIS_START_URLS_KEY)
        Redis key where to fetch start URLs from.
    redis_batch_size : int (default: CONCURRENT_REQUESTS)
        Number of messages to fetch from redis on each attempt.
    redis_encoding : str (default: REDIS_ENCODING)
        Encoding to use when decoding messages from redis queue.

    Settings
    --------
    REDIS_START_URLS_KEY : str (default: "scrapy:<spider.name>:start_urls")
        Default Redis key where to fetch start URLs from.
    REDIS_START_URLS_BATCH_SIZE : int (deprecated by CONCURRENT_REQUESTS)
        Default number of messages to fetch from redis on each attempt.
    REDIS_START_URLS_AS_ZSET : bool (default: True)
        Use ZSET operations to retrieve messages from the redis queue. If
        False, the messages are retrieved with LPOP-style list commands.
    REDIS_ENCODING : str (default: "utf-8")
        Default encoding to use when decoding messages from redis queue.

    """

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Build the spider as usual, then wire up the redis task source.
        spider = super().from_crawler(crawler, *args, **kwargs)
        spider.setup_redis(crawler)
        return spider


class RedisTaskCrawlSpider(RedisTaskMixin, CrawlSpider):
    """CrawlSpider variant that pulls its start tasks from a redis queue.

    Attributes
    ----------
    redis_key : str (default: REDIS_START_URLS_KEY)
        Redis key where to fetch start URLs from.
    redis_batch_size : int (default: CONCURRENT_REQUESTS)
        Number of messages to fetch from redis on each attempt.
    redis_encoding : str (default: REDIS_ENCODING)
        Encoding to use when decoding messages from redis queue.

    Settings
    --------
    REDIS_START_URLS_KEY : str (default: "<spider.name>:start_urls")
        Default Redis key where to fetch start URLs from.
    REDIS_START_URLS_BATCH_SIZE : int (deprecated by CONCURRENT_REQUESTS)
        Default number of messages to fetch from redis on each attempt.
    REDIS_START_URLS_AS_ZSET : bool (default: True)
        Use ZSET operations to retrieve messages from the redis queue.
    REDIS_ENCODING : str (default: "utf-8")
        Default encoding to use when decoding messages from redis queue.

    """

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Build the crawl spider as usual, then wire up the redis task source.
        spider = super().from_crawler(crawler, *args, **kwargs)
        spider.setup_redis(crawler)
        return spider
