# -*- coding: utf-8 -*-
import logging
import json
import pika
from scrapy.utils.request import request_from_dict
from scrapy.http import Request
from scrapy import BaseScheduler
from . import connection

logger = logging.getLogger(__name__)

class Scheduler(BaseScheduler):
    """Scrapy scheduler backed by a RabbitMQ queue.

    Requests are serialized to JSON and published to a per-spider queue
    named ``<spider.name>:requests``; any worker connected to the same
    broker can consume them, enabling distributed crawling.
    """

    def __init__(self, crawler):
        # NOTE(review): `from scrapy import BaseScheduler` (top of file) is not
        # a documented Scrapy import path, and Scrapy's own BaseScheduler
        # defines no __init__ — confirm the base class actually accepts
        # `crawler` here.
        super().__init__(crawler)
        self.crawler = crawler
        # Stats collector used by enqueue/dequeue counters below.
        # Bug fix: this attribute was never set before, so every
        # `self.stats.inc_value(...)` call raised AttributeError.
        self.stats = crawler.stats
        self.spider = None
        self.connection = None
        self.channel = None
        self.queue_name = None

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory hook."""
        return cls(crawler)

    def open(self, spider):
        """Open the scheduler: connect to RabbitMQ for *spider*."""
        self.spider = spider
        self.queue_name = f'{spider.name}:requests'
        self.connection, self.channel = connection.connect_rabbitmq(spider.name)
        return self

    def close(self, reason):
        """Close the scheduler, releasing the RabbitMQ connection."""
        if self.connection and not self.connection.is_closed:
            self.connection.close()

    def _ensure_channel(self):
        """(Re)establish the RabbitMQ channel if it is missing or closed."""
        if not self.channel or self.channel.is_closed:
            self.connection, self.channel = connection.connect_rabbitmq(self.spider.name)

    def enqueue_request(self, request):
        """Serialize *request* to JSON and publish it to the queue.

        Returns True on success, False on failure (per the scheduler
        contract, False means the request was not scheduled).

        Bug fix: this method was commented out — leaving the scheduler
        unable to accept requests — and the dead code called
        ``request_to_dict``, which was removed from
        ``scrapy.utils.request`` in Scrapy 2.6; ``Request.to_dict`` is
        the replacement.
        """
        self._ensure_channel()
        try:
            request_dict = request.to_dict(spider=self.spider)
            self.channel.basic_publish(
                exchange='',
                routing_key=self.queue_name,
                body=json.dumps(request_dict),
                properties=pika.BasicProperties(
                    delivery_mode=2,  # persistent message
                ),
            )
            self.stats.inc_value('scheduler/enqueued/rabbitmq')
            return True
        except Exception as e:
            logger.error(f"Error enqueueing request: {e}")
            return False

    def next_request(self):
        """Pop the next request from the queue, or None if empty or on error."""
        self._ensure_channel()
        try:
            method_frame, header_frame, body = self.channel.basic_get(
                queue=self.queue_name,
                auto_ack=True
            )
            if method_frame:
                request_dict = json.loads(body.decode())
                request = request_from_dict(request_dict, self.spider)
                self.stats.inc_value('scheduler/dequeued/rabbitmq')
                return request
        except Exception as e:
            logger.error(f"Error getting next request: {e}")
        return None

    def has_pending_requests(self):
        """Return True if the queue currently holds any messages."""
        if not self.channel or self.channel.is_closed:
            return False
        try:
            # passive=True: inspect the existing queue without (re)declaring it.
            queue = self.channel.queue_declare(
                queue=self.queue_name,
                passive=True
            )
            return queue.method.message_count > 0
        except Exception as e:
            logger.error(f"Error checking pending requests: {e}")
            return False

    def __len__(self):
        """Number of messages currently waiting in the queue (0 on error)."""
        try:
            self._ensure_channel()
            # passive=True only inspects the queue; the stray durable=True
            # previously passed here is dropped — a passive declare with
            # mismatched arguments can raise on some brokers.
            queue = self.channel.queue_declare(
                queue=self.queue_name,
                passive=True
            )
            return queue.method.message_count
        except Exception as e:
            logger.error(f"Error getting queue length: {e}")
            return 0
