# -*- coding: utf-8 -*-
"""
Item pipelines for bot_scrapy
"""
import json
import logging
import os
import re

import pika
from itemadapter import ItemAdapter
from scrapy.exceptions import DropItem
from scrapy.pipelines.files import FilesPipeline
from scrapy.pipelines.images import ImagesPipeline
from scrapy.utils.misc import load_object

from . import connection

logger = logging.getLogger(__name__)

class RabbitmqPipeline:
    """Pushes serialized item into a RabbitMQ queue

    Settings
    --------
    RABBITMQ_QUEUE_NAME : str
        RabbitMQ queue name format string
    RABBITMQ_SERIALIZER : str
        Object path to serializer function.
    """

    def __init__(self, spider_name, serialize_func=json.dumps):
        """Initialize pipeline.

        Parameters
        ----------
        spider_name : str
            Name of the spider
        serialize_func : callable
            Items serializer function.
        """
        self.spider_name = spider_name
        self.serialize = serialize_func
        self.connection = None
        self.channel = None
        # Connect eagerly so broker/configuration problems surface at
        # startup rather than on the first item.
        self.connect()

    @classmethod
    def from_settings(cls, settings, spider_name=None):
        """Build the pipeline from a settings object.

        ``spider_name`` is a backward-compatible addition: the previous
        version called ``cls(**params)`` without it, which always raised
        TypeError because ``spider_name`` is a required argument.
        """
        params = {'spider_name': spider_name}
        serializer_path = settings.get('RABBITMQ_SERIALIZER')
        if serializer_path:
            # load_object resolves a dotted path such as "json.dumps".
            params['serialize_func'] = load_object(serializer_path)
        return cls(**params)

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy entry point: build the pipeline from the crawler.

        Also honours RABBITMQ_SERIALIZER; previously only
        ``from_settings`` did, so crawler-created pipelines silently
        fell back to ``json.dumps``.
        """
        params = {'spider_name': crawler.spider.name}
        serializer_path = crawler.settings.get('RABBITMQ_SERIALIZER')
        if serializer_path:
            params['serialize_func'] = load_object(serializer_path)
        return cls(**params)

    def ensure_connection(self):
        """Ensure the RabbitMQ connection is usable, reconnecting if not.

        Returns True when a live connection/channel pair is available,
        False when (re)connecting failed.
        """
        try:
            if self.connection is None or self.connection.is_closed or \
               self.channel is None or self.channel.is_closed:
                self.connect()
            return True
        except Exception as e:
            logger.error(f"RabbitMQ connection check failed: {e}")
            return False

    def connect(self):
        """Open the RabbitMQ connection and channel for this spider.

        Raises whatever ``connection.connect_rabbitmq`` raises, after
        resetting the cached connection/channel to None.
        """
        try:
            self.connection, self.channel = connection.connect_rabbitmq(self.spider_name)
            logger.info("Successfully connected to RabbitMQ")
        except Exception as e:
            logger.error(f"Failed to connect to RabbitMQ: {e}")
            self.connection = None
            self.channel = None
            raise

    def process_item(self, item, spider):
        """Serialize *item* and publish it, retrying on broker failures.

        Always returns the item so the rest of the pipeline chain runs,
        even when publishing ultimately failed.
        """
        retries = 3  # maximum number of attempts
        for attempt in range(retries):
            try:
                # Check connection state and reconnect if needed.
                if not self.ensure_connection():
                    raise Exception("Failed to ensure RabbitMQ connection")

                data = self.serialize(ItemAdapter(item).asdict())
                queue_name = connection.get_queue_name(spider.name)

                self.channel.basic_publish(
                    exchange='',
                    # BUG FIX: was ``routing_key=self`` (the pipeline
                    # object) — pika requires a string; route to the
                    # spider's queue instead.
                    routing_key=queue_name,
                    body=data,
                    properties=pika.BasicProperties(
                        delivery_mode=2,  # make message persistent
                    )
                )
                logger.debug(f'Item sent to RabbitMQ queue: {queue_name}')
                break  # published successfully, stop retrying
            except Exception as e:
                logger.error(f'Failed to send item to RabbitMQ (attempt {attempt + 1}/{retries}): {str(e)}')
                if attempt == retries - 1:  # last attempt
                    logger.error('Max retries reached, giving up')
                else:
                    # Drop the stale connection so the next attempt reconnects.
                    self.connection = None
                    self.channel = None
        return item

    def close_spider(self, spider):
        """Close RabbitMQ connection when spider is closed."""
        if self.connection and not self.connection.is_closed:
            self.connection.close()
            self.connection = None
            self.channel = None


class BotImagesPipeline(ImagesPipeline):
    """Downloads item images, sanitizing filenames for the filesystem."""

    @classmethod
    def from_crawler(cls, crawler):
        # BUG FIX: the previous override was ``pass`` and therefore
        # returned None, which broke pipeline instantiation entirely.
        # Delegate to ImagesPipeline's own construction logic.
        return super().from_crawler(crawler)

    def clean_file_path(self, file_path):
        """Strip characters that are invalid in common filesystems."""
        invalid_chars = r'[<>:"/\\|?*]'
        return re.sub(invalid_chars, '', file_path)

    def file_path(self, request, response=None, info=None, *, item=None):
        """Return ``<title>_<index>.<ext>`` for the requested image URL.

        The index is the URL's position in the item's ``image_urls``
        list, keeping filenames stable across downloads.
        """
        num = item['image_urls'].index(request.url)
        # NOTE(review): taking everything after the last '.' assumes the
        # URL has no query string — a URL like "a.jpg?x=1" yields
        # "jpg?x=1" (the '?' is then stripped by clean_file_path).
        extension = request.url.split('.')[-1]
        filename = self.clean_file_path(f"{item['title']}_{num}.{extension}")
        return filename

    def thumb_path(self, request, thumb_id, response=None, info=None, *, item=None):
        """Return ``thumb/<title>_<index>.<ext>`` for a thumbnail.

        BUG FIX: the old code cleaned the full path *including* the
        "thumb/" prefix, and clean_file_path strips '/', so thumbnails
        were written as "thumbtitle_0.jpg" in the root directory.
        Clean only the basename, then prepend the directory.
        """
        num = item['image_urls'].index(request.url)
        extension = request.url.split('.')[-1]
        filename = self.clean_file_path(f"{item['title']}_{num}.{extension}")
        return f"thumb/{filename}"

    def item_completed(self, results, item, info):
        """Attach absolute paths of successful downloads to the item.

        Raises DropItem when no image download succeeded.
        """
        file_paths = [x["path"] for ok, x in results if ok]
        if not file_paths:
            raise DropItem("Item contains no files")
        adapter = ItemAdapter(item)
        adapter["file_paths"] = [os.path.join(self.store.basedir, x) for x in file_paths]
        return item


class BotFilesPipeline(FilesPipeline):
    """Downloads item files, naming them after the item's title."""

    def clean_file_path(self, file_path):
        """Remove characters that common filesystems disallow."""
        return re.sub(r'[<>:"/\\|?*]', '', file_path)

    def file_path(self, request, response=None, info=None, *, item=None):
        """Return ``<title>_<index>.<ext>`` for the requested file URL.

        The index is the URL's position in the item's ``file_urls`` list.
        """
        position = item['file_urls'].index(request.url)
        suffix = request.url.split('.')[-1]
        raw_name = f"{item['title']}_{position}.{suffix}"
        return self.clean_file_path(raw_name)

    def item_completed(self, results, item, info):
        """Store absolute paths of successful downloads on the item.

        Raises DropItem when every download failed.
        """
        stored = [res["path"] for ok, res in results if ok]
        if not stored:
            raise DropItem("Item contains no files")

        adapter = ItemAdapter(item)
        adapter["file_paths"] = [os.path.join(self.store.basedir, p) for p in stored]
        return item