from datetime import datetime

from itemadapter import ItemAdapter
from scrapy.utils.serialize import ScrapyJSONEncoder


class WebCrawlerPipeline:
    """Debug pipeline: pretty-prints every processed item to the spider log."""

    def __init__(self):
        # Encoder that serializes Scrapy-specific types and keeps
        # non-ASCII characters human-readable in the log output.
        self.encoder = ScrapyJSONEncoder(indent=2, ensure_ascii=False)

    def process_item(self, item, spider):
        """Log the item with its context, then pass it along unchanged."""
        payload = {
            'spider': spider.name,
            'item_type': type(item).__name__,
            'data': dict(ItemAdapter(item)),
            'timestamp': datetime.now().isoformat(),
        }

        log = spider.logger.info
        divider = "\033[92m" + "=" * 50 + "\033[0m"  # green separator line
        log(divider)
        log("\033[93mDEBUG ITEM:\033[0m")  # yellow heading
        log(self.encoder.encode(payload))
        log(divider)

        # Returning the item keeps the downstream pipeline chain running.
        return item