#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/2/6 14:22
# @Author  : Ramsey
# @Site    : zh
# @File    : pipeline_manager.py
# @Software: PyCharm
from pprint import pformat
from spider_framework.utils.transform import common_call
from spider_framework.utils.log import spider_logger
from spider_framework.utils.error import PipelineInitError
from spider_framework.utils.project import load_class


class PipelineManager:
    """Load, instantiate, and dispatch the item pipelines configured in settings.

    Every class path listed in the ``PIPELINES`` setting must expose a
    ``create_instance`` classmethod.  Instances that expose ``process_item``
    are collected in configuration order and invoked for each scraped item.
    """

    def __init__(self, crawler):
        self.crawler = crawler
        self.methods = []    # ordered list of process_item callables
        self.pipelines = []  # instantiated pipeline objects
        self.logger = spider_logger(self.__class__.__name__, log_level=self.crawler.settings.get("LOG_LEVEL"))
        pipelines = self.crawler.settings.get("PIPELINES")
        self._add_pipeline(pipelines)
        self._add_methods()

    @classmethod
    def create_instance(cls, *args, **kwargs):
        """Factory hook used by the framework to build the manager."""
        return cls(*args, **kwargs)

    def _add_pipeline(self, pipelines):
        """Instantiate each configured pipeline class.

        Args:
            pipelines: iterable of dotted class paths, or None/empty when
                no pipelines are configured.

        Raises:
            PipelineInitError: if a pipeline class lacks ``create_instance``.
        """
        # settings.get("PIPELINES") may return None when the setting is
        # absent; treat that as "no pipelines" instead of raising TypeError.
        for pipeline in pipelines or []:
            pipeline_cls = load_class(pipeline)
            if not hasattr(pipeline_cls, "create_instance"):
                raise PipelineInitError(f"{pipeline_cls} not have `create_instance` method")
            self.pipelines.append(pipeline_cls.create_instance(self.crawler))
        if pipelines:
            self.logger.info(f"enable pipelines: \n {pformat(pipelines)}")

    def _add_methods(self):
        """Collect ``process_item`` from each pipeline, preserving order."""
        for pipeline in self.pipelines:
            if hasattr(pipeline, "process_item"):
                self.methods.append(pipeline.process_item)

    async def process_item(self, item):
        """Run *item* through every registered pipeline method sequentially."""
        for method in self.methods:
            await common_call(method, item, self.crawler.spider)


if __name__ == "__main__":
    # Module is a library component; nothing to run directly.
    pass
