#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/2/3 10:31
# @Author  : Ramsey
# @Site    : zh
# @File    : processor.py
# @Software: PyCharm
from asyncio import Queue
from spider_framework.http.request import Request
from spider_framework.items.items import Item
from typing import Optional
from spider_framework.pipeline.pipeline_manager import PipelineManager


class Processor:
    """Consume spider outputs (Requests or items) and route them.

    Outputs are buffered in an asyncio FIFO queue. ``Request`` instances are
    handed back to the crawler engine for scheduling; anything else is treated
    as an item and pushed through the pipeline chain.
    """

    def __init__(self, crawler):
        # FIFO buffer of spider outputs awaiting dispatch.
        self.queue: Queue = Queue()
        self.crawler = crawler
        # Created lazily in open(); stays None until then.
        self.pipeline: "Optional[PipelineManager]" = None

    def open(self):
        """Build the pipeline manager; must be called before processing items."""
        self.pipeline = PipelineManager.create_instance(self.crawler)

    async def process(self):
        """Pop one output from the queue and dispatch it by type."""
        result = await self.queue.get()
        if isinstance(result, Request):
            # Requests go back to the engine's scheduler.
            await self.crawler.engine._push_request(request=result)
        else:
            await self.process_item(result)

    async def process_item(self, item: "Item"):
        """Run ``item`` through the pipelines, then record the success.

        Raises:
            RuntimeError: if :meth:`open` was never called, so there is no
                pipeline manager to process the item with.
        """
        if self.pipeline is None:
            raise RuntimeError(
                "Processor.open() must be called before processing items"
            )
        await self.pipeline.process_item(item=item)
        # Count only after the pipelines completed without raising, so a
        # failed item is not reported as a success.
        self.crawler.status_collector.increase_value("item_successful_count")

    async def add_2_queue(self, output):
        """Enqueue ``output`` and immediately process one queued entry."""
        await self.queue.put(output)
        await self.process()

    def idle(self) -> bool:
        """Return True when no outputs are waiting to be processed."""
        return len(self) == 0

    def idea(self) -> bool:
        # Backward-compatible alias for the original (misspelled) public name.
        return self.idle()

    def __len__(self):
        # Number of queued, not-yet-dispatched outputs.
        return self.queue.qsize()


if __name__ == "__main__":
    # Library module only — no standalone entry point.
    pass
