#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2025/1/25 22:49
# @Author  : Ramsey
# @Site    : zh
# @File    : downloader.py
# @Software: PyCharm
import httpx
from typing import Final, Set, Optional
from contextlib import asynccontextmanager
from spider_framework.utils.log import spider_logger
from spider_framework.http.response import Response
from spider_framework.middleware.middleware_manager import MiddlewareManager


class ActiveManager:
    """Tracks the set of requests currently in flight.

    Instances are callable as an async context manager: entering marks the
    request active, exiting always un-marks it, and ``len(manager)`` reports
    how many requests are active at any moment.
    """

    def __init__(self):
        # Requests currently being processed; Final: the set object itself
        # is never rebound, only mutated.
        self._active: Final[Set] = set()

    def add(self, request):
        """Mark *request* as in flight."""
        self._active.add(request)

    def remove(self, request):
        # discard (not remove): cleanup must be idempotent — a double
        # removal must not raise KeyError.
        self._active.discard(request)

    @asynccontextmanager
    async def __call__(self, request):
        """Async context manager that keeps *request* active for its duration.

        BUG FIX: the original yielded ``self.add(request)`` (always ``None``)
        from inside the ``try``; if ``add`` raised, ``finally`` would try to
        remove an element that was never added. Adding before the ``try``
        keeps enter/exit symmetric.
        """
        self.add(request)
        try:
            yield
        finally:
            self.remove(request)

    def __len__(self):
        # Number of requests currently in flight.
        return len(self._active)


class HttpxDownloader:
    """Downloader backed by httpx.

    ``fetch`` runs a request through the middleware chain while tracking it
    in an :class:`ActiveManager`; ``download`` performs the actual HTTP call
    (one ``AsyncClient`` per request, honoring the request's proxy).
    """

    def __init__(self, crawler):
        self.crawler = crawler
        self._active = ActiveManager()  # in-flight request tracker
        self.spider_logger = spider_logger(name=self.__class__.__name__,
                                           log_level=self.crawler.settings.get("LOG_LEVEL"))
        # NOTE(review): _client is never assigned after __init__ — download()
        # opens a fresh AsyncClient per request; presumably reserved for a
        # future shared client.
        self._client: Optional[httpx.AsyncClient] = None
        self._timeout: Optional[httpx.Timeout] = None
        self.middleware_manager: Optional[MiddlewareManager] = None

    def open(self):
        """Prepare the downloader: build the timeout and middleware chain."""
        # BUG FIX: the original f-string never closed the first "<...>"
        # ("<downloader class X <concurrency N>").
        self.spider_logger.info(
            f"{self.crawler.spider} <downloader class {type(self).__name__}> "
            f"<concurrency {self.crawler.settings.get('CONCURRENCY')}>")
        request_timeout = self.crawler.settings.get("REQUEST_TIMEOUT")
        self._timeout = httpx.Timeout(timeout=request_timeout)
        self.middleware_manager = MiddlewareManager.create_instance(crawler=self.crawler)

    async def fetch(self, request) -> Optional[Response]:
        """Run *request* through the middlewares while it is marked active."""
        async with self._active(request):
            return await self.middleware_manager.download(request)

    async def download(self, request) -> Optional[Response]:
        """Perform the HTTP request; return a Response, or None on any failure."""
        try:
            proxies = request.proxy
            async with httpx.AsyncClient(timeout=self._timeout, proxy=proxies) as client:
                self.spider_logger.debug(f"请求下载中, url: {request.url}, 请求方式: {request.method}")
                response = await client.request(request.method, request.url, headers=request.headers,
                                                cookies=request.cookies, data=request.data)
                body = await response.aread()
        except Exception as error:
            # Deliberate best-effort: log and signal failure with None rather
            # than let a single bad request crash the engine.
            self.spider_logger.error(f"请求下载失败, url: {request.url}, 请求方式: {request.method}, 错误信息: {error}")
            return None
        else:
            return self.structure_response(request, response, body)

    @staticmethod
    def structure_response(request, response, body) -> Response:
        """Wrap an httpx response into the framework's own Response type."""
        return Response(
            url=request.url,
            headers=dict(response.headers),
            status=response.status_code,
            body=body,
            request=request,
        )

    def idle(self) -> bool:
        """True when no requests are in flight."""
        return len(self) == 0

    def idea(self):
        # NOTE(review): "idea" looks like a typo for "idle"; kept as a
        # delegating alias so existing callers keep working.
        return self.idle()

    def __len__(self):
        # Number of requests currently being downloaded.
        return len(self._active)


if __name__ == "__main__":
    pass
