from typing import Optional

import httpx
from aiohttp import ClientTimeout
from httpx import AsyncClient

from bald_spider import Response
from bald_spider.core.downloader import DownloaderBase


class HTTPXDownloader(DownloaderBase):
    """Downloader backed by ``httpx.AsyncClient``.

    A throwaway client is created per request inside :meth:`download`,
    so there is no persistent connection pool, and :meth:`closer` has
    nothing to release.
    """

    def __init__(self, crawler):
        super().__init__(crawler)
        # NOTE(review): _client is never assigned anywhere in this class —
        # download() builds a fresh AsyncClient per request. Consider
        # creating one shared client in open() to reuse connections.
        self._client: Optional[AsyncClient] = None
        # Fixed: open() stores an httpx.Timeout, not aiohttp's
        # ClientTimeout — the previous annotation named the wrong library.
        self._timeout: Optional[httpx.Timeout] = None

    def open(self):
        """Log downloader identity/concurrency and build the shared timeout."""
        self.logger.info(
            f"{self.crawler.spider} <downloader class: {type(self).__name__}>"
            f"<concurrency: {self.crawler.settings.get('CONCURRENCY')}>"
        )
        request_timeout = self.crawler.settings.getint('REQUEST_TIMEOUT')
        self._timeout = httpx.Timeout(timeout=request_timeout)

    async def download(self, request) -> Optional[Response]:
        """Execute *request* over HTTP and wrap the result.

        Any exception raised by httpx is logged and re-raised to the
        caller with its original traceback.
        """
        try:
            proxies = request.proxy
            async with httpx.AsyncClient(timeout=self._timeout, proxy=proxies) as session:
                self.logger.debug(f"request downloading : {request.url}, method: {request.method}")
                # NOTE(review): httpx deprecates ``data=`` for raw
                # byte/str content in favor of ``content=`` — confirm what
                # request.body holds before switching.
                response = await session.request(
                    request.method,
                    request.url,
                    headers=request.headers,
                    cookies=request.cookies,
                    data=request.body,
                )
                # Read the payload while the client context is still open,
                # so the underlying connection has not been closed yet.
                body = await response.aread()
        except Exception as exc:
            self.logger.error(f"Error during request: {exc}")
            # Bare raise preserves the original traceback (``raise exc``
            # re-raises from this frame instead).
            raise
        return self.structure_response(request, response, body)

    @staticmethod
    def structure_response(request, response, body):
        """Adapt an ``httpx.Response`` into the framework's Response type."""
        return Response(
            request=request,
            body=body,
            status=response.status_code,
            headers=dict(response.headers),
            url=request.url,
        )

    async def closer(self):
        """Nothing to clean up: clients are per-request (see download)."""
        pass

