# http_ops.py
import aiohttp
import asyncio
from logger_config import logger
from config import get_env
from utils import load_cookie


async def fetch_page_with_http(url, max_retries=3):
    """Fetch a page over plain HTTP (for pages that need no JS rendering).

    Loads previously saved login cookies, then issues a GET request with
    exponential-backoff retries (1s, 2s, 4s, ...).

    Args:
        url: Target URL to fetch.
        max_retries: Maximum number of attempts before giving up.

    Returns:
        The response body as text on HTTP 200, otherwise ``None``
        (missing cookies, non-200 status on every attempt, or repeated
        network/timeout errors).
    """
    logger.info(f"使用HTTP访问: {url}")

    # Load saved cookies; without them the target site would reject us.
    cookie_list = await load_cookie()
    if not cookie_list:
        logger.error("无法加载cookies，请先登录")
        return None

    cookies_dict = {cookie['name']: cookie['value'] for cookie in cookie_list}

    # Timeout config can be shared across attempts (it is immutable).
    timeout = aiohttp.ClientTimeout(total=30)

    # Retry with exponential backoff.
    for attempt in range(max_retries):
        try:
            # BUG FIX: the connector must be created per attempt.
            # ClientSession owns its connector by default
            # (connector_owner=True) and closes it on session exit, so a
            # connector hoisted outside the loop would be closed after
            # attempt 1 and every retry would raise RuntimeError instead
            # of actually retrying.
            connector = aiohttp.TCPConnector(limit=1)
            async with aiohttp.ClientSession(
                    cookies=cookies_dict,
                    connector=connector,
                    timeout=timeout,
                    headers={'User-Agent': get_env('USERAGENT')}
            ) as session:
                async with session.get(url) as response:
                    if response.status == 200:
                        content = await response.text()
                        logger.success(f"成功获取页面内容，长度: {len(content)}")
                        return content
                    else:
                        logger.warning(f"尝试 {attempt + 1}/{max_retries}：返回状态码 {response.status}")
        except (aiohttp.ClientError, asyncio.TimeoutError) as e:
            logger.error(f"尝试 {attempt + 1}/{max_retries}：请求出错: {str(e)}")

        # Back off before the next attempt (skip the sleep after the last one).
        if attempt < max_retries - 1:
            await asyncio.sleep(2 ** attempt)

    logger.error(f"所有尝试失败：{url}")
    return None