import json
import logging
from typing import Any, Dict, Optional

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from crawler_base import CrawlerBase

logger = logging.getLogger(__name__)

class HTTPCrawler(CrawlerBase):
    """HTTP crawler built on a shared ``requests.Session``.

    Mounts an automatic retry strategy for transient HTTP errors and relies
    on ``CrawlerBase`` for configuration lookup (``self.config``), user-agent
    rotation (``get_random_user_agent``), proxy rotation (``get_next_proxy``),
    request retrying (``retry_request``) and politeness delays (``add_delay``).
    """

    def __init__(self, config=None):
        super().__init__(config)
        self.session = self._create_session()
        self._setup_proxies()

    def _create_session(self) -> requests.Session:
        """Build a session with retries mounted for both http and https.

        Returns:
            A configured ``requests.Session``.

        Note:
            ``requests.Session`` has no session-wide timeout; assigning
            ``session.timeout`` (as earlier revisions did) is silently
            ignored by requests. Timeouts are therefore passed explicitly
            on every request (see ``get_page`` / ``download_file``).
        """
        session = requests.Session()

        # Retry rate-limiting and transient server-side failures with
        # exponential backoff; self.retry_times comes from CrawlerBase.
        retry_strategy = Retry(
            total=self.retry_times,
            backoff_factor=1,
            status_forcelist=[429, 500, 502, 503, 504],
        )

        adapter = HTTPAdapter(max_retries=retry_strategy)
        session.mount("http://", adapter)
        session.mount("https://", adapter)

        return session

    def _setup_proxies(self):
        """Load the proxy list from config when proxying is enabled.

        NOTE(review): when proxying is disabled, ``self.proxies`` is left
        untouched — presumably initialized by CrawlerBase; verify there.
        """
        proxy_settings = self.config.get('proxy_settings', {})
        if proxy_settings.get('enabled', False):
            self.proxies = proxy_settings.get('proxy_list', [])

    def get_page(self, url: str, method: str = 'GET', **kwargs) -> Optional[str]:
        """Fetch a URL and return the decoded response body.

        Args:
            url: Target URL.
            method: HTTP method, defaults to ``'GET'``.
            **kwargs: Optional ``headers``, ``timeout``, ``data``, ``json``,
                ``params`` and ``cookies`` forwarded to requests.

        Returns:
            The response text, or whatever ``retry_request`` yields after
            exhausting retries.

        Raises:
            requests.RequestException: propagated from the final attempt.
        """
        # Copy the caller's headers so setdefault() does not mutate their
        # dict; also tolerate an explicit headers=None.
        headers = dict(kwargs.get('headers') or {})
        headers.setdefault('User-Agent', self.get_random_user_agent())

        request_kwargs = {
            'url': url,
            'method': method,
            'headers': headers,
            'timeout': kwargs.get('timeout', self.config.get('request_settings.timeout', 30)),
        }

        # Forward the supported requests options only when explicitly given.
        for key in ('data', 'json', 'params', 'cookies'):
            if key in kwargs:
                request_kwargs[key] = kwargs[key]

        # Attach a proxy for this request if rotation yields one.
        proxy = self.get_next_proxy()
        if proxy:
            request_kwargs['proxies'] = proxy

        return self.retry_request(self._make_request, **request_kwargs)

    def _make_request(self, **params) -> Optional[str]:
        """Perform one HTTP request and return the body text.

        Raises:
            requests.RequestException: on transport errors or non-2xx status
                (re-raised so ``retry_request`` can decide whether to retry).
        """
        try:
            response = self.session.request(**params)
            response.raise_for_status()

            # requests defaults to ISO-8859-1 when the server omits a
            # charset; prefer the sniffed encoding in that case.
            if response.encoding == 'ISO-8859-1':
                response.encoding = response.apparent_encoding or 'utf-8'

            self.add_delay()
            return response.text

        except requests.RequestException as e:
            logger.error("Request failed: %s", e)
            raise

    def get_json(self, url: str, **kwargs) -> Optional[Dict[str, Any]]:
        """GET a URL and parse the body as JSON.

        Returns:
            The decoded JSON object, or ``None`` when the fetch returned
            nothing or the body is not valid JSON.
        """
        content = self.get_page(url, **kwargs)
        if not content:
            return None
        try:
            return json.loads(content)
        except json.JSONDecodeError as e:
            logger.error("Failed to parse JSON: %s", e)
            return None

    def download_file(self, url: str, filepath: str, **kwargs) -> bool:
        """Stream a URL to a local file.

        Args:
            url: File URL.
            filepath: Destination path, opened in binary write mode.
            **kwargs: Optional ``headers`` and ``timeout``.

        Returns:
            True on success, False on any failure (logged, not raised).
        """
        try:
            # Copy to avoid mutating the caller's headers dict.
            headers = dict(kwargs.get('headers') or {})
            headers.setdefault('User-Agent', self.get_random_user_agent())

            proxy = self.get_next_proxy()

            # Context manager guarantees the streamed connection is
            # released even if writing to disk fails midway.
            with self.session.get(
                url,
                headers=headers,
                proxies=proxy or None,
                stream=True,
                timeout=kwargs.get('timeout', self.config.get('request_settings.timeout', 30)),
            ) as response:
                response.raise_for_status()

                with open(filepath, 'wb') as f:
                    for chunk in response.iter_content(chunk_size=8192):
                        if chunk:  # skip keep-alive chunks
                            f.write(chunk)

            logger.info("File downloaded successfully: %s", filepath)
            self.add_delay()
            return True

        except Exception as e:
            logger.error("Failed to download file: %s", e)
            return False

    def post_data(self, url: str, data: Dict[str, Any] = None, json_data: Dict[str, Any] = None, **kwargs) -> Optional[str]:
        """POST to a URL with form data and/or a JSON body.

        Uses ``is not None`` so an explicitly passed empty dict is still
        forwarded (a bare truthiness check used to drop it).
        """
        request_kwargs = kwargs.copy()
        if data is not None:
            request_kwargs['data'] = data
        if json_data is not None:
            request_kwargs['json'] = json_data

        return self.get_page(url, method='POST', **request_kwargs)

    def close(self):
        """Close the underlying session and its pooled connections."""
        if self.session:
            self.session.close()
            logger.info("HTTP session closed")