import time
import random
from typing import Optional, Set
from scrapy import signals
from scrapy.downloadermiddlewares.retry import RetryMiddleware as BaseRetryMiddleware
from scrapy.utils.response import response_status_message
import structlog


class ExponentialBackoffRetryMiddleware(BaseRetryMiddleware):
    """Retry middleware with exponential backoff, jitter and retry stats.

    Extends Scrapy's stock ``RetryMiddleware`` with:

    * an exponentially growing delay between retries
      (``RETRY_BACKOFF_BASE ** attempt``, capped at ``RETRY_BACKOFF_MAX``),
      optionally randomized with jitter (``RETRY_JITTER``),
    * extra retryable HTTP status codes via ``CUSTOM_RETRY_HTTP_CODES``,
    * per-URL retry statistics, summarized when the spider closes.
    """

    def __init__(self, settings):
        super().__init__(settings)
        self.logger = structlog.get_logger(self.__class__.__name__)
        # Backoff curve parameters: delay = base ** attempt, capped at max.
        self.retry_backoff_base = settings.getfloat('RETRY_BACKOFF_BASE', 2)
        self.retry_backoff_max = settings.getfloat('RETRY_BACKOFF_MAX', 60)
        self.retry_jitter = settings.getbool('RETRY_JITTER', True)
        # Per-URL bookkeeping for the spider_closed summary.
        # NOTE(review): unbounded — grows with every distinct retried URL;
        # consider capping or sampling on very large crawls.
        self.retry_stats = {}

        # Merge project-specific retryable codes into the inherited set.
        self.custom_retry_codes = set(settings.getlist('CUSTOM_RETRY_HTTP_CODES', []))
        self.retry_http_codes.update(int(x) for x in self.custom_retry_codes)

        self.logger.info(
            "Exponential backoff retry middleware initialized",
            max_retry_times=self.max_retry_times,
            backoff_base=self.retry_backoff_base,
            backoff_max=self.retry_backoff_max,
            retry_codes=list(self.retry_http_codes)
        )

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware and hook the spider open/close signals."""
        instance = cls(crawler.settings)
        crawler.signals.connect(instance.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)
        return instance

    def calculate_backoff_delay(self, retry_count: int) -> float:
        """Return the delay in seconds before the ``retry_count``-th retry.

        The delay is ``base ** retry_count`` capped at ``retry_backoff_max``;
        when jitter is enabled, up to 10% of the capped delay is added on
        top (so the effective ceiling is ``retry_backoff_max * 1.1``).
        """
        delay = min(
            self.retry_backoff_base ** retry_count,
            self.retry_backoff_max
        )

        if self.retry_jitter:
            # Jitter de-synchronizes many clients hammering one endpoint.
            delay += random.uniform(0, delay * 0.1)

        return delay

    def process_response(self, request, response, spider):
        """Retry responses whose status is in ``retry_http_codes``.

        Returns a retry request (after sleeping the backoff delay) or the
        original response when it is not retryable or retries are exhausted.
        """
        if response.status not in self.retry_http_codes:
            return response

        reason = response_status_message(response.status)
        retry_count = request.meta.get('retry_times', 0) + 1

        if retry_count > self.max_retry_times:
            # Fix: the original reported retry_count here, overstating by
            # one — only retry_count - 1 retries were actually performed.
            self.log_retry_failure(
                request=request,
                reason=f"Gave up after {retry_count - 1} retries",
                spider=spider
            )
            return response

        delay = self.calculate_backoff_delay(retry_count)

        self.log_retry_attempt(
            request=request,
            reason=reason,
            retry_count=retry_count,
            delay=delay,
            spider=spider
        )

        # _retry() already copies the request, bumps meta['retry_times'],
        # sets dont_filter and adjusts priority — re-doing all of that here
        # (as the original did) was redundant.
        retryreq = self._retry(request, reason, spider)
        if retryreq:
            if delay > 0:
                # WARNING: time.sleep() blocks the Twisted reactor, stalling
                # every in-flight request — not just this one. A non-blocking
                # version would return
                # twisted.internet.task.deferLater(reactor, delay, lambda: retryreq).
                time.sleep(delay)
            return retryreq

        return response

    def process_exception(self, request, exception, spider):
        """Retry requests that failed with a retryable network exception.

        Returns a retry request, or ``None`` to let other middlewares and
        errbacks handle the failure.
        """
        retry_count = request.meta.get('retry_times', 0) + 1

        if not self._is_retryable_exception(exception) or retry_count > self.max_retry_times:
            self.log_retry_failure(
                request=request,
                reason=f"Non-retryable exception or max retries exceeded: {exception}",
                spider=spider
            )
            return None

        delay = self.calculate_backoff_delay(retry_count)

        self.log_retry_attempt(
            request=request,
            reason=str(exception),
            retry_count=retry_count,
            delay=delay,
            spider=spider,
            is_exception=True
        )

        retryreq = self._retry(request, str(exception), spider)
        if retryreq is None:
            return None

        # Track which exception types triggered each retry, for diagnostics
        # in log_retry_failure.
        retryreq.meta.setdefault('retry_exception_types', []).append(
            type(exception).__name__
        )

        if delay > 0:
            # WARNING: blocks the reactor — see process_response.
            time.sleep(delay)

        return retryreq

    def _is_retryable_exception(self, exception) -> bool:
        """Return True for transient network errors worth retrying.

        ConnectionResetError, ConnectionAbortedError and BrokenPipeError
        (listed individually in the original) are all subclasses of
        ConnectionError, so checking the base class covers them.

        NOTE(review): Scrapy/Twisted failures usually surface as
        twisted.internet.error.* exceptions, which this check does not
        match — consider also testing against self.EXCEPTIONS_TO_RETRY.
        """
        return isinstance(exception, (ConnectionError, TimeoutError))

    def log_retry_attempt(self, request, reason: str, retry_count: int, delay: float, spider, is_exception: bool = False):
        """Record one retry in the per-URL stats and emit a warning."""
        url = request.url

        stats = self.retry_stats.setdefault(url, {
            'attempts': 0,
            'exceptions': 0,
            'status_codes': {},
            'total_delay': 0
        })
        stats['attempts'] += 1
        stats['total_delay'] += delay

        if is_exception:
            stats['exceptions'] += 1
        else:
            # reason looks like "503 Service Unavailable" — keep the code.
            status_code = reason.split()[0] if ' ' in reason else reason
            stats['status_codes'][status_code] = stats['status_codes'].get(status_code, 0) + 1

        self.logger.warning(
            "Retrying request with exponential backoff",
            url=url,
            reason=reason,
            retry_count=retry_count,
            max_retries=self.max_retry_times,
            delay_seconds=round(delay, 2),
            spider=spider.name
        )

        # Fix: the base class's _retry() already increments 'retry/count';
        # incrementing it here as well double-counted every retry.
        spider.crawler.stats.inc_value(f'retry/reason/{reason}')

    def log_retry_failure(self, request, reason: str, spider):
        """Log a request that is being permanently given up on."""
        self.logger.error(
            "Request failed after all retry attempts",
            url=request.url,
            reason=reason,
            retry_count=request.meta.get('retry_times', 0),
            max_retries=self.max_retry_times,
            retry_history=request.meta.get('retry_exception_types', []),
            spider=spider.name
        )

        spider.crawler.stats.inc_value('retry/max_reached')

        # Give the spider a chance to react (e.g. re-queue the URL elsewhere).
        if hasattr(spider, 'handle_retry_failure'):
            spider.handle_retry_failure(request, reason)

    def spider_opened(self, spider):
        """Signal handler: announce activation with the effective settings."""
        self.logger.info(
            "Exponential backoff retry middleware activated",
            spider=spider.name,
            max_retries=self.max_retry_times,
            backoff_base=self.retry_backoff_base,
            backoff_max=self.retry_backoff_max
        )

    def spider_closed(self, spider):
        """Signal handler: dump aggregate retry statistics."""
        total_retries = sum(s['attempts'] for s in self.retry_stats.values())
        total_delay = sum(s['total_delay'] for s in self.retry_stats.values())

        # Five most-retried URLs, busiest first.
        most_retried = sorted(
            self.retry_stats.items(),
            key=lambda item: item[1]['attempts'],
            reverse=True
        )[:5]

        self.logger.info(
            "Retry middleware statistics",
            spider=spider.name,
            total_retries=total_retries,
            total_delay_seconds=round(total_delay, 2),
            urls_with_retries=len(self.retry_stats),
            most_retried_urls=[
                {
                    'url': url,
                    'attempts': s['attempts'],
                    'exceptions': s['exceptions']
                }
                for url, s in most_retried
            ]
        )