import time
from typing import Dict, Optional
from datetime import datetime, timedelta
from scrapy import signals
from scrapy.exceptions import IgnoreRequest
import redis
import structlog
import json
from urllib.parse import urlparse


class RateLimitMiddleware:
    """Scrapy downloader middleware enforcing per-source request rate limits.

    Limits are expressed as requests per 60-second sliding window and are
    resolved per "source" (``request.meta['source']`` or, failing that, the
    request URL's netloc). When Redis is reachable the window is tracked in a
    Redis sorted set so multiple crawler processes share one budget; otherwise
    the middleware falls back to a process-local in-memory window. All Redis
    failures fail open — a Redis outage never stalls the crawl.
    """

    # Sliding-window length in seconds, shared by both backends and by the
    # baseline delay calculation.
    WINDOW_SECONDS = 60

    def __init__(self, redis_host: str, redis_port: int, redis_db: int = 0, default_rate_limit: int = 10):
        """Connect to Redis (best-effort) and initialize in-memory state.

        Args:
            redis_host: Redis server hostname.
            redis_port: Redis server port.
            redis_db: Redis logical database index.
            default_rate_limit: Requests per window used when no per-source
                limit is configured.
        """
        self.logger = structlog.get_logger(self.__class__.__name__)
        self.redis_client = None
        self.redis_host = redis_host
        self.redis_port = redis_port
        self.redis_db = redis_db
        self.default_rate_limit = default_rate_limit
        # source -> requests/window limit; populated by load_rate_limits().
        self.rate_limits: Dict[str, int] = {}
        # In-memory fallback: source -> list of recent request timestamps.
        self.request_counts: Dict[str, list] = {}
        # source -> datetime until which new requests for it are dropped.
        self.blocked_until: Dict[str, datetime] = {}

        try:
            self.redis_client = redis.Redis(
                host=redis_host,
                port=redis_port,
                db=redis_db,
                decode_responses=True,
                socket_connect_timeout=5,
                socket_timeout=5
            )
            # Fail fast here so we pick the backend once, at construction.
            self.redis_client.ping()
            self.use_redis = True
            self.logger.info("Connected to Redis for distributed rate limiting")
        except Exception as e:
            self.logger.warning(
                "Failed to connect to Redis, using in-memory rate limiting",
                error=str(e)
            )
            self.use_redis = False

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy entry point: build the middleware from crawler settings."""
        redis_host = crawler.settings.get('REDIS_HOST', 'redis')
        redis_port = crawler.settings.getint('REDIS_PORT', 6379)
        redis_db = crawler.settings.getint('REDIS_DB', 0)
        default_rate_limit = crawler.settings.getint('DEFAULT_RATE_LIMIT', 10)

        instance = cls(redis_host, redis_port, redis_db, default_rate_limit)

        crawler.signals.connect(instance.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)

        return instance

    def load_rate_limits(self, spider):
        """Populate ``self.rate_limits`` for *spider*.

        Precedence: Redis config key -> ``spider.rate_limits`` attribute ->
        ``{'default': self.default_rate_limit}``.
        """
        try:
            if self.use_redis:
                config_key = f"crawler:config:{spider.name}"
                config_data = self.redis_client.get(config_key)
                if config_data:
                    config = json.loads(config_data)
                    self.rate_limits = config.get('rate_limits', {})
                    self.logger.info(
                        "Loaded rate limits from Redis",
                        spider=spider.name,
                        rate_limits=self.rate_limits
                    )
        except Exception as e:
            self.logger.error(
                "Failed to load rate limits from Redis",
                error=str(e)
            )

        # Fall back to spider-defined limits, then to the global default.
        if not self.rate_limits:
            self.rate_limits = getattr(spider, 'rate_limits', {})
            if not self.rate_limits:
                self.rate_limits = {'default': self.default_rate_limit}

    def get_source_key(self, request) -> str:
        """Return the rate-limit bucket for *request*.

        Uses ``request.meta['source']`` when set; otherwise the URL netloc,
        with ``'default'`` as a last resort for unparsable URLs.
        """
        source = request.meta.get('source')
        if source:
            return source

        parsed_url = urlparse(request.url)
        return parsed_url.netloc or 'default'

    def get_rate_limit(self, source: str) -> int:
        """Return the requests-per-window limit for *source*."""
        return self.rate_limits.get(source, self.rate_limits.get('default', self.default_rate_limit))

    def check_rate_limit(self, source: str, spider_name: str) -> bool:
        """Return True if a request to *source* is within its limit."""
        rate_limit = self.get_rate_limit(source)

        if self.use_redis:
            return self._check_redis_rate_limit(source, spider_name, rate_limit)
        else:
            return self._check_memory_rate_limit(source, rate_limit)

    def _check_redis_rate_limit(self, source: str, spider_name: str, rate_limit: int) -> bool:
        """Sliding-window check backed by a Redis sorted set.

        Each allowed request is stored as a member scored by its timestamp;
        expired entries are pruned on every check. Fails open (returns True)
        on any Redis error.
        """
        try:
            key = f"rate_limit:crawler:{source}:{spider_name}"

            current_time = time.time()
            window_start = current_time - self.WINDOW_SECONDS

            pipe = self.redis_client.pipeline()
            pipe.zremrangebyscore(key, 0, window_start)      # drop expired entries
            pipe.zcard(key)                                   # count BEFORE this request
            pipe.zadd(key, {str(current_time): current_time})  # optimistically record it
            pipe.expire(key, self.WINDOW_SECONDS + 1)
            results = pipe.execute()

            count = results[1]

            # BUG FIX: was `count > rate_limit`, which admitted rate_limit + 1
            # requests per window and disagreed with the in-memory backend's
            # `>=` comparison.
            if count >= rate_limit:
                # Roll back the optimistic zadd so rejected attempts do not
                # consume quota.
                self.redis_client.zrem(key, str(current_time))
                return False

            stats_key = f"crawler:stats:{spider_name}:{source}"
            self.redis_client.hincrby(stats_key, "requests", 1)
            self.redis_client.expire(stats_key, 3600)

            return True

        except Exception as e:
            self.logger.error(
                "Redis rate limit check failed",
                error=str(e),
                source=source
            )
            # Fail open: better to over-crawl briefly than to stall.
            return True

    def _check_memory_rate_limit(self, source: str, rate_limit: int) -> bool:
        """Process-local sliding-window check (Redis unavailable)."""
        current_time = time.time()

        if source not in self.request_counts:
            self.request_counts[source] = []

        # Keep only timestamps inside the current window.
        self.request_counts[source] = [
            timestamp for timestamp in self.request_counts[source]
            if timestamp > current_time - self.WINDOW_SECONDS
        ]

        if len(self.request_counts[source]) >= rate_limit:
            return False

        self.request_counts[source].append(current_time)
        return True

    def calculate_delay(self, source: str, spider_name: Optional[str] = None) -> float:
        """Return the back-off delay in seconds for a rate-limited *source*.

        Baseline is the even spacing implied by the limit (window / limit).
        When Redis response-time stats exist, the delay is stretched to 1.5x
        the observed average response time if that is larger.

        Args:
            source: Rate-limit bucket.
            spider_name: Spider whose stats to consult; kept optional for
                backward compatibility with callers that omit it.
        """
        rate_limit = self.get_rate_limit(source)

        min_interval = float(self.WINDOW_SECONDS) / rate_limit

        if self.use_redis:
            try:
                # BUG FIX: stats are written under
                # "crawler:stats:{spider_name}:{source}" (see
                # _check_redis_rate_limit / process_response); the old
                # "crawler:stats:{source}" key never existed, so the adaptive
                # component was dead code. The legacy key is kept only for
                # callers that don't pass spider_name.
                if spider_name:
                    stats_key = f"crawler:stats:{spider_name}:{source}"
                else:
                    stats_key = f"crawler:stats:{source}"
                stats = self.redis_client.hgetall(stats_key)
                if stats and 'avg_response_time' in stats:
                    avg_response_time = float(stats['avg_response_time'])
                    min_interval = max(min_interval, avg_response_time * 1.5)
            except Exception:
                # Stats are advisory only; fall back to the baseline interval.
                pass

        return min_interval

    def process_request(self, request, spider):
        """Drop (IgnoreRequest) requests that exceed their source's limit.

        Honors ``request.meta['dont_rate_limit']`` as an opt-out. On a limit
        breach the source is blocked for a computed delay and the delay is
        exposed via ``request.meta['retry_after']``.
        """
        if request.meta.get('dont_rate_limit'):
            return None

        source = self.get_source_key(request)

        # Respect an existing temporary block before re-checking the window.
        if source in self.blocked_until:
            if datetime.now() < self.blocked_until[source]:
                self.logger.warning(
                    "Request blocked due to rate limiting",
                    url=request.url,
                    source=source,
                    blocked_until=self.blocked_until[source].isoformat()
                )
                raise IgnoreRequest(f"Rate limited for source: {source}")
            else:
                del self.blocked_until[source]

        if not self.check_rate_limit(source, spider.name):
            delay = self.calculate_delay(source, spider.name)
            self.blocked_until[source] = datetime.now() + timedelta(seconds=delay)

            self.logger.warning(
                "Rate limit exceeded",
                source=source,
                url=request.url,
                retry_after=delay
            )

            request.meta['retry_after'] = delay
            raise IgnoreRequest(f"Rate limit exceeded for source: {source}")

        self.logger.debug(
            "Request allowed by rate limiter",
            url=request.url,
            source=source
        )

        return None

    def process_response(self, request, response, spider):
        """Record success and response-time stats in Redis (best-effort)."""
        source = self.get_source_key(request)

        if self.use_redis:
            try:
                stats_key = f"crawler:stats:{spider.name}:{source}"
                self.redis_client.hincrby(stats_key, "success", 1)

                # 'download_latency' is set by Scrapy's downloader;
                # response.meta proxies request.meta.
                response_time = response.meta.get('download_latency', 0)
                if response_time:
                    self.redis_client.hset(stats_key, "last_response_time", response_time)

                    # Running average with weight 0.5 on the newest sample
                    # (a simple exponential moving average).
                    current_avg = self.redis_client.hget(stats_key, "avg_response_time")
                    if current_avg:
                        new_avg = (float(current_avg) + response_time) / 2
                    else:
                        new_avg = response_time
                    self.redis_client.hset(stats_key, "avg_response_time", new_avg)
            except Exception as e:
                self.logger.error(
                    "Failed to update response stats",
                    error=str(e)
                )

        return response

    def spider_opened(self, spider):
        """Signal handler: load this spider's limits when it starts."""
        self.load_rate_limits(spider)
        self.logger.info(
            "Rate limit middleware activated",
            spider=spider.name,
            rate_limits=self.rate_limits,
            using_redis=self.use_redis
        )

    def spider_closed(self, spider):
        """Signal handler: aggregate and log final stats when the spider ends."""
        stats = {
            "total_sources": len(self.request_counts) if not self.use_redis else "N/A",
            "blocked_sources": len(self.blocked_until)
        }

        if self.use_redis:
            try:
                pattern = f"crawler:stats:{spider.name}:*"
                total_requests = 0
                total_success = 0

                # scan_iter instead of KEYS: same result set, but iterates
                # incrementally and does not block the Redis server.
                for key in self.redis_client.scan_iter(match=pattern):
                    key_stats = self.redis_client.hgetall(key)
                    if 'requests' in key_stats:
                        total_requests += int(key_stats['requests'])
                    if 'success' in key_stats:
                        total_success += int(key_stats['success'])

                stats['total_requests'] = total_requests
                stats['total_success'] = total_success
                stats['success_rate'] = (total_success / max(total_requests, 1)) * 100
            except Exception as e:
                self.logger.error(
                    "Failed to collect final stats",
                    error=str(e)
                )

        self.logger.info(
            "Rate limit middleware statistics",
            spider=spider.name,
            **stats
        )