import random
import os
import json
from typing import List, Optional, Dict, Any
from scrapy import signals
from scrapy.exceptions import NotConfigured
import structlog
import httpx
from datetime import datetime, timedelta


class ProxyMiddleware:
    """Scrapy downloader middleware that routes requests through a rotating
    pool of HTTP proxies.

    Proxies are gathered from the ``PROXY_LIST`` setting, an optional
    ``PROXY_FILE``, and the ``CRAWLER_PROXIES`` environment variable. Each
    candidate is validated against ``validation_url`` before entering the
    pool. Per-request selection is a weighted random draw that favors proxies
    with a higher observed success rate. Proxies that raise download errors
    are quarantined for ``failure_timeout`` seconds and re-validated before
    rejoining the pool; proxies exceeding ``max_failures`` are removed.
    """

    def __init__(self, proxy_list: List[str], validation_url: str = "http://httpbin.org/ip"):
        """
        Args:
            proxy_list: Candidate proxy URLs, e.g. ``http://host:port``.
            validation_url: URL fetched through each proxy to confirm it works.
        """
        self.logger = structlog.get_logger(self.__class__.__name__)
        self.proxy_list = proxy_list
        self.validation_url = validation_url
        # Proxies that passed validation and are eligible for selection.
        self.valid_proxies: List[str] = []
        # proxy -> {"count", "last_failure", "errors"}; presence == quarantined.
        self.failed_proxies: Dict[str, Dict[str, Any]] = {}
        # proxy -> usage counters driving the weighted selection.
        self.proxy_stats: Dict[str, Dict[str, Any]] = {}
        self.max_failures = 3  # failures before a proxy is dropped from the pool
        self.failure_timeout = 300  # seconds a failed proxy stays quarantined

        if self.proxy_list:
            # NOTE: validation issues one blocking HTTP request per proxy, so
            # startup time grows linearly with the size of the proxy list.
            self.validate_proxies()

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook: build the middleware from crawler settings.

        Raises:
            NotConfigured: when ``PROXY_ENABLED`` is false or no proxies could
                be loaded, telling Scrapy to skip this middleware entirely.
        """
        if not crawler.settings.getbool('PROXY_ENABLED', False):
            raise NotConfigured('Proxy middleware is disabled')

        proxy_list = cls.load_proxy_list(crawler.settings)
        if not proxy_list:
            raise NotConfigured('No proxies configured')

        validation_url = crawler.settings.get('PROXY_VALIDATION_URL', 'http://httpbin.org/ip')
        instance = cls(proxy_list, validation_url)

        # Hook spider lifecycle signals for startup/summary logging.
        crawler.signals.connect(instance.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(instance.spider_closed, signal=signals.spider_closed)
        return instance

    @staticmethod
    def load_proxy_list(settings) -> List[str]:
        """Collect proxy URLs from all configured sources, de-duplicated with
        first-seen order preserved.

        Sources, in order of appearance:
          * ``PROXY_LIST`` setting (list of URLs),
          * ``PROXY_FILE`` setting (one URL per line),
          * ``CRAWLER_PROXIES`` environment variable (comma-separated).
        """
        proxy_list: List[str] = []

        proxy_list.extend(settings.getlist('PROXY_LIST', []))

        proxy_file = settings.get('PROXY_FILE')
        if proxy_file and os.path.exists(proxy_file):
            with open(proxy_file, 'r') as f:
                proxy_list.extend(line.strip() for line in f if line.strip())

        proxy_env = os.environ.get('CRAWLER_PROXIES')
        if proxy_env:
            proxy_list.extend(p.strip() for p in proxy_env.split(',') if p.strip())

        # dict.fromkeys de-duplicates deterministically; the previous
        # list(set(...)) reordered the pool differently on every run because
        # string hashing is randomized per process.
        return list(dict.fromkeys(proxy_list))

    @staticmethod
    def _fresh_stats() -> Dict[str, Any]:
        """Zeroed per-proxy usage counters."""
        return {
            "requests": 0,
            "success": 0,
            "failures": 0,
            "last_used": None,
            "avg_response_time": 0
        }

    def validate_proxies(self):
        """Probe every configured proxy and keep only the working ones."""
        self.logger.info("Validating proxy pool", total_proxies=len(self.proxy_list))

        for proxy in self.proxy_list:
            if self.validate_proxy(proxy):
                self.valid_proxies.append(proxy)
                self.proxy_stats[proxy] = self._fresh_stats()

        self.logger.info(
            "Proxy validation completed",
            valid_proxies=len(self.valid_proxies),
            invalid_proxies=len(self.proxy_list) - len(self.valid_proxies)
        )

    def validate_proxy(self, proxy: str) -> bool:
        """Return True when ``validation_url`` answers HTTP 200 through
        ``proxy`` within 10 seconds; any error or non-200 counts as invalid."""
        try:
            # NOTE(review): the mounts-style ``proxies=`` mapping was removed
            # in newer httpx releases in favor of ``proxy=``/``mounts=`` —
            # confirm the pinned httpx version still supports this call.
            with httpx.Client(proxies={"http://": proxy, "https://": proxy}, timeout=10) as client:
                response = client.get(self.validation_url)
                if response.status_code == 200:
                    self.logger.debug("Proxy validated successfully", proxy=proxy)
                    return True
        except Exception as e:
            # Broad catch is deliberate: any transport failure just marks the
            # proxy invalid; it must never abort validation of the rest.
            self.logger.debug("Proxy validation failed", proxy=proxy, error=str(e))

        return False

    def get_proxy(self) -> Optional[str]:
        """Pick a proxy for the next request, or ``None`` when none is usable.

        First releases proxies whose quarantine has expired (re-validating
        them before they rejoin the pool), then draws a weighted random
        choice that favors proxies with a higher observed success rate.
        """
        now = datetime.now()

        # Release quarantined proxies whose timeout has elapsed.
        for proxy in list(self.failed_proxies):
            failure_info = self.failed_proxies[proxy]
            if now - failure_info["last_failure"] > timedelta(seconds=self.failure_timeout):
                del self.failed_proxies[proxy]
                if proxy in self.proxy_list and proxy not in self.valid_proxies:
                    if self.validate_proxy(proxy):
                        self.valid_proxies.append(proxy)
                        # Counters restart from zero after a quarantine.
                        self.proxy_stats[proxy] = self._fresh_stats()

        available_proxies = [
            proxy for proxy in self.valid_proxies
            if proxy not in self.failed_proxies
        ]

        if not available_proxies:
            self.logger.warning("No available proxies in pool")
            return None

        weights = []
        for proxy in available_proxies:
            stats = self.proxy_stats.get(proxy, {})
            attempts = stats.get("requests", 0)
            if attempts == 0:
                # Unproven proxy: moderate weight so it gets a fair trial.
                weights.append(5)
            else:
                # Weight proportional to success rate. (The previous formula
                # used 1 - success_rate, which steered traffic toward the
                # WORST proxies.) Floor of 1 keeps every proxy selectable.
                success_rate = stats.get("success", 0) / attempts
                weights.append(max(1, round(success_rate * 10)))

        selected_proxy = random.choices(available_proxies, weights=weights, k=1)[0]

        stats = self.proxy_stats.setdefault(selected_proxy, self._fresh_stats())
        stats["requests"] += 1
        stats["last_used"] = now.isoformat()

        return selected_proxy

    def process_request(self, request, spider):
        """Attach a proxy to the outgoing request via ``request.meta['proxy']``.

        Requests flagged with ``meta['dont_proxy']`` are left untouched.
        """
        if not self.valid_proxies and not self.proxy_list:
            return None

        if request.meta.get('dont_proxy'):
            return None

        proxy = self.get_proxy()

        if proxy:
            request.meta['proxy'] = proxy
            self.logger.debug(
                "Using proxy for request",
                url=request.url,
                proxy=proxy
            )

    def process_response(self, request, response, spider):
        """Credit the proxy that produced ``response`` and lift its quarantine.

        NOTE(review): any response — including HTTP 4xx/5xx — counts as a
        success here; only transport-level exceptions count as failures.
        """
        proxy = request.meta.get('proxy')

        if proxy and proxy in self.proxy_stats:
            self.proxy_stats[proxy]["success"] += 1
            # A working response clears any in-progress failure streak.
            self.failed_proxies.pop(proxy, None)

        return response

    def process_exception(self, request, exception, spider):
        """Record a proxy failure and reschedule the request without a proxy.

        Returns:
            A copy of the failed request for Scrapy to retry, or ``None``
            (pass-through) when the request was not proxied.
        """
        proxy = request.meta.get('proxy')
        if not proxy:
            return None

        self.handle_proxy_failure(proxy, exception)

        failure = self.failed_proxies.get(proxy)
        if failure and failure["count"] >= self.max_failures:
            self.logger.warning(
                "Removing proxy due to excessive failures",
                proxy=proxy,
                failures=failure["count"]
            )
            if proxy in self.valid_proxies:
                self.valid_proxies.remove(proxy)

        # Reschedule without a proxy. Retry a copy with dont_filter=True so
        # the dupefilter does not drop it (same convention as Scrapy's
        # RetryMiddleware), and drop the stale 'proxy' meta key — since
        # process_request skips dont_proxy requests, the dead proxy would
        # otherwise still be used by the downloader on the retry.
        retry_request = request.copy()
        retry_request.meta.pop('proxy', None)
        retry_request.meta['dont_proxy'] = True
        retry_request.dont_filter = True
        return retry_request

    def handle_proxy_failure(self, proxy: str, exception: Exception):
        """Record one failure for ``proxy`` and (re)start its quarantine."""
        entry = self.failed_proxies.setdefault(proxy, {
            "count": 0,
            "last_failure": datetime.now(),
            "errors": []
        })
        entry["count"] += 1
        entry["last_failure"] = datetime.now()
        entry["errors"].append(str(exception))

        if proxy in self.proxy_stats:
            self.proxy_stats[proxy]["failures"] += 1

        self.logger.warning(
            "Proxy failure recorded",
            proxy=proxy,
            failure_count=entry["count"],
            error=str(exception)
        )

    def spider_opened(self, spider):
        """Log that proxying is active and how many proxies survived validation."""
        self.logger.info(
            "Proxy middleware enabled for spider",
            spider=spider.name,
            total_proxies=len(self.valid_proxies)
        )

    def spider_closed(self, spider):
        """Emit an end-of-crawl summary of per-proxy performance."""
        stats_summary = {
            "total_proxies": len(self.proxy_list),
            "valid_proxies": len(self.valid_proxies),
            "failed_proxies": len(self.failed_proxies),
            "proxy_performance": {}
        }

        # Only proxies that actually served requests appear in the summary.
        for proxy, stats in self.proxy_stats.items():
            if stats["requests"] > 0:
                stats_summary["proxy_performance"][proxy] = {
                    "requests": stats["requests"],
                    "success_rate": (stats["success"] / stats["requests"]) * 100,
                    "failures": stats["failures"]
                }

        self.logger.info(
            "Proxy middleware statistics",
            spider=spider.name,
            **stats_summary
        )