import time
import requests
import aiohttp
import asyncio
import logging
from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, Union, List, Callable
from urllib.parse import urlparse, urljoin

from app.modules.crawler.config import CrawlerSettings
from app.modules.crawler.utils.storage import save_content


# Module-level logger named after this module (standard `logging` convention).
logger = logging.getLogger(__name__)


class BaseCrawler(ABC):
    """Abstract interface shared by all crawler implementations.

    Concrete subclasses must supply both a single-URL and a multi-URL
    fetch coroutine; session handling, retries and parsing are left to
    each implementation.
    """

    def __init__(self, settings: Optional[CrawlerSettings] = None):
        """Store the crawler configuration, falling back to defaults.

        Args:
            settings: Crawler configuration; a fresh ``CrawlerSettings``
                instance is created when omitted.
        """
        self.settings = settings if settings else CrawlerSettings()

    @abstractmethod
    async def fetch(self, url: str, **kwargs) -> Any:
        """Retrieve the content behind a single URL.

        Args:
            url: Target URL.
            **kwargs: Implementation-specific request options.

        Returns:
            The downloaded content (exact form is implementation-defined).
        """
        ...

    @abstractmethod
    async def fetch_multiple(self, urls: List[str], **kwargs) -> List[Any]:
        """Retrieve the content behind several URLs.

        Args:
            urls: Target URLs.
            **kwargs: Implementation-specific request options.

        Returns:
            One result per input URL, in the same order.
        """
        ...


class HTTPCrawler(BaseCrawler):
    """Synchronous HTTP crawler built on ``requests``.

    NOTE(review): ``fetch`` is declared ``async`` to satisfy the
    ``BaseCrawler`` interface, but the underlying ``requests`` calls
    still block the running thread; prefer ``AsyncHTTPCrawler`` inside
    a busy event loop.
    """

    def __init__(self, settings: Optional[CrawlerSettings] = None):
        """Initialize the HTTP crawler and its persistent session.

        Args:
            settings: Configuration settings for the crawler.
        """
        super().__init__(settings)
        self.session = requests.Session()
        self.session.headers.update(self.settings.default_headers)
        if self.settings.proxies:
            self.session.proxies.update(self.settings.proxies)

    async def fetch(self, url: str, method: str = "GET",
               headers: Optional[Dict[str, str]] = None,
               params: Optional[Dict[str, Any]] = None,
               data: Optional[Any] = None,
               json: Optional[Dict[str, Any]] = None,
               cookies: Optional[Dict[str, str]] = None,
               allow_redirects: bool = True,
               retry_count: int = 0,
               save: bool = False,
               parse: bool = False,
               parser_func: Optional[Callable] = None) -> Any:
        """Fetch content from the specified URL with retries and backoff.

        Args:
            url: The URL to fetch.
            method: HTTP method to use.
            headers: Additional headers merged over the defaults.
            params: Query parameters for the request.
            data: Form data or body to send with the request.
            json: JSON data to send with the request.
            cookies: Cookies to send with the request.
            allow_redirects: Whether to follow redirects.
            retry_count: Attempt number to start from (normally 0).
            save: Whether to save the response content to disk.
            parse: Whether to parse the response with ``parser_func``.
            parser_func: Callable applied to the response when ``parse`` is True.

        Returns:
            The response object, the parsed content if ``parse`` is True,
            or ``None`` when all retry attempts are exhausted.
        """
        # Merge default headers with per-request overrides once, up front.
        request_headers = self.settings.default_headers.copy()
        if headers:
            request_headers.update(headers)

        # Iterative retry loop (the original recursed); attempt counts,
        # delays and log messages are unchanged.
        attempt = retry_count
        while attempt <= self.settings.max_retries:
            # Rate-limit politely; retries back off exponentially.
            # asyncio.sleep (not time.sleep) so the event loop is not blocked.
            if attempt > 0:
                await asyncio.sleep(self.settings.request_delay * (2 ** attempt))
            elif self.settings.request_delay > 0:
                await asyncio.sleep(self.settings.request_delay)

            try:
                response = self.session.request(
                    method=method,
                    url=url,
                    headers=request_headers,
                    params=params,
                    data=data,
                    json=json,
                    cookies=cookies,
                    timeout=self.settings.request_timeout,
                    allow_redirects=allow_redirects,
                    verify=self.settings.verify_ssl
                )

                response.raise_for_status()

                # Save response content if requested.
                if save or self.settings.save_to_disk:
                    save_content(response.content, url, self.settings.save_directory)

                # Parse response if requested.
                if parse and parser_func:
                    return parser_func(response)

                return response

            except requests.RequestException as e:
                # RequestException is the base class of ConnectionError,
                # Timeout and TooManyRedirects, so a single clause covers
                # everything the original tuple listed.
                logger.warning(f"Error fetching URL {url}: {str(e)}. Retry {attempt + 1}/{self.settings.max_retries}")
                attempt += 1

        logger.error(f"Maximum retry attempts reached for URL: {url}")
        return None

    async def fetch_multiple(self, urls: List[str], **kwargs) -> List[Any]:
        """Fetch content from multiple URLs sequentially.

        Args:
            urls: List of URLs to fetch.
            **kwargs: Additional parameters forwarded to :meth:`fetch`.

        Returns:
            List of responses or parsed content, one per URL.
        """
        results = []
        for url in urls:
            result = await self.fetch(url, **kwargs)
            results.append(result)
        return results

    def close(self):
        """Close the underlying session and release its connections."""
        self.session.close()


class AsyncHTTPCrawler(BaseCrawler):
    """Asynchronous HTTP crawler built on ``aiohttp``.

    Usable as an async context manager, which guarantees the underlying
    client session is closed::

        async with AsyncHTTPCrawler() as crawler:
            body = await crawler.fetch(url)
    """

    def __init__(self, settings: Optional[CrawlerSettings] = None):
        """Initialize the async HTTP crawler.

        Args:
            settings: Configuration settings for the crawler.
        """
        super().__init__(settings)
        # Session is created lazily (first fetch) or by __aenter__.
        self.session = None

    async def __aenter__(self):
        """Set up the async context manager by creating a client session."""
        self.session = aiohttp.ClientSession(headers=self.settings.default_headers)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Clean up the async context manager by closing the session."""
        if self.session:
            await self.session.close()
            self.session = None

    async def fetch(self, url: str, method: str = "GET",
               headers: Optional[Dict[str, str]] = None,
               params: Optional[Dict[str, Any]] = None,
               data: Optional[Any] = None,
               json: Optional[Dict[str, Any]] = None,
               cookies: Optional[Dict[str, str]] = None,
               allow_redirects: bool = True,
               retry_count: int = 0,
               save: bool = False,
               parse: bool = False,
               parser_func: Optional[Callable] = None) -> Any:
        """Fetch content from the specified URL asynchronously with retries.

        Args:
            url: The URL to fetch.
            method: HTTP method to use.
            headers: Additional headers merged over the defaults.
            params: Query parameters for the request.
            data: Form data or body to send with the request.
            json: JSON data to send with the request.
            cookies: Cookies to send with the request.
            allow_redirects: Whether to follow redirects.
            retry_count: Attempt number to start from (normally 0).
            save: Whether to save the response content to disk.
            parse: Whether to parse the response with ``parser_func``.
            parser_func: Callable applied as ``parser_func(response, content)``
                when ``parse`` is True.

        Returns:
            The raw response body bytes, the parsed content if ``parse``
            is True, or ``None`` when all retry attempts are exhausted.
        """
        # Create a session if one does not exist yet.
        if self.session is None:
            self.session = aiohttp.ClientSession(headers=self.settings.default_headers)

        # Merge default headers with per-request overrides once, up front.
        request_headers = self.settings.default_headers.copy()
        if headers:
            request_headers.update(headers)

        # Iterative retry loop (the original recursed); attempt counts,
        # delays and log messages are unchanged.
        attempt = retry_count
        while attempt <= self.settings.max_retries:
            # Rate-limit politely; retries back off exponentially.
            if attempt > 0:
                await asyncio.sleep(self.settings.request_delay * (2 ** attempt))
            elif self.settings.request_delay > 0:
                await asyncio.sleep(self.settings.request_delay)

            try:
                async with self.session.request(
                    method=method,
                    url=url,
                    headers=request_headers,
                    params=params,
                    data=data,
                    json=json,
                    cookies=cookies,
                    allow_redirects=allow_redirects,
                    # Honor request_timeout like the sync crawler does.
                    timeout=aiohttp.ClientTimeout(total=self.settings.request_timeout),
                    # BUG FIX: the original passed ``None`` in both branches,
                    # silently ignoring verify_ssl. In aiohttp, ``ssl=False``
                    # disables certificate verification; ``None`` uses the
                    # default SSL context.
                    ssl=False if not self.settings.verify_ssl else None
                ) as response:
                    response.raise_for_status()
                    content = await response.read()

                    # Save response content if requested.
                    if save or self.settings.save_to_disk:
                        save_content(content, url, self.settings.save_directory)

                    # Parse response if requested.
                    if parse and parser_func:
                        return parser_func(response, content)

                    return content

            except (aiohttp.ClientError, asyncio.TimeoutError) as e:
                # aiohttp timeouts raise asyncio.TimeoutError, which is NOT
                # a ClientError, so it must be caught explicitly to retry.
                logger.warning(f"Error fetching URL {url}: {str(e)}. Retry {attempt + 1}/{self.settings.max_retries}")
                attempt += 1

        logger.error(f"Maximum retry attempts reached for URL: {url}")
        return None

    async def fetch_multiple(self, urls: List[str], **kwargs) -> List[Any]:
        """Fetch content from multiple URLs concurrently.

        Args:
            urls: List of URLs to fetch.
            **kwargs: Additional parameters forwarded to :meth:`fetch`.

        Returns:
            List of responses or parsed content, one per URL; individual
            failures are returned as exception objects rather than raised.
        """
        tasks = [self.fetch(url, **kwargs) for url in urls]
        return await asyncio.gather(*tasks, return_exceptions=True)

    async def close(self):
        """Close the client session and release resources."""
        if self.session:
            await self.session.close()
            self.session = None