from typing import Dict, List, Optional, Any
from pydantic import BaseModel, Field


class CrawlerSettings(BaseModel):
    """Configuration settings for the crawler module.

    Attributes:
        default_headers: Default headers sent with every HTTP request
            (browser-like User-Agent to reduce trivial bot blocking).
        request_timeout: Per-request timeout in seconds; must be > 0.
        request_delay: Delay between consecutive requests in seconds to
            avoid rate limiting; must be >= 0 (0 disables the delay).
        max_retries: Maximum number of retries for failed requests;
            must be >= 0 (0 disables retries).
        proxies: Optional proxy mapping (e.g. {"http": ..., "https": ...})
            passed through to the HTTP client; None means no proxy.
        verify_ssl: Whether to verify SSL certificates. Disable only for
            trusted hosts with self-signed certificates.
        default_parser: Parser backend name handed to BeautifulSoup
            ("html.parser", "lxml", or "html5lib").
        save_to_disk: Whether to persist crawled content to disk.
        save_directory: Directory where crawled content is saved when
            save_to_disk is enabled.
    """

    # default_factory avoids sharing one dict literal across model
    # definitions and is the documented idiom for mutable defaults.
    default_headers: Dict[str, str] = Field(
        default_factory=lambda: {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.5",
        },
        description="Default headers to use for HTTP requests",
    )

    # gt=0: a zero or negative timeout is never a valid request timeout.
    request_timeout: int = Field(
        default=30,
        gt=0,
        description="Default timeout for HTTP requests in seconds",
    )

    # ge=0: zero is a valid "no delay"; negative delays are meaningless.
    request_delay: float = Field(
        default=1.0,
        ge=0,
        description="Default delay between requests in seconds to avoid rate limiting",
    )

    # ge=0: zero is a valid "no retries"; negative counts are invalid.
    max_retries: int = Field(
        default=3,
        ge=0,
        description="Maximum number of retries for failed requests",
    )

    proxies: Optional[Dict[str, str]] = Field(
        default=None,
        description="Proxy configuration for HTTP requests",
    )

    verify_ssl: bool = Field(
        default=True,
        description="Whether to verify SSL certificates",
    )

    default_parser: str = Field(
        default="html.parser",
        description="Default parser for BeautifulSoup (html.parser, lxml, html5lib)",
    )

    save_to_disk: bool = Field(
        default=False,
        description="Whether to save crawled content to disk",
    )

    save_directory: str = Field(
        default="./crawler_data",
        description="Directory to save crawled content",
    )