from typing import Optional, Dict, Any
from datetime import datetime
from pydantic import BaseModel, Field, validator
from uuid import UUID
from enum import Enum


class CrawlerStatus(str, Enum):
    """Lifecycle state of a crawler configuration.

    Subclassing ``str`` makes members compare equal to their raw string
    values and lets them serialize directly in pydantic/JSON payloads.
    """

    ACTIVE = "active"
    INACTIVE = "inactive"
    PAUSED = "paused"
    ERROR = "error"


class UserAgentType(str, Enum):
    """User-agent pool a crawler draws from when issuing requests.

    ``str`` subclass: members compare equal to their plain string values.
    """

    DESKTOP = "desktop"
    MOBILE = "mobile"
    MIXED = "mixed"


class CrawlerConfigBase(BaseModel):
    """Shared schema for a crawler configuration.

    Field constraints are enforced by pydantic at construction time:

    - ``frequency``: crawl interval in seconds (1 minute to 24 hours).
    - ``timeout``: per-request timeout in seconds.
    - ``rate_limit``: request cap per interval — presumably requests per
      second; confirm against the crawler runtime.
    - ``url_template`` must be an absolute HTTP(S) URL.
    """

    source_name: str = Field(..., min_length=1, max_length=100)
    url_template: str = Field(..., min_length=1)
    frequency: int = Field(default=3600, ge=60, le=86400)  # 1 minute to 24 hours
    status: CrawlerStatus = Field(default=CrawlerStatus.ACTIVE)
    retry_times: int = Field(default=3, ge=0, le=10)
    timeout: int = Field(default=30, ge=5, le=300)
    headers: Dict[str, str] = Field(default_factory=dict)
    rate_limit: int = Field(default=10, ge=1, le=100)
    proxy_enabled: bool = Field(default=False)
    user_agent_type: UserAgentType = Field(default=UserAgentType.MIXED)
    auth_config: Optional[Dict[str, Any]] = None
    custom_settings: Dict[str, Any] = Field(default_factory=dict)

    @validator('url_template')
    def validate_url_template(cls, v: str) -> str:
        """Reject templates that are not absolute HTTP(S) URLs."""
        if not v.startswith(('http://', 'https://')):
            raise ValueError('url_template must start with http:// or https://')
        return v

    # NOTE: the previous `validate_headers` validator was removed as dead
    # code — a non-`pre` pydantic v1 validator runs AFTER the value has been
    # coerced/validated as Dict[str, str], so its isinstance check could
    # never fail.


class CrawlerConfigCreate(CrawlerConfigBase):
    """Payload for creating a crawler config; identical to the base schema."""


class CrawlerConfigUpdate(BaseModel):
    """Partial-update payload for a crawler configuration.

    Every field is optional (``None`` means "leave unchanged"), but any
    value that IS supplied is validated with the same constraints as
    ``CrawlerConfigBase`` — previously this model accepted out-of-range
    values (e.g. ``frequency=5``) that the base schema would reject.
    """

    source_name: Optional[str] = Field(None, min_length=1, max_length=100)
    url_template: Optional[str] = Field(None, min_length=1)
    frequency: Optional[int] = Field(None, ge=60, le=86400)  # seconds
    status: Optional[CrawlerStatus] = None
    retry_times: Optional[int] = Field(None, ge=0, le=10)
    timeout: Optional[int] = Field(None, ge=5, le=300)  # seconds
    headers: Optional[Dict[str, str]] = None
    rate_limit: Optional[int] = Field(None, ge=1, le=100)
    proxy_enabled: Optional[bool] = None
    user_agent_type: Optional[UserAgentType] = None
    auth_config: Optional[Dict[str, Any]] = None
    custom_settings: Optional[Dict[str, Any]] = None

    @validator('url_template')
    def validate_url_template(cls, v: Optional[str]) -> Optional[str]:
        """Mirror the base-schema URL check; ``None`` passes through untouched."""
        if v is not None and not v.startswith(('http://', 'https://')):
            raise ValueError('url_template must start with http:// or https://')
        return v


class CrawlerConfigInDB(CrawlerConfigBase):
    """Database-backed crawler config: base fields plus persistence metadata."""
    id: UUID
    # Run bookkeeping; all three stay None until set by whatever records runs.
    last_run_at: Optional[datetime] = None
    last_success_at: Optional[datetime] = None
    last_error: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        # Pydantic v1: allow construction from ORM objects via attribute access.
        orm_mode = True


class CrawlerConfigResponse(CrawlerConfigInDB):
    """API response shape; identical to the stored (in-DB) representation."""


class CrawlerStats(BaseModel):
    """Aggregated run statistics for a single crawler source."""
    source_name: str
    # Cumulative run counters; total_runs presumably equals
    # successful_runs + failed_runs — confirm with the producer of this data.
    total_runs: int = 0
    successful_runs: int = 0
    failed_runs: int = 0
    average_items_per_run: float = 0
    # average_runtime unit is not established here — likely seconds; verify.
    average_runtime: float = 0
    # Snapshot of the most recent run; None when the crawler has never run.
    last_run_status: Optional[str] = None
    last_run_items: Optional[int] = None
    # Defaults to 100.0 (fully up) when no failure data exists.
    uptime_percentage: float = 100.0