import json
import os
from datetime import datetime
from typing import List, Optional
from uuid import UUID

import psycopg2
import structlog
from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi.responses import JSONResponse
from psycopg2.extras import RealDictCursor

from ..models.crawler_config import (
    CrawlerConfigCreate,
    CrawlerConfigUpdate,
    CrawlerConfigResponse,
    CrawlerStats,
    CrawlerStatus
)


logger = structlog.get_logger(__name__)
router = APIRouter(prefix="/api/v1/crawler/configs", tags=["crawler-configs"])


def get_db_connection():
    import os
    return psycopg2.connect(
        host=os.getenv("POSTGRES_HOST", "postgres"),
        port=os.getenv("POSTGRES_PORT", 5432),
        database=os.getenv("POSTGRES_DB", "ai_writing"),
        user=os.getenv("POSTGRES_USER", "admin"),
        password=os.getenv("POSTGRES_PASSWORD", "admin123"),
        cursor_factory=RealDictCursor
    )


@router.get("/", response_model=List[CrawlerConfigResponse])
async def get_crawler_configs(
    status: Optional[CrawlerStatus] = None,
    limit: int = Query(default=100, le=1000),
    offset: int = Query(default=0, ge=0)
):
    try:
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                query = "SELECT * FROM crawler_configs"
                params = []
                
                if status:
                    query += " WHERE status = %s"
                    params.append(status.value)
                
                query += " ORDER BY created_at DESC LIMIT %s OFFSET %s"
                params.extend([limit, offset])
                
                cur.execute(query, params)
                configs = cur.fetchall()
                
                return [CrawlerConfigResponse(**config) for config in configs]
    
    except Exception as e:
        logger.error("Failed to fetch crawler configs", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to fetch configurations")


@router.get("/{source_name}", response_model=CrawlerConfigResponse)
async def get_crawler_config(source_name: str):
    try:
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT * FROM crawler_configs WHERE source_name = %s",
                    (source_name,)
                )
                config = cur.fetchone()
                
                if not config:
                    raise HTTPException(status_code=404, detail=f"Configuration for '{source_name}' not found")
                
                return CrawlerConfigResponse(**config)
    
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to fetch crawler config", source_name=source_name, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to fetch configuration")


@router.post("/", response_model=CrawlerConfigResponse, status_code=201)
async def create_crawler_config(config: CrawlerConfigCreate):
    try:
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                cur.execute("""
                    INSERT INTO crawler_configs (
                        source_name, url_template, frequency, status, 
                        retry_times, timeout, headers, rate_limit, 
                        proxy_enabled, user_agent_type, auth_config, custom_settings
                    ) VALUES (
                        %s, %s, %s, %s, %s, %s, %s::jsonb, %s, %s, %s, %s::jsonb, %s::jsonb
                    ) RETURNING *
                """, (
                    config.source_name, config.url_template, config.frequency, 
                    config.status.value, config.retry_times, config.timeout,
                    json.dumps(config.headers), config.rate_limit,
                    config.proxy_enabled, config.user_agent_type.value,
                    json.dumps(config.auth_config) if config.auth_config else None,
                    json.dumps(config.custom_settings)
                ))
                
                new_config = cur.fetchone()
                conn.commit()
                
                logger.info("Created crawler config", source_name=config.source_name)
                return CrawlerConfigResponse(**new_config)
    
    except psycopg2.IntegrityError as e:
        if "duplicate key" in str(e):
            raise HTTPException(status_code=409, detail=f"Configuration for '{config.source_name}' already exists")
        raise HTTPException(status_code=400, detail="Invalid configuration data")
    except Exception as e:
        logger.error("Failed to create crawler config", error=str(e))
        raise HTTPException(status_code=500, detail="Failed to create configuration")


@router.put("/{source_name}", response_model=CrawlerConfigResponse)
async def update_crawler_config(source_name: str, config: CrawlerConfigUpdate):
    try:
        update_data = config.dict(exclude_unset=True)
        
        if not update_data:
            raise HTTPException(status_code=400, detail="No update data provided")
        
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                set_clause = []
                params = []
                
                for field, value in update_data.items():
                    if field in ['headers', 'auth_config', 'custom_settings']:
                        set_clause.append(f"{field} = %s::jsonb")
                        params.append(json.dumps(value))
                    elif field == 'status':
                        set_clause.append(f"{field} = %s")
                        params.append(value.value if hasattr(value, 'value') else value)
                    elif field == 'user_agent_type':
                        set_clause.append(f"{field} = %s")
                        params.append(value.value if hasattr(value, 'value') else value)
                    else:
                        set_clause.append(f"{field} = %s")
                        params.append(value)
                
                params.append(source_name)
                
                query = f"""
                    UPDATE crawler_configs 
                    SET {', '.join(set_clause)}
                    WHERE source_name = %s
                    RETURNING *
                """
                
                cur.execute(query, params)
                updated_config = cur.fetchone()
                
                if not updated_config:
                    raise HTTPException(status_code=404, detail=f"Configuration for '{source_name}' not found")
                
                conn.commit()
                
                logger.info("Updated crawler config", source_name=source_name)
                return CrawlerConfigResponse(**updated_config)
    
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to update crawler config", source_name=source_name, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to update configuration")


@router.delete("/{source_name}")
async def delete_crawler_config(source_name: str):
    try:
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    "DELETE FROM crawler_configs WHERE source_name = %s RETURNING id",
                    (source_name,)
                )
                deleted = cur.fetchone()
                
                if not deleted:
                    raise HTTPException(status_code=404, detail=f"Configuration for '{source_name}' not found")
                
                conn.commit()
                
                logger.info("Deleted crawler config", source_name=source_name)
                return {"message": f"Configuration for '{source_name}' deleted successfully"}
    
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to delete crawler config", source_name=source_name, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to delete configuration")


@router.get("/{source_name}/stats", response_model=CrawlerStats)
async def get_crawler_stats(source_name: str):
    try:
        with get_db_connection() as conn:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT * FROM crawler_configs WHERE source_name = %s",
                    (source_name,)
                )
                config = cur.fetchone()
                
                if not config:
                    raise HTTPException(status_code=404, detail=f"Configuration for '{source_name}' not found")
                
                # TODO: Implement actual stats collection from crawler runs
                # This is a placeholder implementation
                stats = CrawlerStats(
                    source_name=source_name,
                    total_runs=0,
                    successful_runs=0,
                    failed_runs=0,
                    average_items_per_run=0,
                    average_runtime=0,
                    last_run_status=config.get('status'),
                    last_run_items=0,
                    uptime_percentage=100.0
                )
                
                return stats
    
    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to fetch crawler stats", source_name=source_name, error=str(e))
        raise HTTPException(status_code=500, detail="Failed to fetch statistics")


import json