from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from typing import List, Dict, Any

from app.modules.crawler.schemas import CrawlRequest, CrawlResponse
from app.modules.crawler.api.endpoints import crawl, crawl_background, get_crawl_status


def get_crawler_router() -> APIRouter:
    """Build the crawler API router.

    Registers three routes:
      * POST ``/crawl`` -- synchronous crawl returning ``CrawlResponse``.
      * POST ``/crawl/async`` -- starts a background crawl task (202 Accepted).
      * GET ``/crawl/status/{task_id}`` -- status of a background crawl task.

    Returns:
        APIRouter: Router with all crawler endpoints attached.
    """
    router = APIRouter()

    # (path, handler, keyword options) for each route, in registration order.
    route_specs = [
        (
            "/crawl",
            crawl,
            {
                "methods": ["POST"],
                "response_model": CrawlResponse,
                "status_code": 200,
                "summary": "Crawl a URL",
                "description": "Crawl a URL and extract data according to the provided selectors.",
            },
        ),
        (
            "/crawl/async",
            crawl_background,
            {
                "methods": ["POST"],
                "status_code": 202,
                "summary": "Crawl a URL asynchronously",
                "description": "Start a background task to crawl a URL and extract data.",
            },
        ),
        (
            "/crawl/status/{task_id}",
            get_crawl_status,
            {
                "methods": ["GET"],
                "status_code": 200,
                "summary": "Get crawl status",
                "description": "Get the status of a background crawl task.",
            },
        ),
    ]

    # Data-driven registration keeps the route table easy to scan and extend.
    for path, handler, options in route_specs:
        router.add_api_route(path, endpoint=handler, **options)

    return router