from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
import structlog
from contextlib import asynccontextmanager
import os
from prometheus_client import make_asgi_app, Counter, Histogram, Gauge
import time

from .api import config, monitoring


# Structured logger for this module.
logger = structlog.get_logger(__name__)

# Prometheus metrics
# Counter of handled HTTP requests, labeled by method, URL path and response status.
request_count = Counter('crawler_api_requests_total', 'Total API requests', ['method', 'endpoint', 'status'])
# Request latency histogram (seconds), labeled by method and URL path.
request_duration = Histogram('crawler_api_request_duration_seconds', 'API request duration', ['method', 'endpoint'])
# Gauge of currently running spiders; not read or written anywhere in this file —
# presumably updated by the Scrapy engine once wired up (see lifespan TODO).
active_spiders = Gauge('crawler_active_spiders', 'Number of active spiders')


@asynccontextmanager
async def lifespan(_app: FastAPI):
    """Application lifespan hook.

    Code before ``yield`` runs once at startup; code after it runs once at
    shutdown. Currently only logs the transitions — resource setup/teardown
    is still to be implemented.
    """
    logger.info("Crawler service starting up")
    # Initialize database connections, etc.
    # TODO: Start Scrapy engine in background
    yield
    logger.info("Crawler service shutting down")
    # Cleanup resources


# ASGI application; lifespan hook above handles startup/shutdown logging.
app = FastAPI(
    title="Crawler Service API",
    description="Web crawler service for AI writing platform",
    version="1.0.0",
    lifespan=lifespan
)

# CORS middleware
# NOTE(review): wildcard origins combined with allow_credentials=True is very
# permissive (browsers disallow credentialed requests against a literal "*")
# — confirm intent and restrict origins before production, per the TODO.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # TODO: Configure specific origins for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.middleware("http")
async def track_metrics(request, call_next):
    """HTTP middleware that records Prometheus metrics for every request.

    Increments ``request_count`` (method/path/status) and observes wall-clock
    duration in ``request_duration``. Metrics are recorded in a ``finally``
    block so requests whose handler raises are still counted — the original
    version silently skipped both metrics on an unhandled exception. Failed
    requests are labeled with status 500, matching the response the client
    ultimately receives from the server's error handler.
    """
    start_time = time.time()
    status_code = 500  # assume failure; overwritten once a response exists
    try:
        response = await call_next(request)
        status_code = response.status_code
        return response
    finally:
        duration = time.time() - start_time
        # NOTE(review): request.url.path is the raw path, so parameterized
        # routes produce one label set per distinct URL (label cardinality
        # can grow unbounded) — consider using the matched route template.
        request_count.labels(
            method=request.method,
            endpoint=request.url.path,
            status=status_code
        ).inc()
        request_duration.labels(
            method=request.method,
            endpoint=request.url.path
        ).observe(duration)


# Include routers
app.include_router(config.router)  # crawler configuration endpoints (from .api.config)
app.include_router(monitoring.router)  # monitoring endpoints (from .api.monitoring)

# Mount Prometheus metrics endpoint
# make_asgi_app() exposes the default registry, which includes the Counter,
# Histogram and Gauge defined at the top of this module.
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)


@app.get("/")
async def root():
    """Landing endpoint: service identity, status, and a map of useful routes."""
    endpoints = {
        "configs": "/api/v1/crawler/configs",
        "status": "/api/v1/crawler/status",
        "stats": "/api/v1/crawler/stats",
        "health": "/health",
        "metrics": "/metrics",
        "docs": "/docs",
    }
    return {
        "service": "Crawler Service",
        "version": "1.0.0",
        "status": "running",
        "endpoints": endpoints,
    }


if __name__ == "__main__":
    # Plain-text uvicorn logging config (single stdout handler, INFO root).
    log_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "default": {
                "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            },
        },
        "handlers": {
            "default": {
                "formatter": "default",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
            },
        },
        "root": {
            "level": "INFO",
            "handlers": ["default"],
        },
    }

    # Bind address, port and auto-reload are all environment-driven.
    uvicorn.run(
        "src.main:app",
        host=os.getenv("CRAWLER_HOST", "0.0.0.0"),
        port=int(os.getenv("CRAWLER_PORT", 8003)),
        reload=os.getenv("CRAWLER_RELOAD", "false").lower() == "true",
        log_config=log_config,
    )