"""
FastAPI main application for RAG service.
"""
import os
import uuid
from typing import Optional, List
from dotenv import load_dotenv
from fastapi import FastAPI, UploadFile, File, HTTPException
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse
from pydantic import BaseModel

# Load environment variables from .env file
load_dotenv()

from app.document_processor import DocumentProcessor
from app.vector_store import VectorStore
from app.query_service import QueryService
from app.embedding_service import EmbeddingService

# FastAPI application instance; title/version appear in the generated /docs UI.
app = FastAPI(title="RAG MVP Service", version="1.0.0")

# Mount static files for frontend demo
# (mounted only if the directory exists, so the API still runs without a frontend)
static_dir = "static"
if os.path.exists(static_dir):
    app.mount("/static", StaticFiles(directory=static_dir), name="static")

# Initialize services
# Module-level singletons shared by every request handler below.
document_processor = DocumentProcessor()  # splits uploaded files into text chunks
vector_store = VectorStore()  # stores chunks + embeddings; also tracks file metadata
embedding_service = EmbeddingService()  # wraps the embeddings API (OpenAI-compatible)
query_service = QueryService(vector_store)  # retrieval + answering over the shared store


class QueryRequest(BaseModel):
    """Query request model (JSON body of POST /query)."""
    # Natural-language question to answer against the indexed documents.
    question: str
    # Number of top-scoring chunks to retrieve as evidence.
    top_k: int = 4


class QueryResponse(BaseModel):
    """Query response model (JSON body returned by POST /query)."""
    # Generated answer text.
    answer: str
    # Supporting chunks the answer was grounded on, as returned by the
    # query service (shape of each dict is defined there).
    evidence: List[dict]


class UploadResponse(BaseModel):
    """Upload response model (returned by POST /upload and PUT /files/{file_id})."""
    # Unique identifier assigned to (or reused for) the uploaded file.
    file_id: str
    # Number of text chunks created from the file.
    chunks: int


class FileInfo(BaseModel):
    """File information model, one entry per uploaded document."""
    # Unique file identifier.
    file_id: str
    # Original filename as uploaded.
    filename: str
    # Number of chunks stored for this file.
    chunk_count: int
    # Upload timestamp as a string (format produced by the vector store).
    upload_time: str


class FileListResponse(BaseModel):
    """File list response model (returned by GET /files)."""
    # All known files with their metadata.
    files: List[FileInfo]
    # Convenience count, equal to len(files).
    total: int


@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve the frontend demo page."""
    html_path = os.path.join(static_dir, "index.html")
    if os.path.exists(html_path):
        with open(html_path, "r", encoding="utf-8") as f:
            return f.read()
    return """
    <html>
        <head><title>RAG MVP Service</title></head>
        <body>
            <h1>RAG MVP Service</h1>
            <p>API is running. Use /upload to upload documents and /query to query.</p>
            <p>See <a href="/docs">/docs</a> for API documentation.</p>
        </body>
    </html>
    """


@app.post("/upload", response_model=UploadResponse)
async def upload_file(file: UploadFile = File(...)):
    """
    Upload a document file (.txt, .md, or .pdf).
    
    Returns file_id and number of chunks created.
    """
    # Validate file type
    allowed_extensions = {".txt", ".md", ".pdf"}
    file_ext = os.path.splitext(file.filename)[1].lower()
    
    if file_ext not in allowed_extensions:
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported file type. Allowed: {', '.join(allowed_extensions)}"
        )
    
    # Generate file_id
    file_id = str(uuid.uuid4())
    
    # Read file content
    content = await file.read()
    
    # Process document
    try:
        chunks = document_processor.process_file(
            content=content,
            filename=file.filename,
            file_id=file_id,
            file_ext=file_ext
        )
        
        if not chunks:
            raise HTTPException(
                status_code=400,
                detail="File is empty or could not be processed. Please ensure the file contains text content."
            )
        
        # Generate embeddings for chunks
        chunk_texts = [chunk["text"] for chunk in chunks]
        embeddings = None
        embedding_error = None
        
        try:
            embeddings = embedding_service.get_embeddings(chunk_texts)
        except Exception as e:
            embedding_error = str(e)
            print(f"Embedding generation error: {embedding_error}")
        
        if not embeddings:
            # Provide detailed error message
            if not embedding_service.is_available():
                error_msg = (
                    "OPENAI_API_KEY is required to generate embeddings. "
                    "Please set OPENAI_API_KEY in your .env file or environment variables."
                )
            elif embedding_error:
                # Check if it's a 404 error (API endpoint not found)
                if "404" in embedding_error or "page not found" in embedding_error.lower():
                    error_msg = (
                        f"Embedding API endpoint not found (404). "
                        f"The configured API ({embedding_service.base_url}) may not support embeddings. "
                        f"Please check your OPENAI_BASE_URL configuration. "
                        f"Note: DeepSeek may not support embeddings API, consider using OpenAI for embeddings."
                    )
                else:
                    error_msg = (
                        f"Failed to generate embeddings: {embedding_error}. "
                        f"Please check your API configuration and network connection."
                    )
            else:
                error_msg = "Failed to generate embeddings. Please check your API configuration."
            
            raise HTTPException(status_code=400, detail=error_msg)
        
        # Store chunks in vector store with embeddings
        try:
            vector_store.add_chunks(file_id, chunks, embeddings=embeddings, filename=file.filename)
        except Exception as e:
            raise HTTPException(
                status_code=500,
                detail=f"Failed to store document in vector database: {str(e)}"
            )
        
        return UploadResponse(file_id=file_id, chunks=len(chunks))
    
    except HTTPException:
        # Re-raise HTTP exceptions as-is
        raise
    except Exception as e:
        # Log the full error for debugging
        import traceback
        error_trace = traceback.format_exc()
        print(f"Error processing file: {error_trace}")
        raise HTTPException(
            status_code=500,
            detail=f"Error processing file: {str(e)}"
        )


@app.post("/query", response_model=QueryResponse)
async def query(request: QueryRequest):
    """
    Query the RAG service with a question.
    
    Returns answer and evidence chunks.
    """
    try:
        result = await query_service.query(
            question=request.question,
            top_k=request.top_k
        )
        return QueryResponse(
            answer=result["answer"],
            evidence=result["evidence"]
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error processing query: {str(e)}")


@app.get("/health")
async def health():
    """Health check endpoint."""
    return {"status": "healthy"}


@app.get("/files", response_model=FileListResponse)
async def list_files():
    """
    List all uploaded files with their metadata.
    
    Returns list of files sorted by upload time (newest first).
    """
    try:
        files = vector_store.list_files()
        file_infos = [
            FileInfo(
                file_id=f["file_id"],
                filename=f["filename"],
                chunk_count=f["chunk_count"],
                upload_time=f["upload_time"]
            )
            for f in files
        ]
        return FileListResponse(files=file_infos, total=len(file_infos))
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error listing files: {str(e)}")


@app.get("/files/{file_id}", response_model=FileInfo)
async def get_file_info(file_id: str):
    """
    Get metadata for a specific file.
    
    Args:
        file_id: Unique file identifier
    """
    try:
        file_metadata = vector_store.get_file_metadata(file_id)
        if not file_metadata:
            raise HTTPException(status_code=404, detail=f"File with id {file_id} not found")
        
        return FileInfo(
            file_id=file_metadata["file_id"],
            filename=file_metadata["filename"],
            chunk_count=file_metadata["chunk_count"],
            upload_time=file_metadata["upload_time"]
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting file info: {str(e)}")


@app.delete("/files/{file_id}")
async def delete_file(file_id: str):
    """
    Delete a file and all its chunks from the vector store.
    
    Args:
        file_id: Unique file identifier
    """
    try:
        success = vector_store.delete_file(file_id)
        if not success:
            raise HTTPException(status_code=404, detail=f"File with id {file_id} not found")
        
        return {"message": f"File {file_id} deleted successfully", "file_id": file_id}
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error deleting file: {str(e)}")


@app.put("/files/{file_id}", response_model=UploadResponse)
async def update_file(file_id: str, file: UploadFile = File(...)):
    """
    Update an existing file by replacing its content.
    
    This will delete all old chunks and create new ones from the uploaded file.
    
    Args:
        file_id: Unique file identifier
        file: New file content to replace the existing file
    """
    # Validate file type
    allowed_extensions = {".txt", ".md", ".pdf"}
    file_ext = os.path.splitext(file.filename)[1].lower()
    
    if file_ext not in allowed_extensions:
        raise HTTPException(
            status_code=400,
            detail=f"Unsupported file type. Allowed: {', '.join(allowed_extensions)}"
        )
    
    # Read file content
    content = await file.read()
    
    # Process document
    try:
        chunks = document_processor.process_file(
            content=content,
            filename=file.filename,
            file_id=file_id,
            file_ext=file_ext
        )
        
        if not chunks:
            raise HTTPException(
                status_code=400,
                detail="File is empty or could not be processed. Please ensure the file contains text content."
            )
        
        # Generate embeddings for chunks
        chunk_texts = [chunk["text"] for chunk in chunks]
        embeddings = None
        embedding_error = None
        
        try:
            embeddings = embedding_service.get_embeddings(chunk_texts)
        except Exception as e:
            embedding_error = str(e)
            print(f"Embedding generation error: {embedding_error}")
        
        if not embeddings:
            if not embedding_service.is_available():
                error_msg = (
                    "Embedding service not available. "
                    "Please check your embedding model configuration."
                )
            elif embedding_error:
                error_msg = (
                    f"Failed to generate embeddings: {embedding_error}. "
                    f"Please check your configuration and network connection."
                )
            else:
                error_msg = "Failed to generate embeddings. Please check your configuration."
            
            raise HTTPException(status_code=400, detail=error_msg)
        
        # Update file in vector store
        try:
            success = vector_store.update_file(file_id, chunks, embeddings=embeddings, filename=file.filename)
            if not success:
                raise HTTPException(status_code=404, detail=f"File with id {file_id} not found")
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=500,
                detail=f"Failed to update document in vector database: {str(e)}"
            )
        
        return UploadResponse(file_id=file_id, chunks=len(chunks))
    
    except HTTPException:
        raise
    except Exception as e:
        import traceback
        error_trace = traceback.format_exc()
        print(f"Error updating file: {error_trace}")
        raise HTTPException(
            status_code=500,
            detail=f"Error updating file: {str(e)}"
        )

