from fastapi import APIRouter, UploadFile, File, HTTPException, Depends
from typing import List, Optional
import uuid
import os
import aiofiles
from scorpio.services.ingestion import IngestionService, ChunkConfig
from scorpio.services.database import DocumentProcessor
from scorpio.shared import get_db_manager, get_embedding_config

from scorpio.core.database import VectorDatabaseManager

from scorpio.core.common import EmbeddingConfig
from scorpio.agenthub import IngestionAgent
from .utils import DocumentUploadResponse
from scorpio.core.common import get_logger

logger = get_logger(__name__)

router = APIRouter(prefix="/api/v1", tags=["RAG document API"])

@router.post("/documents/upload", response_model=DocumentUploadResponse)
async def upload_document(
    file: UploadFile = File(...),
    title: Optional[str] = None,
    chunk_size: int = 1000,
    chunk_overlap: int = 200,
    embedding_config: EmbeddingConfig = Depends(get_embedding_config),
    db_manager: VectorDatabaseManager = Depends(get_db_manager),
):
    """Upload a document, persist it to a temp file, and run the ingestion pipeline.

    Args:
        file: The uploaded document file.
        title: Optional display title; defaults to the original filename.
        chunk_size: Target size of each text chunk during ingestion.
        chunk_overlap: Overlap between consecutive chunks.
        embedding_config: Injected embedding configuration.
        db_manager: Injected vector database manager.

    Returns:
        A DocumentUploadResponse-shaped dict with the ingested document's
        external id, title, total chunk count, file size, and status.

    Raises:
        HTTPException: 500 when any step of the ingestion pipeline fails.
    """
    logger.info(f"start to upload and ingest document: {file.filename}")
    # Write the upload to a uniquely named temp file so the ingestion agent
    # can read it from disk; the extension is preserved for type detection.
    file_ext = os.path.splitext(file.filename)[1].lower()
    temp_filename = f"temp_{uuid.uuid4()}{file_ext}"

    ingestion_service = IngestionService(
        chunk_config=ChunkConfig(chunk_size=chunk_size,
                                 chunk_overlap=chunk_overlap))
    document_processor = DocumentProcessor(db_manager, embedding_config)

    try:
        # Persist the uploaded bytes; read fully once so we can record the size.
        async with aiofiles.open(temp_filename, 'wb') as temp_file:
            content = await file.read()
            file_size = len(content)
            await temp_file.write(content)
        # Shared state consumed by the ingestion agent's flow.
        # NOTE: the original dict listed the "ingestion" key twice; the
        # duplicate has been removed (same value, so behavior is unchanged).
        shared = {
            "context": {
                "step": "document",
                "db_manager": db_manager,
                "ingestion": ingestion_service,
                "processor": document_processor,
                "embedding_config": embedding_config,
            },
            "document": {
                "file_path": temp_filename,
                "content_type": file_ext,
                "file_size": file_size,
                "title": title or file.filename,
            }
        }
        agent = IngestionAgent(shared)
        agent.create_flow()
        await agent.run()
        # The agent mutates its shared state; the ingested document record
        # (with ids and chunk counts filled in) is read back from it.
        document = agent.shared["document"]
        logger.info(f"upload and ingest document success with document: {document['id']}")
        return {
            "document_external_id": document["external_id"],
            "document_title": document["title"],
            "total_chunks": document["total_chunks"],
            "file_size": file_size,
            "status": "success",
        }
    except Exception as e:
        # Log the full traceback server-side before converting to an HTTP
        # error; chain the cause so debugging tools see the original failure.
        logger.exception("document ingestion failed")
        raise HTTPException(status_code=500, detail=f"文档处理失败: {str(e)}") from e
    finally:
        # Always remove the temp file, whether ingestion succeeded or failed.
        if os.path.exists(temp_filename):
            os.remove(temp_filename)
            logger.debug(f"remove temp file: {temp_filename}")

@router.get("/documents/{document_external_id}/stats")
async def get_document_stats(document_external_id: str,
        embedding_config: EmbeddingConfig = Depends(get_embedding_config),
        db_manager: VectorDatabaseManager = Depends(get_db_manager)
    ):
    """Return ingestion statistics for a document.

    Args:
        document_external_id: External id of the document to look up.
        embedding_config: Injected embedding configuration.
        db_manager: Injected vector database manager.

    Returns:
        The stats object produced by DocumentProcessor.get_stats.

    Raises:
        HTTPException: 404 when no stats exist for the given id.
    """
    logger.info(f"start to get document stats: {document_external_id}")
    # Consistency fix: upload_document builds DocumentProcessor(db_manager,
    # embedding_config), but this endpoint passed embedding_config.dimension —
    # the same constructor argument cannot be both a config object and an int.
    # Aligned with the upload path; TODO(review): confirm against
    # DocumentProcessor.__init__'s actual signature.
    processor = DocumentProcessor(db_manager, embedding_config)
    stats = await processor.get_stats(document_external_id)
    if not stats:
        raise HTTPException(status_code=404, detail="文档不存在")
    return stats

