import asyncio
import json
import os
import shutil
import uuid

from fastapi import APIRouter, BackgroundTasks, Depends, File, Query, UploadFile
from pydantic import BaseModel
from redis.asyncio import Redis
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.responses import StreamingResponse

from core.config import settings
from core.dependencies import get_db, get_redis
from core.schemas.pagination import PaginatedResponse
from core.schemas.response import success, fail
from src.modules.knowbase.services.Impl.file_service import FileService
from src.modules.knowbase.services.containers import Container
from src.modules.knowbase.services.file_process import process_pdf_file
# NOTE(review): the relative import below re-binds the same Container name as the
# absolute import above — presumably the same module; confirm and drop one of them.
from ..services.containers import Container
# Module-level wiring: DI container, shared file service, and the router.

# In-process registry of running parse tasks, keyed by task_id.
# NOTE(review): entries are added in process_file but never removed here —
# this grows without bound; confirm cleanup happens elsewhere.
active_tasks = {}
router = APIRouter(prefix="/file", tags=["file"])
# Single shared service instance used by every route in this module.
container  =Container()
file_service = container.file_service()

@router.get("/list", response_model=PaginatedResponse)
async def get_file_list(
        user_id: str = Query(None, description="用户id"),
        kb_id: str = Query(None, description="知识库id"),
        keyword: str = Query(None, description="关键词"),
        page: int = Query(1, description="页码", ge=1),
        page_size: int = Query(10, description="每页条数", le=100),
):
    """Return a paginated file listing, optionally filtered by user,
    knowledge base and keyword."""
    page_data = await file_service.get_file_list(
        user_id=user_id,
        kb_id=kb_id,
        keyword=keyword,
        page=page,
        page_size=page_size,
    )
    return success(data=page_data)


@router.delete("/delete/{file_id}")
async def delete_by_file_id(file_id: str):
    """Delete a single file by its id and report the outcome."""
    deleted = await file_service.delete_by_file_id(file_id)
    if not deleted:
        return fail(data="删除失败")
    return success(data="删除成功！")


@router.post("/upload")
async def upload_file(knowbase_name: str, kb_id: str, file: UploadFile = File(...)):
    """Store an uploaded file in the given knowledge base.

    Persistence (directory layout, writing the file) is delegated entirely
    to the file service; this handler only wraps the result in the standard
    success envelope.
    """
    # Dead commented-out code that duplicated the service's path/mkdir logic
    # was removed here — the service is the single owner of that behavior.
    result = await file_service.upload_file(knowbase_name, kb_id, file)
    return success(message="文件上传成功", data=result)

class FileParseRequest(BaseModel):
    """Request body for POST /file/parse/start."""

    file_path: str    # absolute/relative path of the stored file on disk
    file_id: str      # id of the file record to parse
    file_name: str    # original file name (used by the parser)
    knowbase_id: str  # owning knowledge base id

@router.post("/parse/start")
async def process_file(
    parse_request: FileParseRequest,
    background_tasks: BackgroundTasks,
    session: AsyncSession = Depends(get_db),
):
    """Start asynchronous parsing of an uploaded file.

    Schedules ``process_pdf_file`` as a FastAPI background task, records the
    new task in the in-memory ``active_tasks`` registry, and returns the
    generated task id to the caller.
    """
    # NOTE(review): the request-scoped DB session is handed to a background
    # task that runs after the response is sent — confirm the session is
    # still open/usable at that point.
    req = parse_request
    task_id = str(uuid.uuid4())
    background_tasks.add_task(
        process_pdf_file,
        task_id,
        req.file_id,
        req.file_path,
        os.path.dirname(req.file_path),
        req.file_name,
        req.knowbase_id,
        session,
    )
    active_tasks[task_id] = {"file_id": req.file_id, "status": "processing"}
    return {
        "statusCode": 200,
        "message": "解析任务已启动",
        "data": {"task_id": task_id, "file_id": req.file_id},
    }

@router.get("/parse/process/{task_id}")
async def get_parse_progress(task_id: str, redis: Redis = Depends(get_redis)):
    """Stream file-parse progress to the client via Server-Sent Events.

    Polls the Redis hash ``task:{task_id}`` once per second and emits its
    fields as SSE ``data:`` frames until the task reports ``completed`` or
    the hash disappears from Redis.
    """
    async def event_generator():
        key = f"task:{task_id}"
        try:
            while True:
                # A hash without a "type" field means the task is unknown or expired.
                if not await redis.hexists(key, "type"):
                    yield f"data: {json.dumps({'error': 'Task not found'})}\n\n"
                    break

                # Pull the latest progress snapshot from Redis.
                progress = await redis.hgetall(key)
                # NOTE(review): assumes the Redis client decodes responses to str;
                # with raw bytes keys these .get() lookups would all miss — confirm
                # decode_responses is enabled in get_redis.
                task_type = progress.get("type", '')

                sse_data = {
                    "type": task_type,
                    "percentage": progress.get("percentage", 0),
                    "count": progress.get("count", 0),
                    "message": progress.get("message", '')
                }
                yield f"data: {json.dumps(sse_data)}\n\n"

                if task_type == "completed":
                    break
                # Throttle polling: without this sleep (previously commented out)
                # the loop spins flat-out, pegging a CPU core and flooding Redis.
                await asyncio.sleep(1)
        except asyncio.CancelledError:
            # Client closed the SSE connection mid-stream.
            print(f"Client disconnected from task {task_id}")
        finally:
            print(f"SSE stream for task {task_id} closed.")

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive"
        }
    )
