"""
数据集管理路由
"""
from typing import Optional, List
from fastapi import APIRouter, Depends, HTTPException, Query, File, UploadFile
from fastapi.responses import JSONResponse, FileResponse
from sqlalchemy.orm import Session
from pathlib import Path
from database import get_db
from schemas import (
    DatasetCreate, DatasetUpdate, DatasetResponse,
    GenericResponse, ListResponse, DataResponse,
    DatasetPreviewResponse, FileUploadResponse, DatasetCreationRequest, 
    BaseDatasetListResponse, BaseDatasetInfo
)
from services import DatasetService
from exceptions import NotFoundException, ValidationException
from response_utils import success_response, list_response
from config import config
import uuid

router = APIRouter(prefix="/api/v1", tags=["数据集管理"])

# 数据集服务实例
dataset_service = DatasetService()

@router.get("/datasets", response_model=ListResponse[DatasetResponse])
async def list_datasets(
    page: int = 1,
    page_size: int = 20,
    dataset_type: Optional[str] = None,
    name: Optional[str] = None,
    uploader: Optional[str] = None,
    data_format: Optional[str] = None,
    scenario: Optional[str] = Query(None, description="应用场景过滤：设备运检I, 设备运检S, 环境巡视, 人员安全"),
    origin: Optional[str] = Query(None, description="数据来源过滤：cloud, edge"),
    is_base: Optional[bool] = Query(None, description="是否基础数据集过滤：true/false"),
    db: Session = Depends(get_db)
):
    """List datasets with pagination, optional filters and an aggregate summary."""
    from response_utils import create_pagination_info

    items, total = dataset_service.list_datasets(
        db, page, page_size, name, uploader, data_format, dataset_type, scenario, origin, is_base
    )

    # NOTE(review): the summary is computed with only a subset of the list
    # filters (no scenario/origin/is_base) — confirm with the service layer
    # whether that asymmetry is intentional.
    stats = dataset_service.get_datasets_summary(db, dataset_type, name, uploader, data_format)

    return list_response(
        items=items,
        total_count=total,
        pagination=create_pagination_info(page, page_size, total),
        summary=stats,
        message="查询成功"
    )

@router.get("/datasets/{dataset_id}", response_model=DataResponse[DatasetResponse])
async def get_dataset(dataset_id: str, db: Session = Depends(get_db)):
    """Fetch a single dataset by id; responds 404 when it does not exist."""
    try:
        found = dataset_service.get_dataset_by_id(db, dataset_id)
    except NotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))
    return success_response(data=found, message="查询成功")

@router.post("/datasets", response_model=DataResponse[DatasetResponse], status_code=201)
async def create_dataset(
    dataset: DatasetCreate,
    db: Session = Depends(get_db)
):
    """Create a new dataset record; responds 400 on validation failure."""
    try:
        new_dataset = dataset_service.create_dataset(db, dataset)
    except ValidationException as e:
        raise HTTPException(status_code=400, detail=str(e))
    return success_response(data=new_dataset, message="数据集创建成功")

@router.put("/datasets/{dataset_id}", response_model=DataResponse[DatasetResponse])
async def update_dataset(
    dataset_id: str,
    dataset_update: DatasetUpdate,
    db: Session = Depends(get_db)
):
    """Apply a partial update to an existing dataset; 404 if not found."""
    try:
        refreshed = dataset_service.update_dataset(db, dataset_id, dataset_update)
    except NotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))
    return success_response(data=refreshed, message="数据集更新成功")

@router.delete("/datasets/{dataset_id}", response_model=GenericResponse)
async def delete_dataset(dataset_id: str, db: Session = Depends(get_db)):
    """Delete a dataset by id; responds 404 when it does not exist."""
    try:
        dataset_service.delete_dataset(db, dataset_id)
    except NotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))
    return success_response(message="数据集删除成功")

@router.post("/datasets/reload", response_model=DataResponse[dict])
async def reload_datasets(db: Session = Depends(get_db)):
    """Trigger a reload of the dataset configuration via the service layer."""
    reload_info = dataset_service.reload_datasets(db)
    return success_response(data=reload_info, message="数据集配置重新加载成功")

@router.post("/datasets/refresh-sizes", response_model=DataResponse[dict])
async def refresh_all_dataset_sizes(db: Session = Depends(get_db)):
    """Manually recompute size information for every dataset."""
    refresh_info = dataset_service.refresh_all_dataset_sizes(db)
    return success_response(data=refresh_info, message="数据集大小信息刷新完成")

@router.post("/datasets/{dataset_id}/refresh-size", response_model=DataResponse[DatasetResponse])
async def refresh_dataset_size(dataset_id: str, db: Session = Depends(get_db)):
    """Manually recompute size information for one dataset; 404 if missing."""
    try:
        refreshed = dataset_service.refresh_dataset_size(db, dataset_id)
    except NotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))
    return success_response(data=refreshed, message="数据集大小信息刷新成功")

@router.get("/datasets/{dataset_id}/preview", response_model=DataResponse[DatasetPreviewResponse])
async def get_dataset_preview(
    dataset_id: str, 
    limit: int = Query(default=10, ge=1, le=50, description="预览样本数量限制"),
    db: Session = Depends(get_db)
):
    """Return up to `limit` preview samples for a dataset.

    Responds 404 when the dataset is unknown and 400 when the service
    rejects the preview request.
    """
    try:
        samples = dataset_service.get_dataset_preview(db, dataset_id, limit)
    except NotFoundException as e:
        raise HTTPException(status_code=404, detail=str(e))
    except ValidationException as e:
        raise HTTPException(status_code=400, detail=str(e))
    return success_response(data=samples, message="获取数据集预览成功")

@router.get("/datasets/images/{dataset_name}/{image_name}")
async def get_dataset_image(dataset_name: str, image_name: str):
    """Serve an image file from a dataset's images/ directory.

    The path is resolved and checked for containment in the configured
    datasets directory BEFORE the existence check, so a traversal attempt
    gets a 403 without leaking whether the target file exists.

    Raises:
        HTTPException 403: the resolved path escapes the datasets directory.
        HTTPException 404: the image file does not exist.
        HTTPException 500: any unexpected error while serving the file.
    """
    try:
        base_dir = config.paths.datasets_dir.resolve()
        image_path = (config.paths.datasets_dir / dataset_name / "images" / image_name).resolve()

        # Containment check via relative_to: unlike a str.startswith check,
        # this cannot be fooled by sibling directories with a shared prefix
        # (e.g. "datasets-evil" vs "datasets").
        try:
            image_path.relative_to(base_dir)
        except ValueError:
            raise HTTPException(status_code=403, detail="访问被拒绝")

        # is_file rather than exists: a directory at this path is not servable.
        if not image_path.is_file():
            raise HTTPException(status_code=404, detail="图片文件不存在")

        # Map the extension to a media type; default to JPEG for unknown ones
        # (preserves the original fallback behavior).
        media_type_map = {
            '.jpg': 'image/jpeg',
            '.jpeg': 'image/jpeg',
            '.png': 'image/png',
            '.bmp': 'image/bmp',
            '.gif': 'image/gif',
            '.tiff': 'image/tiff',
            '.tif': 'image/tiff'
        }
        media_type = media_type_map.get(image_path.suffix.lower(), 'image/jpeg')

        return FileResponse(
            path=str(image_path),
            media_type=media_type,
            filename=image_name
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"服务器内部错误: {str(e)}")

@router.get("/datasets/base", response_model=BaseDatasetListResponse)
async def get_base_datasets(db: Session = Depends(get_db)):
    """Return all base datasets as BaseDatasetInfo records.

    NOTE(review): this route is registered after GET /datasets/{dataset_id},
    so FastAPI matches "/datasets/base" against the path-parameter route
    first — verify route ordering if this endpoint appears unreachable.
    """
    try:
        infos = [
            BaseDatasetInfo(
                dataset_id=ds.dataset_id,
                name=ds.name,
                description=ds.description,
                sample_count=ds.sample_count,
                data_size=ds.data_size,
                created_at=ds.created_at.isoformat() if ds.created_at else None,
            )
            for ds in dataset_service.get_base_datasets(db)
        ]
        return BaseDatasetListResponse(
            data=infos,
            total_count=len(infos),
            message="查询成功"
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"获取基础数据集失败: {str(e)}")

@router.post("/datasets/upload-samples", response_model=DataResponse[FileUploadResponse])
async def upload_samples_zip(
    file: UploadFile = File(..., description="样本压缩包")
):
    """Upload a sample archive, stage it in the temp directory and extract it.

    Accepts .zip/.rar up to 500MB; stages the payload under a unique temp
    name and hands it to the service layer for extraction.

    Raises:
        HTTPException 400: unsupported extension or payload over 500MB.
        HTTPException 500: any unexpected failure while staging/extracting.
    """
    try:
        # A multipart part may arrive without a filename; treat that as an
        # unsupported upload instead of crashing on None.lower().
        original_name = file.filename or ""
        if not original_name.lower().endswith(('.zip', '.rar')):
            raise HTTPException(status_code=400, detail="只支持ZIP或RAR格式的压缩文件")

        # Size limit 500MB. The whole payload is read into memory here;
        # acceptable under the limit, but streaming would scale better.
        max_size = 500 * 1024 * 1024  # 500MB
        file_content = await file.read()
        if len(file_content) > max_size:
            raise HTTPException(status_code=400, detail="文件大小不能超过500MB")

        # Keep the original extension: previously every upload was saved
        # with a hard-coded ".zip" suffix, mislabeling .rar archives.
        suffix = Path(original_name).suffix.lower()
        temp_filename = f"samples_{uuid.uuid4().hex[:8]}{suffix}"
        temp_file_path = config.paths.temp_dir / temp_filename

        # Ensure the temp directory exists before writing.
        config.paths.temp_dir.mkdir(parents=True, exist_ok=True)

        with open(temp_file_path, "wb") as buffer:
            buffer.write(file_content)

        # NOTE(review): extract_zip_file is also handed .rar archives —
        # confirm the service supports RAR or reject it at validation time.
        extracted_path = dataset_service.extract_zip_file(temp_file_path)

        file_size = len(file_content)
        size_mb = file_size / (1024 * 1024)

        return success_response(
            data=FileUploadResponse(
                temp_path=str(temp_file_path),
                original_filename=file.filename,
                temp_filename=temp_filename,
                file_size=file_size,
                size_mb=round(size_mb, 2),
                extracted_path=str(extracted_path)
            ),
            message="样本文件上传并解压成功"
        )

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"文件上传失败: {str(e)}")

@router.post("/datasets/upload-labels", response_model=DataResponse[FileUploadResponse])
async def upload_labels_zip(
    file: UploadFile = File(..., description="标签压缩包")
):
    """Upload a label archive, stage it in the temp directory and extract it.

    Accepts .zip/.rar up to 100MB; stages the payload under a unique temp
    name and hands it to the service layer for extraction.

    Raises:
        HTTPException 400: unsupported extension or payload over 100MB.
        HTTPException 500: any unexpected failure while staging/extracting.
    """
    try:
        # A multipart part may arrive without a filename; treat that as an
        # unsupported upload instead of crashing on None.lower().
        original_name = file.filename or ""
        if not original_name.lower().endswith(('.zip', '.rar')):
            raise HTTPException(status_code=400, detail="只支持ZIP或RAR格式的压缩文件")

        # Size limit 100MB. The whole payload is read into memory here;
        # acceptable under the limit, but streaming would scale better.
        max_size = 100 * 1024 * 1024  # 100MB
        file_content = await file.read()
        if len(file_content) > max_size:
            raise HTTPException(status_code=400, detail="文件大小不能超过100MB")

        # Keep the original extension: previously every upload was saved
        # with a hard-coded ".zip" suffix, mislabeling .rar archives.
        suffix = Path(original_name).suffix.lower()
        temp_filename = f"labels_{uuid.uuid4().hex[:8]}{suffix}"
        temp_file_path = config.paths.temp_dir / temp_filename

        # Ensure the temp directory exists before writing.
        config.paths.temp_dir.mkdir(parents=True, exist_ok=True)

        with open(temp_file_path, "wb") as buffer:
            buffer.write(file_content)

        # NOTE(review): extract_zip_file is also handed .rar archives —
        # confirm the service supports RAR or reject it at validation time.
        extracted_path = dataset_service.extract_zip_file(temp_file_path)

        file_size = len(file_content)
        size_mb = file_size / (1024 * 1024)

        return success_response(
            data=FileUploadResponse(
                temp_path=str(temp_file_path),
                original_filename=file.filename,
                temp_filename=temp_filename,
                file_size=file_size,
                size_mb=round(size_mb, 2),
                extracted_path=str(extracted_path)
            ),
            message="标签文件上传并解压成功"
        )

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"文件上传失败: {str(e)}")

@router.post("/datasets/create-from-upload", response_model=DataResponse[DatasetResponse])
async def create_dataset_from_upload(
    request: DatasetCreationRequest,
    db: Session = Depends(get_db)
):
    """Create a dataset from previously uploaded archive files.

    Responds 400 when the service rejects the request and 500 on any
    other failure.
    """
    try:
        new_dataset = dataset_service.create_dataset_from_uploaded_files(db, request)
    except ValidationException as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"数据集创建失败: {str(e)}")
    return success_response(data=new_dataset, message="数据集创建成功")