from fastapi import APIRouter, Depends, Query, HTTPException
from typing import Optional, List
from datetime import datetime, timedelta

from app.core.deps import get_current_active_user
from app.models.models import User
from app.models.log_models import RequestLog, LoginHistory
from app.schemas.log import (
    RequestLogOut, RequestLogQuery, 
    LoginHistoryOut, LoginHistoryQuery
)
from app.schemas.base import ResponseBase, PageResponseBase, PageInfo

# Router aggregating the log endpoints defined in this module.
router = APIRouter()

@router.get("/request-logs", response_model=PageResponseBase[List[RequestLogOut]])
async def get_request_logs(
    query: RequestLogQuery = Depends(),
    current_user: User = Depends(get_current_active_user)
):
    """Return a paginated list of request logs.

    Supports fuzzy filtering by username / IP address / path, exact
    filtering by HTTP method and status code, and an optional
    ``created_at`` time range. Results are ordered newest-first.
    """
    def _parse_iso(raw: Optional[str]) -> Optional[datetime]:
        # Accepts ISO-8601 strings; a trailing 'Z' (UTC designator) is
        # rewritten to '+00:00' because datetime.fromisoformat cannot
        # parse 'Z' before Python 3.11. Unparseable or blank values are
        # silently ignored (filter simply not applied).
        if not raw or not raw.strip():
            return None
        try:
            return datetime.fromisoformat(raw.replace('Z', '+00:00'))
        except (ValueError, TypeError):
            return None

    # Build Tortoise ORM filter kwargs from the optional query fields.
    filters = {}
    if query.username:
        filters["username__icontains"] = query.username
    if query.ip_address:
        filters["ip_address__icontains"] = query.ip_address
    if query.method:
        filters["method"] = query.method
    if query.url:
        filters["path__icontains"] = query.url
    # Truthiness check means 0 is treated as "no filter"; real HTTP
    # status codes are never 0, so this is safe.
    if query.status_code:
        filters["status_code"] = query.status_code

    start_time = _parse_iso(query.start_time)
    if start_time is not None:
        filters["created_at__gte"] = start_time
    end_time = _parse_iso(query.end_time)
    if end_time is not None:
        filters["created_at__lte"] = end_time

    # Total count for pagination metadata.
    total = await RequestLog.filter(**filters).count()

    # Page slice, newest first.
    logs = await RequestLog.filter(**filters).order_by("-created_at").offset(
        (query.page - 1) * query.page_size
    ).limit(query.page_size).all()

    # Convert Tortoise ORM models into Pydantic output models.
    log_list = [
        RequestLogOut(
            id=log.id,
            user_id=log.user_id,
            username=log.username,
            ip_address=log.ip_address,
            method=log.method,
            path=log.path,
            status_code=log.status_code,
            response_time=log.response_time,
            user_agent=log.user_agent,
            created_at=log.created_at,
            updated_at=log.updated_at,
        )
        for log in logs
    ]

    page_info = PageInfo(
        page=query.page,
        page_size=query.page_size,
        total=total
    )

    return PageResponseBase[List[RequestLogOut]](
        data=log_list,
        page_info=page_info
    )

@router.get("/request-logs/{log_id}", response_model=ResponseBase[RequestLogOut])
async def get_request_log_detail(
    log_id: int,
    current_user: User = Depends(get_current_active_user)
):
    """Return one request log by id, including request/response bodies."""
    try:
        record = await RequestLog.get_or_none(id=log_id)
        if record is None:
            raise HTTPException(status_code=404, detail="日志记录不存在")

        # Convert the Tortoise ORM model into the Pydantic output model.
        detail = RequestLogOut(
            id=record.id,
            user_id=record.user_id,
            username=record.username,
            ip_address=record.ip_address,
            method=record.method,
            path=record.path,
            status_code=record.status_code,
            response_time=record.response_time,
            user_agent=record.user_agent,
            request_body=record.request_body,
            response_body=record.response_body,
            created_at=record.created_at,
            updated_at=record.updated_at,
        )
        return ResponseBase[RequestLogOut](data=detail)
    except HTTPException:
        # Deliberate HTTP errors (e.g. the 404 above) pass through untouched.
        raise
    except Exception as e:
        # Anything else is surfaced as a generic 500.
        raise HTTPException(status_code=500, detail=f"获取日志详情失败: {str(e)}")

@router.delete("/request-logs/batch", response_model=ResponseBase[dict])
async def delete_request_logs_batch(
    days: int = Query(..., ge=0, description="保留天数"),
    current_user: User = Depends(get_current_active_user)
):
    """Purge request logs older than the retention window.

    Deletes every RequestLog whose ``created_at`` is earlier than
    ``now - days`` days; ``days=0`` therefore removes all existing logs.
    ``ge=0`` rejects negative values, which would put the cutoff in the
    future and silently wipe every log.
    """
    try:
        # NOTE(review): datetime.now() is timezone-naive — assumes the DB
        # stores naive local timestamps; confirm, otherwise the cutoff
        # comparison is skewed by the UTC offset.
        cutoff_time = datetime.now() - timedelta(days=days)

        # Delete everything created before the cutoff.
        deleted_count = await RequestLog.filter(created_at__lt=cutoff_time).delete()

        return ResponseBase[dict](
            data={"message": f"成功清理了 {deleted_count} 条日志记录"}
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"清理日志失败: {str(e)}")

@router.get("/login-history", response_model=PageResponseBase[List[LoginHistoryOut]])
async def get_login_history(
    query: LoginHistoryQuery = Depends(),
    current_user: User = Depends(get_current_active_user)
):
    """Return a paginated list of login-history records.

    Supports fuzzy filtering by username / IP, filtering by login
    success status (string boolean), and an optional ``created_at``
    time range. Results are ordered newest-first.
    """
    def _parse_iso(raw: Optional[str]) -> Optional[datetime]:
        # Accepts ISO-8601 strings; a trailing 'Z' (UTC designator) is
        # rewritten to '+00:00' because datetime.fromisoformat cannot
        # parse 'Z' before Python 3.11. Unparseable or blank values are
        # silently ignored (filter simply not applied).
        if not raw or not raw.strip():
            return None
        try:
            return datetime.fromisoformat(raw.replace('Z', '+00:00'))
        except (ValueError, TypeError):
            return None

    # Build Tortoise ORM filter kwargs from the optional query fields.
    filters = {}
    if query.username:
        filters["username__icontains"] = query.username
    if query.ip:
        filters["ip_address__icontains"] = query.ip

    if query.status is not None and query.status.strip():
        # Manual boolean parsing; unrecognized strings are ignored.
        status = query.status.lower()
        if status in ('true', '1', 'yes'):
            filters["login_status"] = True
        elif status in ('false', '0', 'no'):
            filters["login_status"] = False

    start_time = _parse_iso(query.start_time)
    if start_time is not None:
        filters["created_at__gte"] = start_time
    end_time = _parse_iso(query.end_time)
    if end_time is not None:
        filters["created_at__lte"] = end_time

    # Total count for pagination metadata.
    total = await LoginHistory.filter(**filters).count()

    # Page slice, newest first.
    logs = await LoginHistory.filter(**filters).order_by("-created_at").offset(
        (query.page - 1) * query.page_size
    ).limit(query.page_size).all()

    # Convert Tortoise ORM models into Pydantic output models.
    log_list = [
        LoginHistoryOut(
            id=log.id,
            user_id=log.user_id,
            username=log.username,
            ip_address=log.ip_address,
            user_agent=log.user_agent,
            login_status=log.login_status,
            login_message=log.login_message,
            created_at=log.created_at,
            updated_at=log.updated_at,
        )
        for log in logs
    ]

    page_info = PageInfo(
        page=query.page,
        page_size=query.page_size,
        total=total
    )

    return PageResponseBase[List[LoginHistoryOut]](
        data=log_list,
        page_info=page_info
    )

@router.get("/my-login-history", response_model=PageResponseBase[List[LoginHistoryOut]])
async def get_my_login_history(
    page: int = Query(1, ge=1, description="页码"),
    page_size: int = Query(10, ge=1, description="每页条数"),
    current_user: User = Depends(get_current_active_user)
):
    """Return the current user's own login history, paginated.

    ``ge=1`` on both pagination parameters rejects zero/negative values
    up front — previously ``page=0`` produced a negative OFFSET, which
    databases reject with a server error instead of a clean 422.
    """
    # Only the authenticated user's own records.
    filters = {"user_id": current_user.id}

    # Total count for pagination metadata.
    total = await LoginHistory.filter(**filters).count()

    # Page slice, newest first.
    logs = await LoginHistory.filter(**filters).order_by("-created_at").offset(
        (page - 1) * page_size
    ).limit(page_size).all()

    # Convert Tortoise ORM models into Pydantic output models.
    log_list = [
        LoginHistoryOut(
            id=log.id,
            user_id=log.user_id,
            username=log.username,
            ip_address=log.ip_address,
            user_agent=log.user_agent,
            login_status=log.login_status,
            login_message=log.login_message,
            created_at=log.created_at,
            updated_at=log.updated_at,
        )
        for log in logs
    ]

    page_info = PageInfo(
        page=page,
        page_size=page_size,
        total=total
    )

    return PageResponseBase[List[LoginHistoryOut]](
        data=log_list,
        page_info=page_info
    )
