from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from datetime import datetime, timedelta, date
import redis
import time as atime

from ..database import get_db 
from ..crud import statistics as crud_stats
from ..schemas import statistics as schemas_stats

# Create the API router for this module's statistics endpoints.
router = APIRouter(
    prefix="/api/v1/statistics",  # every route defined in this file is mounted under /api/v1/statistics
    tags=["Statistics"],         # groups these endpoints under one section in the generated API docs
)

def calculate_previous_time_slot(request_time: datetime, slot_minutes: int = 30) -> tuple[datetime, datetime]:
    """
    Compute the most recent fully-elapsed time slot before ``request_time``.

    Examples (with the default 30-minute slots):
    - request 13:10 -> returns [12:30, 13:00)
    - request 13:45 -> returns [13:00, 13:30)
    - request 13:30 -> returns [13:00, 13:30)

    Args:
        request_time: The reference point in time.
        slot_minutes: Slot width in minutes. Must be a positive divisor of 60
            (slots are aligned within the hour via ``datetime.replace``).
            Defaults to 30, preserving the original behavior.

    Returns:
        A ``(start, end)`` pair of datetimes for the previous slot,
        where ``end`` is exclusive.

    Raises:
        ValueError: If ``slot_minutes`` is not a positive divisor of 60.
    """
    if slot_minutes <= 0 or 60 % slot_minutes != 0:
        raise ValueError("slot_minutes must be a positive divisor of 60")

    # Floor the request time down to the start of its own slot; that instant
    # is also the (exclusive) end of the previous, fully-elapsed slot.
    floored_minute = (request_time.minute // slot_minutes) * slot_minutes
    previous_slot_end_time = request_time.replace(minute=floored_minute, second=0, microsecond=0)

    # The previous slot starts one slot-width earlier.
    previous_slot_start_time = previous_slot_end_time - timedelta(minutes=slot_minutes)

    return previous_slot_start_time, previous_slot_end_time

@router.get("/flow", response_model=schemas_stats.FlowStatsResponse)
def get_bike_flow(time: datetime, db: Session = Depends(get_db)):
    """
    Return bike-flow data for the most recent complete time slot **before**
    the given time point.
    - **time**: the request timestamp, e.g. `2021-05-01T13:10:00`
    - The endpoint derives the previous complete slot (`12:30:00` - `13:00:00`)
      and returns that slot's data.
    """
    print(f"Received request for bike flow stats at time: {time.isoformat()}")
    t0 = atime.time()

    slot_start, slot_end = calculate_previous_time_slot(time)

    # Fetch the raw rows for the slot and map each one onto the Pydantic model.
    rows = crud_stats.get_flow_stats_by_timeslot(db, slot_start.isoformat(), slot_end.isoformat())
    per_community = [schemas_stats.CommunityFlowStats(**dict(row._mapping)) for row in rows]

    payload = {
        "start_slot": slot_start.isoformat(),
        "end_slot": slot_end.isoformat(),
        "stats": per_community,
    }

    t1 = atime.time()
    print(f"Processed bike flow stats request in {t1 - t0:.3f} seconds")

    return {
        "code": 0,
        "message": "success",
        "data": payload,
    }


@router.get("/od", response_model=schemas_stats.ODResponse)
def get_od_pairs(time: datetime, min_flow: int = 1, db: Session = Depends(get_db)):
    """
    Return OD (origin-destination) data for the most recent complete time slot
    **before** the given time point.
    - **time**: the request timestamp, e.g. `2021-05-01T13:10:00`.
    - **min_flow**: minimum flow threshold; only paths with flow greater than
      or equal to this value are returned. Defaults to 1.
    """
    print(f"Received request for OD pairs at time: {time.isoformat()}, min_flow: {min_flow}")
    t0 = atime.time()

    slot_start, slot_end = calculate_previous_time_slot(time)

    # Query the slot's OD rows, then convert them into Pydantic models.
    rows = crud_stats.get_od_pairs_by_timeslot(
        db,
        slot_start.isoformat(),
        slot_end.isoformat(),
        min_flow
    )
    pairs = [schemas_stats.ODPair(**dict(row._mapping)) for row in rows]

    payload = {
        "start_slot": slot_start.isoformat(),
        "end_slot": slot_end.isoformat(),
        "od_pairs": pairs,
    }

    t1 = atime.time()
    print(f"Processed OD pairs request in {t1 - t0:.3f} seconds")

    return {
        "code": 0,
        "message": "success",
        "data": payload,
    }


@router.get("/timeseries/{community_id}", response_model=schemas_stats.TimeSeriesResponse)
def get_community_timeseries(community_id: int, time: datetime, db: Session = Depends(get_db)):
    """
    Return the time series of one community over the 24 hours (48 half-hour
    slots) preceding the given time point.
    - **community_id**: ID of the community.
    - **time**: end of the query window, e.g. `2021-05-02T13:10:00`.
    - Returns the 48 slots from `2021-05-01T13:00:00` to `2021-05-02T13:00:00`.
    """
    print(f"Received request for community timeseries at community_id: {community_id}, time: {time.isoformat()}")
    start_atime = atime.time()

    # 1. The 24-hour window ends at the start of the slot containing `time`
    #    (e.g. a request at 13:10 yields a window ending at 13:00).
    #    Fix (consistency): reuse the shared slot helper instead of
    #    duplicating its flooring logic inline — the previous slot's
    #    *end* is exactly the current slot's start.
    _, end_time = calculate_previous_time_slot(time)

    # 2. The window starts 24 hours earlier.
    start_time = end_time - timedelta(days=1)

    # 3. Fetch the raw series rows for the window.
    timeseries_data = crud_stats.get_timeseries_by_community(
        db,
        community_id,
        start_time.isoformat(),
        end_time.isoformat()
    )

    # 4. Map each row onto the Pydantic model.
    series_points = [schemas_stats.TimeSeriesPoint(**dict(row._mapping)) for row in timeseries_data]

    # NOTE(review): unlike /flow and /od, this payload carries datetime
    # objects rather than isoformat strings; Pydantic serializes them, but
    # confirm TimeSeriesResponse expects datetimes before normalizing.
    response_data = {
        "community_id": community_id,
        "start_time": start_time,
        "end_time": end_time,
        "series": series_points
    }

    end_atime = atime.time()
    print(f"Processed community timeseries request in {end_atime - start_atime:.3f} seconds")

    return {
        "code": 0,
        "message": "success",
        "data": response_data
    }


@router.get("/historical-flows", response_model=schemas_stats.AllHistoricalFlowsResponse)
def read_all_historical_flows():
    """
    Return the historical net-flow data of every community in every time slot.
    Used by the frontend to load the base dataset in a single request.

    Raises:
        HTTPException: 503 if Redis is unreachable; 500 for any other failure.
    """
    print("Received request for all historical flows")
    start_time = atime.time()

    redis_client = None
    try:
        # NOTE: a Redis connection is created per request here.
        # A larger project would manage it via dependency injection.
        redis_client = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)
        redis_client.ping()

        flows = crud_stats.get_all_historical_flows_from_redis(redis_client)
        end_time = atime.time()
        print(f"Processed all historical flows request in {end_time - start_time:.3f} seconds")

        return {"code": 0, "message": "success", "data": flows}
    except redis.ConnectionError as e:
        raise HTTPException(status_code=503, detail=f"无法连接到 Redis: {e}") from e
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"获取历史数据时发生错误: {e}") from e
    finally:
        # Fix: the original leaked the connection on every path; release it
        # here so both success and error responses clean up.
        if redis_client is not None:
            redis_client.close()