import asyncio
import os
import time
import logging
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
from starlette.staticfiles import StaticFiles
from dj_model import DjModel
import aiosqlite
import arrow

# Logging setup. NOTE: root level is INFO, so the logging.debug() call
# below will not actually be emitted.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

# SQLite database lives two directories above this file, under project/rebot.
db_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "project/rebot/src/caches/database.db")
logging.debug(f"数据库路径：{db_path}")

# FastAPI application setup
app = FastAPI()
# app.mount("/static", StaticFiles(directory="static"), name="static")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # allow every origin
    # NOTE(review): browsers ignore "*" for credentialed requests when
    # allow_credentials=True — confirm whether credentials are really needed.
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.middleware("http")
async def add_json_response_middleware(request: Request, call_next):
    """Force a JSON content type on every outgoing response.

    Bug fix: Starlette's ``app.middleware()`` only accepts the type
    ``"http"`` (it asserts on anything else), so the original
    ``"jsonResponse"`` argument crashed the app at startup.
    """
    response = await call_next(request)
    # Overwrites whatever content type the endpoint produced —
    # intentional here since every route returns JSON.
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    return response


class SearchRequest(BaseModel):
    """Request body for POST /search_files."""
    keyword: str  # substring matched against the NAME column


class FileResponse(BaseModel):
    """Shape of one /search_files result entry.

    NOTE(review): declared but not wired up as a response_model in the
    visible code — the endpoints return plain dicts with these keys.
    """
    name: str
    quarkViewLink: str
    quarkAddTime: str  # "YYYY-MM-DD"
    baiduViewLink: str
    baiduAddTime: str  # "YYYY-MM-DD"


class FileDataResponse(BaseModel):
    """Shape of one GET / result entry (single-origin view).

    NOTE(review): declared but not wired up as a response_model in the
    visible code — the endpoints return plain dicts with these keys.
    """
    name: str
    viewlink: str
    addtime: str  # "YYYY-MM-DD"


@app.post("/search_files", response_model=dict)
async def search_files(request: SearchRequest):
    """Search the dj table by name keyword (at most 5 hits).

    Returns ``{"code": 0, "data": [...]}`` where each entry carries both
    the quark and baidu share links plus their add dates ("YYYY-MM-DD").
    Raises HTTP 400 on an empty keyword, HTTP 500 on unexpected failures.
    """

    def fmt_add_time(ms: int) -> str:
        # A stored value of 0 means "unknown" — substitute the current time.
        if ms == 0:
            ms = int(time.time() * 1000)
        return time.strftime("%Y-%m-%d", time.localtime(ms / 1000))

    try:
        keyword = request.keyword
        if not keyword:
            raise HTTPException(status_code=400, detail="参数错误")

        logging.info(f"[API]搜索关键词：{keyword}")
        results = await search_in_db(keyword, 5)

        response_data = [
            {
                "name": model.name,
                "quarkViewLink": model.quark_url,
                "quarkAddTime": fmt_add_time(model.quark_create_at),
                "baiduViewLink": model.baidu_url,
                "baiduAddTime": fmt_add_time(model.baidu_create_at),
            }
            for model in results
        ]
        return {"code": 0, "data": response_data}
    except HTTPException:
        # Bug fix: the broad handler below used to swallow the deliberate
        # 400 above and re-raise it as a 500 — let HTTPExceptions pass through.
        raise
    except Exception as e:
        logging.error(f"[API]搜索文件失败！原因：{e}")
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/", response_model=dict)
async def home_request(request: Request):
    """List files for one origin ("quark" or "baidu") over a time window.

    Query params:
        time:   "today", "inTwoDays" .. "inSevenDays", or "list" (everything).
        origin: "quark" or "baidu" — selects which link/timestamp columns to use.

    Returns ``{"code": ..., "msg": ..., "data": [{name, viewlink, addtime}]}``.
    Raises HTTP 400 on bad parameters, HTTP 500 on unexpected failures.
    """
    try:
        logging.info("[API]---")

        query_params = dict(request.query_params)
        time_val = query_params.get("time")
        origin = query_params.get("origin")

        if origin not in ["quark", "baidu"]:
            raise HTTPException(status_code=400, detail="参数错误")

        code = 1
        msg = "今日更新"

        logging.info(f"[API]请求参数：time={time_val}, origin={origin}")

        # "inTwoDays" .. "inSevenDays" all map to a trailing-N-day window;
        # a lookup table replaces the original seven-branch if/elif ladder.
        day_windows = {
            "inTwoDays": 2,
            "inThreeDays": 3,
            "inFourDays": 4,
            "inFiveDays": 5,
            "inSixDays": 6,
            "inSevenDays": 7,
        }

        if time_val == "today":
            fetch_result = await get_today_files(origin)
        elif time_val in day_windows:
            fetch_result = await get_some_day_files(origin=origin, day=day_windows[time_val])
        elif time_val == "list":
            code = 200
            msg = "列表获取成功"
            fetch_result = await get_all()
        else:
            logging.error(f"[API]时间参数错误：{time_val}")
            raise HTTPException(status_code=400, detail="时间参数错误")

        logging.info(f"[API]获取到的文件数量：{len(fetch_result)}")

        data = []
        for model in fetch_result:
            if origin == "baidu":
                view_link = model.baidu_url
                add_time = model.baidu_create_at
            else:
                view_link = model.quark_url
                add_time = model.quark_create_at

            # Robustness: also skip None links (len(None) used to raise).
            if not view_link:
                continue
            # A stored timestamp of 0 means "unknown" — substitute now.
            if add_time == 0:
                add_time = int(time.time() * 1000)
            data.append({
                "name": model.name,
                "viewlink": view_link,
                "addtime": time.strftime("%Y-%m-%d", time.localtime(add_time / 1000)),
            })
        return {"code": code, "msg": msg, "data": data}
    except HTTPException:
        # Bug fix: the broad handler below used to convert the deliberate
        # 400 responses above into 500s — let HTTPExceptions pass through.
        raise
    except Exception as e:
        logging.error(f"[API]获取文件失败！原因：{e}")
        raise HTTPException(status_code=500, detail=str(e))


async def get_today_files(origin: str) -> list[DjModel]:
    """Return rows whose create time (for the given origin) falls within
    today, i.e. between local midnight and 23:59:59.999 (epoch ms).
    """
    lower = arrow.now().floor('day').timestamp() * 1000
    upper = arrow.now().ceil('day').timestamp() * 1000
    # Pick the column set matching the requested origin.
    sql = (
        "SELECT * FROM dj WHERE QUARK_CREATE_AT >= ? AND QUARK_CREATE_AT <= ?"
        if origin == "quark"
        else "SELECT * FROM dj WHERE BAIDU_CREATE_AT >= ? AND BAIDU_CREATE_AT <= ?"
    )
    async with aiosqlite.connect(db_path) as db:
        cursor = await db.execute(sql, (lower, upper))
        rows = await cursor.fetchall()
    # Drop rows the converter rejects (falsy results).
    return [m for m in (_convert_to_model(row) for row in rows) if m]


async def get_all() -> list[DjModel]:
    """Return every row of the dj table as DjModel instances."""
    async with aiosqlite.connect(db_path) as db:
        cursor = await db.execute("SELECT * FROM dj")
        rows = await cursor.fetchall()
    # Drop rows the converter rejects (falsy results).
    return [m for m in (_convert_to_model(row) for row in rows) if m]


async def get_some_day_files(origin: str, day: int) -> list[DjModel]:
    """Return rows created in the trailing `day`-day window.

    The window runs from `day` days before today's local midnight up to
    today's midnight (epoch milliseconds), so "today" itself is excluded.
    """
    midnight = arrow.now().floor('day')  # today at 00:00:00 local time
    window = (midnight.shift(days=-day).timestamp() * 1000,
              midnight.timestamp() * 1000)
    # Pick the column set matching the requested origin.
    sql = (
        "SELECT * FROM dj WHERE QUARK_CREATE_AT >= ? AND QUARK_CREATE_AT <= ?"
        if origin == "quark"
        else "SELECT * FROM dj WHERE BAIDU_CREATE_AT >= ? AND BAIDU_CREATE_AT <= ?"
    )
    async with aiosqlite.connect(db_path) as db:
        cursor = await db.execute(sql, window)
        rows = await cursor.fetchall()
    # Drop rows the converter rejects (falsy results).
    return [m for m in (_convert_to_model(row) for row in rows) if m]


async def search_in_db(keyword: str, limit: int) -> list[DjModel]:
    """Return up to `limit` rows whose NAME contains `keyword` (SQL LIKE).

    NOTE(review): LIKE wildcards (``%``/``_``) inside `keyword` are not
    escaped, so they act as wildcards — confirm this is intended.
    """
    pattern = f"%{keyword}%"
    async with aiosqlite.connect(db_path) as db:
        cursor = await db.execute("SELECT * FROM dj WHERE NAME LIKE ? LIMIT ?", (pattern, limit))
        rows = await cursor.fetchall()
    # Drop rows the converter rejects (falsy results).
    return [m for m in (_convert_to_model(row) for row in rows) if m]


def _convert_to_model(values) -> "DjModel | None":
    """Convert a raw dj-table row (positional sequence) into a DjModel.

    Returns None when `values` is falsy (e.g. no matching row).
    Expected column order:
      0 NAME            - title (primary key)
      1 QUARK_URL       - quark share link
      2 QUARK_CREATE_AT - quark create time (epoch ms; 0 = unknown)
      3 QUARK_FILE_ID   - quark file ID
      4 QUARK_SHARE_ID  - quark share ID
      5 BAIDU_URL       - baidu share link
      6 BAIDU_CREATE_AT - baidu create time (epoch ms; 0 = unknown)
      7 BAIDU_FILE_ID   - baidu file ID
      8 BAIDU_SHARE_ID  - baidu share ID
    """
    if not values:
        return None
    return DjModel(
        name=values[0],
        quark_url=values[1],
        quark_create_at=values[2],
        quark_file_id=values[3],
        quark_share_id=values[4],
        baidu_url=values[5],
        baidu_create_at=values[6],
        baidu_file_id=values[7],
        baidu_share_id=values[8]
    )
