from fastapi import APIRouter, Depends, Form, Request
from pydantic_validation_decorator import ValidateFields
from sqlalchemy.ext.asyncio import AsyncSession
from config.enums import BusinessType
from config.get_db import get_db
from module_admin.annotation.log_annotation import Log
from module_admin.aspect.interface_auth import CheckUserInterfaceAuth
from module_admin.entity.vo.user_vo import CurrentUserModel
from module_admin.service.login_service import LoginService
from module_admin.service.llm_service import LlmService
from module_admin.entity.vo.llm_vo import DeleteLlmModel, LlmModel, LlmPageQueryModel
from utils.common_util import bytes2file_response
from utils.log_util import logger
from utils.page_util import PageResponseModel
from utils.response_util import ResponseUtil
from utils.ocr_llm_util import OcrLlmConfig


# Router for LLM-configuration tooling endpoints, mounted under /tool/llm.
# Every route requires an authenticated user (LoginService.get_current_user);
# per-route interface permissions are added via CheckUserInterfaceAuth below.
llmController = APIRouter(
    prefix="/tool/llm", dependencies=[Depends(LoginService.get_current_user)]
)


@llmController.get(
    "/list",
    response_model=PageResponseModel,
    dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:list"))],
)
async def get_tool_llm_list(
    request: Request,
    llm_page_query: LlmPageQueryModel = Depends(LlmPageQueryModel.as_query),
    query_db: AsyncSession = Depends(get_db),
):
    """Return a paginated list of LLM configurations.

    Query parameters are bound through ``LlmPageQueryModel.as_query``;
    pagination is delegated to the service layer (``is_page=True``).
    """
    page_result = await LlmService.get_llm_list_services(
        query_db, llm_page_query, is_page=True
    )
    logger.info("获取成功")
    return ResponseUtil.success(model_content=page_result)


@llmController.post("", dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:add"))])
@ValidateFields(validate_model="add_llm")
@Log(title="大模型配置", business_type=BusinessType.INSERT)
async def add_tool_llm(
    request: Request,
    add_llm: LlmModel,
    query_db: AsyncSession = Depends(get_db),
    current_user: CurrentUserModel = Depends(LoginService.get_current_user),
):
    """Create a new LLM configuration record.

    The body is validated by ``@ValidateFields``; the operation is audited
    by ``@Log`` (hence the otherwise-unused ``current_user`` dependency).
    """
    result = await LlmService.add_llm_services(query_db, add_llm)
    logger.info(result.message)
    return ResponseUtil.success(msg=result.message)


@llmController.put("", dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:edit"))])
@ValidateFields(validate_model="edit_llm")
@Log(title="大模型配置", business_type=BusinessType.UPDATE)
async def edit_tool_llm(
    request: Request,
    edit_llm: LlmModel,
    query_db: AsyncSession = Depends(get_db),
    current_user: CurrentUserModel = Depends(LoginService.get_current_user),
):
    """Update an existing LLM configuration record.

    Mirrors :func:`add_tool_llm` but delegates to the edit service and is
    audited as an UPDATE operation.
    """
    result = await LlmService.edit_llm_services(query_db, edit_llm)
    logger.info(result.message)
    return ResponseUtil.success(msg=result.message)


@llmController.delete(
    "/{ids}", dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:remove"))]
)
@Log(title="大模型配置", business_type=BusinessType.DELETE)
async def delete_tool_llm(
    request: Request, ids: str, query_db: AsyncSession = Depends(get_db)
):
    """Delete one or more LLM configurations.

    ``ids`` is taken from the path as a raw string (presumably a
    comma-separated id list — parsing is left to the service layer).
    """
    result = await LlmService.delete_llm_services(query_db, DeleteLlmModel(ids=ids))
    logger.info(result.message)
    return ResponseUtil.success(msg=result.message)


@llmController.get(
    "/{id}",
    response_model=LlmModel,
    dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:query"))],
)
async def query_detail_tool_llm(
    request: Request, id: int, query_db: AsyncSession = Depends(get_db)
):
    """Fetch the detail of a single LLM configuration by its numeric id."""
    detail = await LlmService.llm_detail_services(query_db, id)
    logger.info(f"获取id为{id}的信息成功")
    return ResponseUtil.success(data=detail)


@llmController.post(
    "/export", dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:export"))]
)
@Log(title="大模型配置", business_type=BusinessType.EXPORT)
async def export_tool_llm_list(
    request: Request,
    llm_page_query: LlmPageQueryModel = Form(),
    query_db: AsyncSession = Depends(get_db),
):
    """Export the full (unpaginated) LLM configuration list as a file stream.

    The same query model as the list endpoint is accepted, but bound from
    form data and executed with ``is_page=False`` to fetch everything.
    """
    full_list = await LlmService.get_llm_list_services(
        query_db, llm_page_query, is_page=False
    )
    export_bytes = await LlmService.export_llm_list_services(request, full_list)
    logger.info("导出成功")
    return ResponseUtil.streaming(data=bytes2file_response(export_bytes))


@llmController.put(
    "/cache", dependencies=[Depends(CheckUserInterfaceAuth("tool:llm:export"))]
)
async def cache_tool_llm_list(request: Request):
    # Rebuild the LLM configuration cache in Redis.
    # NOTE(review): the required permission is 'tool:llm:export' — this looks
    # copy-pasted from the export route; confirm whether a dedicated permission
    # (e.g. 'tool:llm:cache'/'tool:llm:edit') was intended. Also, unlike the
    # other mutating routes, this one has no @Log audit decorator — verify
    # that is deliberate.
    await OcrLlmConfig.init_llm_config(request.app.state.redis)
    logger.info("缓存 LLM 配置成功")
    # NOTE(review): returns a streaming response with an empty payload; a plain
    # ResponseUtil.success() seems more appropriate — confirm what the client
    # expects before changing the response shape.
    return ResponseUtil.streaming(data="")
