"""
大模型问答的api服务接口
"""
import json
from fastapi import Body
from server.api_wrap import async_api_wrapper, BaseResponse
from core.llm_answer import answer_with_llm
from loguru import logger


@async_api_wrapper
async def llm_answer_api(
        prompt: str = Body(description='大模型对话输入', examples=None),
        parse_type: str = Body(default='dict', description='结果解析的返回类型', examples=None)
):
    """Answer a user prompt with the LLM and return the parsed result.

    Args:
        prompt: User prompt text forwarded to the LLM.
        parse_type: Parse/return type for the result, defaults to 'dict'.
            Exact accepted values are defined by `answer_with_llm` — confirm there.

    Returns:
        BaseResponse wrapping the parsed LLM answer.
    """
    logger.info(f'LLM问答输入: {json.dumps(prompt, ensure_ascii=False)}')

    result = await answer_with_llm(prompt, parse_type)

    # default=str keeps logging from raising TypeError when the parsed result
    # contains non-JSON-serializable values; the response payload is unchanged.
    logger.info(f'LLM问答输出: {json.dumps(result, ensure_ascii=False, default=str)}')
    return BaseResponse(data=result)