from openai import OpenAI
from ..settings.settings import Settings
from fastapi import APIRouter, Query
from fastapi.responses import JSONResponse
from langchain_core.prompts import PromptTemplate

router = APIRouter()

# Load configuration (DeepSeek endpoint URL and API key) from project settings.
settings = Settings()
DeepSeekUrl = settings.DeepSeekUrl
DeepSeekKey = settings.DeepSeekKey

# Initialize the OpenAI-compatible client, pointed at the DeepSeek API base URL.
client = OpenAI(api_key=DeepSeekKey, base_url=DeepSeekUrl)

# Prompt template: frames the model as a professional lawyer and asks it to
# answer the user's legal question with supporting statutory references.
# (Template text is in Chinese and is part of runtime behavior — unchanged.)
prompt = PromptTemplate.from_template(
    "你是一个专业的律师，请你回答我提出的法律问题，并给出法律条文依据，我的问题是：{question}"
)

@router.get("/d21prompttemplate")
def d21prompttemplate(question: str = Query(..., description="请输入你的法律问题")):
    """Answer a user's legal question via the DeepSeek chat model.

    Renders the module-level prompt template with the question, sends it to
    the DeepSeek ChatCompletion endpoint, and returns question, rendered
    prompt, and the model's answer as JSON.

    NOTE: declared as a plain ``def`` (not ``async def``) on purpose — the
    OpenAI client call below is blocking network I/O, and FastAPI runs sync
    endpoints in a threadpool, so the event loop is not stalled. The original
    ``async def`` version blocked the loop for the duration of each API call.
    """
    # Render the prompt template with the user's question.
    prompt_value = prompt.invoke({"question": question})
    prompt_text = prompt_value.to_string()

    # Call the DeepSeek ChatCompletion API (blocking network round-trip).
    try:
        response = client.chat.completions.create(
            model="deepseek-chat",  # model name per the DeepSeek API docs
            messages=[
                {"role": "system", "content": "你是一个专业的律师，回答需有法律条文依据"},
                {"role": "user", "content": prompt_text},
            ],
            temperature=0.7,
        )
    except Exception as exc:
        # Surface upstream API failures as a structured 502 instead of an
        # opaque unhandled 500, so callers can see what went wrong.
        return JSONResponse(
            status_code=502,
            content={"question": question, "error": str(exc)},
        )

    # Extract the assistant's reply from the first choice.
    answer = response.choices[0].message.content

    return JSONResponse(content={
        "question": question,
        "prompt": prompt_text,
        "answer": answer,
    })
