import os

from flask import Blueprint, request, jsonify
from zhipuai import ZhipuAI

llm_bp = Blueprint('llm_bp', __name__)


# 封装 LLM 调用函数
def llm(origin_str):
    """Ask the ZhipuAI glm-4-flash model a household-waste-sorting question.

    Args:
        origin_str: Raw user input text; it is interpolated into a system-style
            prompt that restricts answers to waste classification/recycling.

    Returns:
        The model's reply text, stripped of surrounding whitespace.

    Raises:
        Whatever the ZhipuAI client raises on network/API failure; callers
        (the route handler) catch Exception at the boundary.
    """
    # SECURITY: never hard-code API keys in source. Read from the environment;
    # the literal fallback only preserves existing behavior — this key is now
    # public and must be rotated, after which the fallback should be removed.
    api_key = os.getenv(
        "ZHIPUAI_API_KEY",
        "93a4af32b20a255d41fed81f97f2d0f2.rmB4dLOfshTbU13d",
    )
    client = ZhipuAI(api_key=api_key)
    response = client.chat.completions.create(
        model="glm-4-flash",
        messages=[
            {
                "role": "user",
                # Prompt pins the model to waste-sorting topics and tells it
                # to refuse anything off-topic. Runtime string kept verbatim.
                "content": f"你只会回答**生活垃圾分类与回收**相关的知识，尽量要说出垃圾所属的分类。不然你就说：我拒绝回答无关生活垃圾回收的内容。这是用户输入：{origin_str}",
            }
        ],
        max_tokens=1024,
    )
    return response.choices[0].message.content.strip()

# 定义接口
# Endpoint: answer a waste-sorting question through the LLM wrapper.
@llm_bp.route("/api/llm-answer", methods=["POST"])
def llm_answer():
    """POST /api/llm-answer

    Expects a JSON body of the form ``{"text": "<question>"}`` and returns
    ``{"code": 200, "msg": ..., "text": ..., "data": {"answer": ...}}``.
    Responds 400 when the body is missing/invalid JSON or lacks ``text``,
    and 500 when the upstream LLM call fails.
    """
    try:
        # silent=True: malformed JSON (or wrong Content-Type) returns None,
        # which the guard below turns into a 400. Without it, get_json()
        # raises and the broad except mislabels a client error as a 500.
        data = request.get_json(silent=True)
        if not data or "text" not in data:
            return jsonify({"error": "缺少 text 参数"}), 400

        text = data.get("text")
        answer = llm(text)  # single LLM call; its result is echoed below

        return jsonify({
            "code": 200,
            "msg": "nothing",
            "text": text,
            "data": {
                "answer": answer
            }
        })

    except Exception as e:
        # Boundary handler for upstream/LLM failures. NOTE(review): str(e)
        # may leak internal details to clients — consider logging the full
        # exception server-side and returning a generic message instead.
        return jsonify({"error": str(e)}), 500
