import os

from flask import Flask, render_template, request, jsonify
from sparkai.llm.llm import ChatSparkLLM, ChunkPrintHandler
from sparkai.core.messages import ChatMessage

app = Flask(__name__)

# iFlytek Spark LLM configuration (obtain the correct values from the
# iFlytek open-platform console). Credentials can now be supplied via
# environment variables of the same name; the hard-coded literals remain
# as backward-compatible defaults, but committing real secrets to source
# control is a security risk — prefer the environment variables.
SPARKAI_URL = 'wss://spark-api.xf-yun.com/v3.5/chat'
SPARKAI_APP_ID = os.environ.get("SPARKAI_APP_ID", "78e9814f")  # replace with your actual APP ID
SPARKAI_API_SECRET = os.environ.get("SPARKAI_API_SECRET", "MTExYjQzOWQ2Zjk1YjhhMmExNTcyMzgy")  # replace with your actual API Secret
SPARKAI_API_KEY = os.environ.get("SPARKAI_API_KEY", "4f7fe46be3f20bca0e568bceeb3e7941")  # replace with your actual API Key
SPARKAI_DOMAIN = 'generalv3.5'


# Route serving the front-end page.
@app.route('/')
def index():
    """Render and return the chat UI page."""
    template_name = 'index.html'
    return render_template(template_name)


# Initialize the Spark LLM client once at module load; every request
# reuses this single configured instance (non-streaming mode).
_spark_kwargs = {
    "spark_api_url": SPARKAI_URL,
    "spark_app_id": SPARKAI_APP_ID,
    "spark_api_key": SPARKAI_API_KEY,
    "spark_api_secret": SPARKAI_API_SECRET,
    "spark_llm_domain": SPARKAI_DOMAIN,
    "streaming": False,
}
spark = ChatSparkLLM(**_spark_kwargs)

def answer():
    """Read the question from the request JSON and print it.

    NOTE(review): this function appears unused — no route is bound to it
    and nothing in this file calls it. It also reads ``request`` so it
    only works inside a Flask request context. Consider removing it.
    """
    payload = request.get_json()
    question = payload['question'] + "，回答不要超过10字"
    print("我收到了你的问题：" + question)


# Back-end route handling question submissions.
@app.route('/ask', methods=['POST'])
def ask_question():
    """Forward the posted question to the Spark LLM and return its answer.

    Request body:  JSON with key ``question``.
    Response body: JSON ``{"answer": <text>}`` in every case.
    """
    user_question = request.json.get('question', '')
    if not user_question:
        # BUG FIX: the original had a trailing comma here, returning a
        # 1-tuple ``(jsonify(...),)`` which Flask rejects as an invalid
        # response tuple.
        return jsonify({"answer": "请输入"})

    # Build the single-turn conversation and invoke the model.
    messages = [ChatMessage(role="user", content=user_question)]
    handler = ChunkPrintHandler()
    response = spark.generate([messages], callbacks=[handler])

    # response.generations is a nested list; the answer text lives at
    # generations[0][0].text when the call succeeded.
    if response and isinstance(response.generations, list) and response.generations:
        generations = response.generations[0]
        if isinstance(generations, list) and generations:
            return jsonify({"answer": generations[0].text})

    # BUG FIX: the original fell off the end (implicitly returning None,
    # a 500 in Flask) when the outer check failed, and its timeout branch
    # also returned an invalid 1-tuple. Always return a JSON response.
    return jsonify({"answer": "回复超时"})

if __name__ == "__main__":
    # Development server only — debug=True enables the interactive
    # debugger and auto-reload; do not use in production.
    app.run(debug=True)