import json
import os

from flask import Flask, jsonify, render_template, request
from flask_cors import CORS

from sparkai.core.messages import ChatMessage
from sparkai.llm.llm import ChatSparkLLM, ChunkPrintHandler

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the browser front-end

# Spark (iFlytek) LLM configuration.
# Each setting can be overridden via an environment variable of the same name;
# the literal value is kept only as a backward-compatible fallback.
# NOTE(security): the fallback credentials below are committed in source —
# they should be rotated and supplied exclusively via the environment.
SPARKAI_URL = os.environ.get('SPARKAI_URL', 'wss://spark-api.xf-yun.com/v3.5/chat')
SPARKAI_APP_ID = os.environ.get('SPARKAI_APP_ID', '982c0054')
SPARKAI_API_SECRET = os.environ.get('SPARKAI_API_SECRET', 'NzRjMTdhNWUyYmU4MTNiNGQxZDgzMWQ0')
SPARKAI_API_KEY = os.environ.get('SPARKAI_API_KEY', 'f6a31ec70344fd3350694509a8fec7e3')
SPARKAI_DOMAIN = os.environ.get('SPARKAI_DOMAIN', 'generalv3.5')

# Initialize the Spark LLM client (non-streaming: whole answer per request).
spark = ChatSparkLLM(
    spark_api_url=SPARKAI_URL,
    spark_app_id=SPARKAI_APP_ID,
    spark_api_key=SPARKAI_API_KEY,
    spark_api_secret=SPARKAI_API_SECRET,
    spark_llm_domain=SPARKAI_DOMAIN,
    streaming=False,
)

@app.route('/')
def index():
    """Serve the chat front-end page (templates/index.html)."""
    page = render_template('index.html')
    return page

@app.route('/ask', methods=['POST'])
def ask():
    """Answer a user question via the Spark LLM.

    Expects a JSON body ``{"question": "..."}`` and returns JSON
    ``{"answer": "..."}``. Falls back to a default reply when the
    question is empty, the model returns nothing, or the API call fails.
    """
    # silent=True yields None (instead of raising) on a missing or
    # non-JSON body, so malformed requests get the default reply
    # rather than an unhandled error.
    payload = request.get_json(silent=True) or {}
    question = payload.get('question', '')

    # Don't bother the model with an empty question.
    if not question:
        return jsonify({'answer': "对不起，我不确定如何回答您的问题。"})

    # Ask the Spark model to generate an answer.
    messages = [ChatMessage(role="user", content=question)]
    handler = ChunkPrintHandler()
    try:
        response = spark.generate([messages], callbacks=[handler])
        answer = response.generations[0][0].text
    except Exception:
        # Network/API failures must not 500 the endpoint; log and fall
        # back to the default reply below.
        app.logger.exception("Spark LLM request failed")
        answer = ""

    # Default reply when the model produced no text.
    if not answer:
        answer = "对不起，我不确定如何回答您的问题。"

    return jsonify({'answer': answer})

if __name__ == '__main__':
    # Flask's built-in development server; debug=True enables the reloader
    # and interactive debugger — never run with debug=True in production.
    app.run(debug=True)