import os

from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
from sparkai.core.messages import ChatMessage
from sparkai.llm.llm import ChatSparkLLM, ChunkPrintHandler

# Flask application instance.
app = Flask(__name__)

# iFLYTEK Spark LLM connection settings. Values are read from the
# environment when present; the literals below are fallbacks so existing
# deployments keep working unchanged.
# NOTE(review): hard-coded API credentials should not live in source
# control — rotate these keys and supply them only via environment variables.
SPARKAI_URL = os.environ.get('SPARKAI_URL', 'wss://spark-api.xf-yun.com/v3.5/chat')
SPARKAI_APP_ID = os.environ.get('SPARKAI_APP_ID', '982c0054')
SPARKAI_API_SECRET = os.environ.get('SPARKAI_API_SECRET', 'NzRjMTdhNWUyYmU4MTNiNGQxZDgzMWQ0')
SPARKAI_API_KEY = os.environ.get('SPARKAI_API_KEY', 'f6a31ec70344fd3350694509a8fec7e3')
SPARKAI_DOMAIN = os.environ.get('SPARKAI_DOMAIN', 'generalv3.5')

# Single shared, non-streaming Spark LLM client reused by every request.
spark = ChatSparkLLM(
    spark_api_url=SPARKAI_URL,
    spark_app_id=SPARKAI_APP_ID,
    spark_api_key=SPARKAI_API_KEY,
    spark_api_secret=SPARKAI_API_SECRET,
    spark_llm_domain=SPARKAI_DOMAIN,
    streaming=False,
)

# POST /chat: forward the user's message to the Spark LLM and return its reply.
@app.route('/chat', methods=['POST'])
@cross_origin()
def chat():
    """Handle a single-turn chat request.

    Expects a JSON body of the form ``{"user_input": "<text>"}``.
    Returns ``{"response": "<model reply>"}`` with HTTP 200 on success,
    a ``{"error": ...}`` payload with HTTP 400 on bad input, or HTTP 502
    when the upstream model call fails.
    """
    # silent=True yields None instead of raising on a missing/invalid JSON body.
    payload = request.get_json(silent=True)
    if payload is None:
        return jsonify({'error': 'Request body must be JSON'}), 400

    user_input = payload.get('user_input', '')
    # Reject empty input early instead of spending an upstream API call on it.
    if not isinstance(user_input, str) or not user_input.strip():
        return jsonify({'error': "Field 'user_input' must be a non-empty string"}), 400

    # Build the single-turn message list expected by the SDK.
    messages = [ChatMessage(role="user", content=user_input)]

    handler = ChunkPrintHandler()
    try:
        result = spark.generate([messages], callbacks=[handler])
    except Exception:  # network/auth failures from the upstream LLM service
        app.logger.exception("Spark LLM request failed")
        return jsonify({'error': 'Upstream model request failed'}), 502

    # generate() returns a batch of results; one message list was sent,
    # so take the first generation of the first (and only) entry.
    return jsonify({'response': result.generations[0][0].text}), 200

# Script entry point: start the Flask development server.
if __name__ == '__main__':
    # debug=True enables auto-reload and the interactive debugger.
    # NOTE(review): debug mode must not be used in production — it allows
    # arbitrary code execution via the debugger; confirm deployment uses a WSGI server.
    app.run(debug=True)