import os

from flask import Flask, request, jsonify
from sparkai.llm.llm import ChatSparkLLM
from sparkai.core.messages import ChatMessage

# Flask application instance.
app = Flask(__name__)

# iFLYTEK Spark LLM configuration.
# SECURITY NOTE: credentials are read from the environment when available.
# The hard-coded fallbacks below are live secrets and should be removed
# before deployment — never commit API keys to source control.
SPARKAI_URL = os.environ.get('SPARKAI_URL', 'wss://spark-api.xf-yun.com/v3.5/chat')
SPARKAI_APP_ID = os.environ.get('SPARKAI_APP_ID', '982c0054')
SPARKAI_API_SECRET = os.environ.get('SPARKAI_API_SECRET', 'NzRjMTdhNWUyYmU4MTNiNGQxZDgzMWQ0')
SPARKAI_API_KEY = os.environ.get('SPARKAI_API_KEY', 'f6a31ec70344fd3350694509a8fec7e3')
SPARKAI_DOMAIN = os.environ.get('SPARKAI_DOMAIN', 'generalv3.5')

# Single shared LLM client. streaming=False: generate() blocks and
# returns the complete reply instead of emitting incremental chunks.
spark = ChatSparkLLM(
    spark_api_url=SPARKAI_URL,
    spark_app_id=SPARKAI_APP_ID,
    spark_api_key=SPARKAI_API_KEY,
    spark_api_secret=SPARKAI_API_SECRET,
    spark_llm_domain=SPARKAI_DOMAIN,
    streaming=False,
)

@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn against the Spark LLM.

    Expects a JSON body of the form ``{"input": "<user text>"}``.

    Returns:
        200 ``{"response": "<model reply>"}`` on success,
        400 ``{"error": ...}`` when the body is missing/invalid JSON or
            has no non-empty ``input`` field,
        500 ``{"error": ...}`` when the model yields no output.
    """
    # silent=True returns None (instead of raising) on a missing or
    # malformed JSON body, so we can answer with a clean 400.
    data = request.get_json(silent=True)
    if not data or not data.get('input'):
        return jsonify({'error': "Missing 'input' in request body."}), 400

    # One single-turn conversation for the model.
    messages = [ChatMessage(role="user", content=data['input'])]

    # BUG FIX: ChatSparkLLM.generate() takes a *batch* of conversations
    # (a list of message lists) and returns an LLMResult; the original
    # call passed a flat list and then indexed the result with
    # response[0].content, which an LLMResult does not support.
    result = spark.generate([messages])
    generations = result.generations

    # generations is a list (one entry per conversation) of lists of
    # candidate generations; take the first candidate's text.
    if generations and generations[0] and generations[0][0].text:
        return jsonify({'response': generations[0][0].text}), 200
    return jsonify({'error': 'No response from the model.'}), 500

def main():
    """Start the Flask development server.

    debug=True enables the auto-reloader and interactive debugger;
    it must not be used in production.
    """
    app.run(debug=True)


if __name__ == '__main__':
    main()