import json
import os

from flask import Flask, render_template, request, Response, stream_with_context
from openai import OpenAI

app = Flask(__name__)

# OpenAI-compatible client configuration.
#
# SECURITY NOTE(review): the API key below was previously committed to source
# control and should be considered leaked — rotate it and supply the new one
# via the OPENAI_API_KEY environment variable. The hardcoded value is kept
# only as a backward-compatible fallback.
#
# Alternative provider (iFlytek Spark / Tencent Cloud endpoints were both
# tried during development):
#   base_url="https://api.lkeap.cloud.tencent.com/v1"
client = OpenAI(
    api_key=os.environ.get(
        "OPENAI_API_KEY",
        "sk-trK2UHXj8g5PGpeK6555Fc95795d4728A22e899878D34dEc",
    ),
    base_url=os.environ.get(
        "OPENAI_BASE_URL",
        "https://maas-api.cn-huabei-1.xf-yun.com/v1",
    ),
)


@app.route('/')
def index():
    """Serve the chat UI page."""
    return render_template('index.html')

@app.route('/chat', methods=['POST'])
def chat():
    """Proxy a user message to the LLM and stream the reply as SSE.

    Expects a JSON body of the form {"message": "..."}. Returns a
    text/event-stream response where each event is a JSON object
    {"content": "..."} carrying one streamed token chunk.
    """
    # get_json(silent=True) returns None instead of raising on a missing or
    # non-JSON body, so we can reject bad requests with a clean 400.
    payload = request.get_json(silent=True) or {}
    message = payload.get('message')
    if not message:
        return Response(
            json.dumps({'error': 'message is required'}),
            status=400,
            mimetype='application/json',
        )

    def generate():
        chat_completion = client.chat.completions.create(
            # model="deepseek-r1",
            model="xdeepseekr1",
            messages=[
                {
                    "role": "user",
                    "content": message,
                }
            ],
            stream=True,
        )

        for chunk in chat_completion:
            # Some chunks (e.g. role-only or finish events) carry no content;
            # getattr covers both a missing attribute and a None/empty value.
            content = getattr(chunk.choices[0].delta, 'content', None)
            if content:
                yield f"data: {json.dumps({'content': content})}\n\n"

    # stream_with_context keeps the request context alive while the
    # generator is consumed, so `message` stays accessible mid-stream.
    return Response(stream_with_context(generate()), mimetype='text/event-stream')

if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader —
    # fine for local development, but must not be used in production.
    app.run(debug=True)