from flask import Flask, request, Response, jsonify
from flask_cors import CORS
from openai import OpenAI
import os

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the front-end

# Initialize the OpenAI-compatible client.
# SECURITY: the API key should come from the environment, never from source
# control. The literal fallback below preserves existing behavior but should
# be removed once LKEAP_API_KEY is configured in deployment.
client = OpenAI(
    api_key=os.environ.get(
        "LKEAP_API_KEY",
        "sk-wtXHi86oVQ58p6yCNFy0QC9xIzVz6RND0hepONfheYK3Mh3W",
    ),
    base_url="https://api.lkeap.cloud.tencent.com/v1",
)

def generate_stream(prompt):
    """Stream model output for *prompt* as plain-text chunks.

    Opens a streaming chat completion against the ``deepseek-r1`` model and
    yields each token fragment as it arrives: reasoning
    (chain-of-thought) fragments first, then final-answer fragments.

    Args:
        prompt: The user's message to send to the model.

    Yields:
        str: Non-empty text fragments from the model stream.
    """
    is_answering = False  # True once the first answer (non-reasoning) token arrives

    # Create the streaming request; iteration below consumes server-sent chunks.
    stream = client.chat.completions.create(
        model="deepseek-r1",
        messages=[{"role": "user", "content": prompt}],
        stream=True,
    )

    for chunk in stream:
        # Usage/bookkeeping chunks carry no choices — skip them.
        if not getattr(chunk, 'choices', None):
            continue

        delta = chunk.choices[0].delta
        # Look up both fields once; either may be absent or empty.
        reasoning = getattr(delta, 'reasoning_content', None)
        content = getattr(delta, 'content', None)

        if not reasoning and not content:
            continue

        if reasoning:
            yield reasoning
        else:
            # First non-reasoning token marks the switch to the answer phase.
            is_answering = True
            yield content

@app.route('/chat', methods=['POST'])
def chat():
    """POST /chat — accept ``{"content": ...}`` JSON and stream the reply.

    Returns a ``text/event-stream`` response that relays model output
    incrementally, or a JSON 400 error when the body is missing, not valid
    JSON, or lacks a non-empty ``content`` field.
    """
    # silent=True yields None on a missing/invalid JSON body instead of
    # raising, so we can return a clean 400 rather than a 500.
    data = request.get_json(silent=True)
    prompt = data.get('content', '') if data else ''

    if not prompt:
        return jsonify({"error": "Content is required"}), 400

    return Response(
        generate_stream(prompt),
        mimetype='text/event-stream',
        headers={
            'Cache-Control': 'no-cache',   # disable intermediary caching of the stream
            'Connection': 'keep-alive',
        },
    )

if __name__ == '__main__':
    # threaded=True lets multiple SSE streams be served concurrently;
    # debug=True enables the reloader/debugger — disable in production.
    app.run(port=5000, threaded=True,debug=True)