from flask import Blueprint, render_template, request, Response, stream_with_context
from openai import OpenAI
from flask import current_app
import json

main = Blueprint('main', __name__)

@main.route('/')
def index():
    """Serve the single-page chat UI."""
    page = 'index.html'
    return render_template(page)

@main.route('/chat', methods=['POST'])
def chat():
    """Proxy a user message to the OpenAI chat API and stream the reply.

    Expects a JSON body of the form ``{"message": "..."}``. Responds with
    ``text/event-stream`` where each event is
    ``data: {"content": "<token>"}\n\n``. Returns a 400 JSON error when the
    body is missing or has no ``message`` field.
    """
    # Build the client from app config (API key / base URL / model are
    # deployment-specific settings).
    client = OpenAI(
        api_key=current_app.config['OPENAI_API_KEY'],
        base_url=current_app.config['OPENAI_BASE_URL'],
    )

    # silent=True: a non-JSON body yields None instead of raising a 4xx/500
    # from inside request.json, so we can return a clean, explicit error.
    payload = request.get_json(silent=True) or {}
    message = payload.get('message')
    if not message:
        return Response(
            json.dumps({'error': 'message is required'}),
            status=400,
            mimetype='application/json',
        )

    # Read the model name here, before the response generator runs: the
    # generator body executes lazily during response streaming, and config
    # should not be fetched from the app proxy at that point.
    model = current_app.config['OPENAI_MODEL']

    def generate():
        # One-shot, single-turn request; stream=True yields incremental deltas.
        chat_completion = client.chat.completions.create(
            model=model,
            messages=[
                {
                    "role": "user",
                    "content": message,
                }
            ],
            stream=True,
        )

        for chunk in chat_completion:
            # delta.content is None/absent on role or metadata chunks; only
            # forward chunks that actually carry text.
            content = getattr(chunk.choices[0].delta, 'content', None)
            if content:
                yield f"data: {json.dumps({'content': content})}\n\n"

    # stream_with_context keeps the request context alive while the
    # generator is consumed during streaming.
    return Response(stream_with_context(generate()), mimetype='text/event-stream')