from flask import Flask, render_template, request, jsonify, Response, stream_with_context
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
import os
import json

# Serve templates from the same directory as this file.
app = Flask(__name__, template_folder=os.path.dirname(__file__))

# Initialize the DeepSeek chat model (OpenAI-compatible endpoint).
# SECURITY: the API key was previously hard-coded in source; read it from
# the environment instead so the secret never lands in version control.
llm = ChatOpenAI(
    model="deepseek-chat",
    openai_api_key=os.environ.get("DEEPSEEK_API_KEY", ""),
    openai_api_base="https://api.deepseek.com/v1",
    streaming=True,  # emit tokens incrementally instead of one final message
)

# Chat prompt template: a fixed system persona plus the user's message.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个AI助手"),
    ("human", "{input}")
])

# Compose prompt -> model into a single runnable chain.
chain = prompt | llm


@app.route('/')
def index():
    """Serve the chat UI page."""
    page = render_template('index.html')
    return page


@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn, streaming the model's reply as SSE.

    Expects a JSON body like ``{"input": "..."}``.  The literal input
    "quit" (case-insensitive) ends the conversation with a plain JSON
    response; anything else is streamed back token-by-token as
    ``text/event-stream`` events, followed by a done marker.
    """
    # get_json(silent=True) returns None instead of raising on a missing
    # or malformed JSON body; fall back to {} so .get() is always safe.
    data = request.get_json(silent=True) or {}
    # Coerce to str so a non-string JSON value (e.g. a number) cannot
    # crash the .lower() call below.
    input_text = str(data.get('input', ''))

    if input_text.lower() == 'quit':
        return jsonify({'response': '聊天结束。', 'continue': False})

    def generate():
        """Yield server-sent events: one per token, then a done marker."""
        try:
            # Stream the chain's output chunk by chunk.
            for chunk in chain.stream({"input": input_text}):
                if hasattr(chunk, 'content'):
                    # Forward each token to the client as an SSE data frame.
                    yield f"data: {json.dumps({'token': chunk.content})}\n\n"

            # Tell the client the stream is complete.
            yield f"data: {json.dumps({'done': True, 'continue': True})}\n\n"
        except Exception as e:
            # Surface the failure to the client rather than silently
            # dropping the connection mid-stream.
            yield f"data: {json.dumps({'error': str(e)})}\n\n"

    # stream_with_context keeps the request context alive while the
    # generator is being consumed.
    return Response(stream_with_context(generate()), mimetype='text/event-stream')


if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug interactive debugger,
    # which allows arbitrary code execution; combined with host="0.0.0.0"
    # this must never be exposed outside a trusted development network.
    app.run(host="0.0.0.0", debug=True)