from pydoc import html

from flask import Blueprint
from flask import Flask, request, jsonify,stream_with_context,Response
from flask_cors import CORS
import ollama
import json
from APP.database import db
from APP.models.ai_conversation import AIConversation

# Blueprint for all AI-conversation endpoints (mounted by the app factory).
ai_conversation = Blueprint('ai_conversation', __name__)
# Name of the Ollama model used for chat completions.
model_name='deepseek-r1:7b'
# Initialize the Ollama client (local Ollama daemon on its default port).
# NOTE(review): `chat()` below calls module-level `ollama.chat` instead of
# this client — presumably unintentional; verify which endpoint is wanted.
ollama_client = ollama.Client(host='http://localhost:11434')


@ai_conversation.route("/messages", methods=['GET'])
def getMessages():
    """Return every stored conversation message as a JSON array.

    Each element carries the row's ``id``, ``sender``, ``content`` and
    ``created_at`` fields.
    """
    # Renamed local: the original reused `ai_conversation`, shadowing the
    # module-level Blueprint object of the same name.
    messages = AIConversation.query.all()
    return jsonify([
        {'id': m.id, 'sender': m.sender, 'content': m.content,
         'created_at': m.created_at}
        for m in messages
    ])

@ai_conversation.route("/delmessages", methods=['GET', 'DELETE'])
def deleteMessages():
    """Delete all conversation messages and return the remaining list.

    DELETE is now accepted in addition to the original implicit GET, so
    clients can use the semantically correct verb for this destructive
    operation; GET is kept for backward compatibility with existing
    callers.
    """
    db.session.query(AIConversation).delete()
    db.session.commit()
    # Re-query after the commit (should be empty); renamed local to avoid
    # shadowing the module-level `ai_conversation` Blueprint.
    remaining = AIConversation.query.all()
    return jsonify([
        {'id': m.id, 'sender': m.sender, 'content': m.content,
         'created_at': m.created_at}
        for m in remaining
    ])
@ai_conversation.route('/stream', methods=['GET'])
def chat():
    """Stream an LLM reply for ``?prompt=...`` as Server-Sent Events.

    The user prompt is persisted before streaming starts; the complete AI
    reply is persisted after the stream finishes.  Returns a JSON error
    body with status 500 if setup fails before streaming begins.
    """
    try:
        prompt = request.args.get('prompt', '')
        user_row = AIConversation(
            sender='user',
            content=prompt
        )
        db.session.add(user_row)
        db.session.commit()
        print("Prompt:", prompt)

        # Generator producing one SSE event per model token.
        def generate():
            # Use the stdlib `html` module for escaping.  The file-level
            # `from pydoc import html` binds pydoc's internal HTMLDoc()
            # instance, which is not a public escaping API; this local
            # import takes precedence inside this function.
            from html import escape

            # Call the Ollama streaming API through the client configured
            # at module level (was `ollama.chat`, which bypassed
            # `ollama_client` and its host setting).
            response = ollama_client.chat(
                model=model_name,
                messages=[{"role": "user", "content": prompt}],
                stream=True
            )

            # Accumulates the raw tokens so the full reply can be saved.
            full_response = []

            for chunk in response:
                if 'message' in chunk and 'content' in chunk['message']:
                    token = chunk['message']['content']
                    full_response.append(token)

                    # Escape HTML and fold newlines into <br> so the
                    # payload cannot contain a bare "\n", which would
                    # prematurely terminate the SSE event frame.
                    safe_token = escape(token).replace('\n', '<br>')

                    # Bug fix: the original computed `safe_token` but then
                    # yielded the raw `token`, defeating the escaping.
                    yield f"data: {safe_token}\n\n".encode('utf-8')

            # Persist the complete AI reply, stripping the model's
            # <think>...</think> reasoning markers.
            ai_row = AIConversation(
                sender='ai',
                content=''.join(full_response).replace('<think>', '').replace('</think>', '').strip()
            )
            db.session.add(ai_row)
            db.session.commit()
            print("Response:", ''.join(full_response))

        # stream_with_context keeps the request context alive while the
        # generator runs — the db.session calls above depend on it.
        return Response(
            stream_with_context(generate()),
            content_type='text/event-stream',
            headers={
                'Cache-Control': 'no-cache',
                'Connection': 'keep-alive',
                'X-Accel-Buffering': 'no',  # disable proxy buffering for SSE
                'Access-Control-Allow-Origin': '*'  # tighten for production as needed
            }
        )

    except Exception as e:
        # NOTE(review): exceptions raised inside generate() after the
        # response headers are sent are NOT caught here — the stream
        # simply terminates.  Consider a try/except inside generate().
        return jsonify({'error': str(e)}), 500

