from flask import Blueprint, request, jsonify, Response, stream_with_context
from models.document import Knowledge
from services.chat_service import ChatService

# Blueprint grouping all chat-related HTTP endpoints; registered by the app factory.
chat_bp = Blueprint('chat', __name__)
# Shared service instance, used by the /models endpoint below.
# NOTE(review): chat() and chat_stream() build their own ChatService per
# request instead of reusing this one — confirm whether that is intentional.
chat_service = ChatService()
@chat_bp.route('/models', methods=['GET'])
def get_models():
    """Return the list of models available for chatting as JSON.

    On failure, responds with a 500 and a JSON error payload.
    """
    try:
        return jsonify(chat_service.get_available_models())
    except Exception as e:
        return jsonify({"error": str(e)}), 500

@chat_bp.route('/chat', methods=['POST'])
def chat():
    """Answer a question against a knowledge base (non-streaming).

    Expects a JSON body with:
        question      -- the user's question (required)
        knowledge_id  -- id of the Knowledge record to search (required)
        model         -- LLM name, defaults to 'llama2'
        top_k         -- number of source chunks to retrieve, defaults to 3

    Returns JSON {"response": ..., "sources": [...]} on success;
    400 on a missing/invalid body or parameters, 404 for an unknown
    knowledge id, 500 on any other failure.
    """
    try:
        # silent=True keeps a missing or non-JSON body from raising here,
        # so the check below can answer 400 instead of a generic 500.
        data = request.get_json(silent=True) or {}
        question = data.get('question')
        knowledge_id = data.get('knowledge_id')
        model = data.get('model', 'llama2')
        top_k = data.get('top_k', 3)  # number of retrieved chunks

        if not question or not knowledge_id:
            return jsonify({"error": "Missing required parameters"}), 400

        # Look the record up explicitly: get_or_404 raises an HTTPException
        # that the broad `except` below would mask as a 500.
        knowledge = Knowledge.query.get(knowledge_id)
        if knowledge is None:
            return jsonify({"error": "Knowledge not found"}), 404

        chain = ChatService().create_chain(
            collection_name=knowledge.collection_name,
            model_name=model,
            embedding_model=knowledge.embedding_model,
            top_k=top_k,
        )

        result = chain({"question": question})

        # Surface the retrieved chunks so the client can render citations.
        sources = [
            {"content": doc.page_content, "metadata": doc.metadata}
            for doc in result.get("source_documents") or []
        ]

        return jsonify({
            "response": result["answer"],
            "sources": sources
        })

    except Exception as e:
        return jsonify({"error": str(e)}), 500

@chat_bp.route('/chat/stream', methods=['GET'])
def chat_stream():
    """Stream a chat answer over Server-Sent Events (SSE).

    Query parameters:
        question      -- the user's question (required)
        knowledge_id  -- id of the Knowledge record to search (required)
        model         -- LLM name, defaults to 'llama2'
        top_k         -- number of source chunks to retrieve, defaults to 3

    Emits `data: <chunk>` events and always terminates the stream with a
    `data: [DONE]` sentinel. Errors raised after streaming has started are
    reported in-band as `data: Error: ...` events.
    """
    try:
        question = request.args.get('question')
        knowledge_id = request.args.get('knowledge_id')
        model = request.args.get('model', 'llama2')

        if not question or not knowledge_id:
            return jsonify({"error": "Missing required parameters"}), 400

        # A non-numeric top_k is a client error: answer 400, not the 500
        # the bare int() conversion used to produce.
        try:
            top_k = int(request.args.get('top_k', 3))
        except (TypeError, ValueError):
            return jsonify({"error": "top_k must be an integer"}), 400

        # Explicit lookup: get_or_404 raises an HTTPException that the broad
        # `except` below would mask as a 500.
        knowledge = Knowledge.query.get(knowledge_id)
        if knowledge is None:
            return jsonify({"error": "Knowledge not found"}), 404

        def generate():
            """Yield SSE-framed chunks; always end with the [DONE] sentinel."""
            chat_service = ChatService()
            try:
                for chunk in chat_service.stream_chat(
                    question=question,
                    knowledge=knowledge,
                    model_name=model,
                    top_k=top_k
                ):
                    yield f"data: {chunk}\n\n"
            except Exception as e:
                # Headers are already sent; report the failure in-band.
                yield f"data: Error: {str(e)}\n\n"
            finally:
                yield "data: [DONE]\n\n"

        # stream_with_context keeps the request context alive while the
        # generator is being consumed.
        return Response(
            stream_with_context(generate()),
            mimetype='text/event-stream',
            headers={
                'Cache-Control': 'no-cache',         # SSE responses must not be cached
                'X-Accel-Buffering': 'no',           # disable proxy (nginx) buffering
                'Access-Control-Allow-Origin': '*'   # allow cross-origin EventSource
            }
        )

    except Exception as e:
        return jsonify({"error": str(e)}), 500