from flask import Blueprint, render_template, request, jsonify
from app.models.llm_agent import LLMAgent
from loguru import logger

# Blueprint holding the app's main routes; handlers are attached lazily by
# init_main_routes() so the LLM agent can be injected at app-creation time.
main_bp = Blueprint('main', __name__)


def init_main_routes(llm_agent: LLMAgent):
    """Register the main routes on ``main_bp`` and return the blueprint.

    Args:
        llm_agent: The LLM agent instance used to answer chat queries;
            captured by the route closures below (dependency injection).

    Returns:
        The configured ``main_bp`` Blueprint, ready to be registered on the app.
    """
    @main_bp.route('/')
    def index():
        """Serve the landing page."""
        logger.info("############## 请求首页 ############## ")
        return render_template('index.html')

    @main_bp.route('/api/chat', methods=['POST'])
    def chat():
        """Handle a chat request: run the agent on the user's question.

        Expects a JSON body with a ``question`` key. Returns a JSON payload
        with the agent's answer, reasoning trace, tool usage and chart data,
        or an error payload with an appropriate HTTP status.
        """
        logger.info("############## 处理聊天请求... ############## ")
        # silent=True: a missing/malformed JSON body yields None instead of
        # raising, so we can return a clean 400 rather than a 500 traceback.
        data = request.get_json(silent=True)
        query = data.get('question') if data else None
        if not query:
            return jsonify({"status": "error", "message": "问题不能为空"}), 400
        logger.info(f"############## 用户初始提问:{query} ############## ")
        try:
            # Run the LLM agent and relay its structured result.
            result = llm_agent.run(query)
            if not result["success"]:
                # Agent reported a handled failure; surface its message as a 500.
                return jsonify({"status": "error", "message": result["answer"]}), 500
            logger.info(f"############## 回答完成 {result['answer']} ############## ")
            return jsonify({
                "status": "success",
                "answer": result["answer"],
                "thoughts": result["thoughts"],
                "tool_usage": result["tool_usage"],
                "chart_data": result["visualization_data"]
            }), 200
        except Exception as e:
            # Boundary handler: log the unexpected error and return a generic 500.
            logger.error(f"处理聊天请求失败: {e}")
            return jsonify({"status": "error", "message": str(e)}), 500

    return main_bp