import asyncio
import json
import os
import subprocess
import sys
import tempfile
import time
import traceback

from models.matching_result import MatchingResult, MatchingProcess
from utils.cpp_executor import CppExecutor
from utils.data_processor import DataProcessor

# 添加项目根目录到Python路径
project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, project_root)

# 现在可以正常导入所有模块
from fastapi import APIRouter, HTTPException, UploadFile, File, Form, BackgroundTasks
from pydantic import BaseModel
from typing import Optional, Dict, Any
import logging

# 配置日志
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# 修复导入路径
from config import settings
from routers.websocket import manager

router = APIRouter()

# 创建全局数据处理器实例
data_processor = DataProcessor()

# 创建C++执行器实例
cpp_executor = CppExecutor()

# 存储上传的文件路径
uploaded_files = {}

# 存储算法执行状态和进程信息
algorithm_status = {
    "is_running": False,
    "progress": 0,
    "message": "Ready",
    "process": None  # 用于存储正在运行的进程对象
}


class AlgorithmControlRequest(BaseModel):
    """
    算法控制请求模型
    """
    action: str  # start, stop, pause, resume
    parameters: Optional[Dict[str, Any]] = None


@router.get("/results", response_model=MatchingResult)
async def get_matching_results():
    """
    获取匹配结果数据
    """
    try:
        # 检查是否有结果数据
        if data_processor.matching_results is None:
            # 如果没有，尝试生成示例数据
            sample_results = data_processor._generate_sample_results()
            results = data_processor.process_matching_results(sample_results)
        else:
            results = data_processor.process_matching_results()
        return results
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/process", response_model=MatchingProcess)
async def get_matching_process():
    """
    获取匹配过程数据
    """
    try:
        # 检查是否有过程数据
        if data_processor.process_data is None:
            # 如果没有，尝试生成示例数据
            sample_process = data_processor._generate_sample_process()
            process = data_processor.process_matching_process(sample_process)
        else:
            process = data_processor.process_matching_process()
        return process
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/run")
async def run_matching_algorithm(background_tasks: BackgroundTasks):
    """
    执行匹配算法
    """
    global algorithm_status

    # 检查是否已经在运行
    if algorithm_status["is_running"]:
        raise HTTPException(
            status_code=409,
            detail="Algorithm is already running"
        )

    # 这里将调用实际的C++程序
    try:
        logger.info("Starting matching algorithm execution")

        # 设置运行状态
        algorithm_status["is_running"] = True
        algorithm_status["progress"] = 0
        algorithm_status["message"] = "Starting algorithm"
        algorithm_status["process"] = None  # 初始化进程对象

        # 检查C++可执行文件是否存在
        logger.info(f"Checking C++ executable at: {cpp_executor.cpp_executable_path}")
        if not cpp_executor.check_executable_exists():
            algorithm_status["is_running"] = False
            error_msg = f"C++ executable not found at {cpp_executor.cpp_executable_path}"
            logger.error(error_msg)
            raise HTTPException(
                status_code=500,
                detail=error_msg
            )

        # 确定使用的数据文件
        network_file = uploaded_files.get("network") or cpp_executor.default_data_paths["network"]
        shipment_file = uploaded_files.get("shipment") or cpp_executor.default_data_paths["shipment"]
        route_file = uploaded_files.get("route") or cpp_executor.default_data_paths["route"]
        cooperation_file = uploaded_files.get("cooperation") or cpp_executor.default_data_paths["cooperation"]

        logger.info(
            f"Data files: network={network_file}, shipment={shipment_file}, route={route_file}, cooperation={cooperation_file}")

        # 更新状态
        algorithm_status["progress"] = 10
        algorithm_status["message"] = "Running algorithm"

        # 调用C++程序
        logger.info("Calling C++ matching algorithm")

        # 创建临时结果文件
        import tempfile
        with tempfile.NamedTemporaryFile(suffix='.csv', delete=False) as tmp_result:
            result_file_path = tmp_result.name

        try:
            # 构建命令行参数
            cmd = [
                cpp_executor.cpp_executable_path,
                network_file,
                shipment_file,
                route_file,
                cooperation_file,
                result_file_path
            ]

            # 记录开始时间
            start_time = time.time()

            # 启动进程
            process = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True
            )

            # 保存进程对象以便后续控制
            algorithm_status["process"] = process
            process._start_time = start_time  # 保存开始时间

            # 等待进程完成
            stdout, stderr = process.communicate()

            # 检查是否被中断
            if not algorithm_status["is_running"]:
                logger.info("Algorithm execution was stopped")
                return {
                    "status": "stopped",
                    "message": "Algorithm execution was stopped"
                }

            # 记录结束时间
            end_time = time.time()
            execution_time = end_time - start_time

            # 检查执行结果
            if process.returncode != 0:
                raise RuntimeError(f"C++ program execution failed with return code {process.returncode}: {stderr}")

            # 解析结果文件
            cpp_results = cpp_executor._parse_result_file(result_file_path)

            # 添加执行信息
            cpp_results["execution_info"] = {
                "stdout": stdout,
                "stderr": stderr,
                "return_code": process.returncode,
                "execution_time": execution_time
            }

            logger.info("C++ algorithm completed successfully")

            # 处理结果并保存到数据处理器
            data_processor.load_matching_results_data(cpp_results)

            # 转换为匹配结果对象
            results = data_processor.process_matching_results(cpp_results)
            process_result = data_processor.process_matching_process()

            # 更新状态
            algorithm_status["progress"] = 100
            algorithm_status["message"] = "Algorithm completed"
            algorithm_status["is_running"] = False
            algorithm_status["process"] = None

            # 通过WebSocket推送结果更新
            await manager.broadcast(json.dumps({
                "type": "results_update",
                "data": results.dict()
            }))

            # 通过WebSocket推送过程更新
            await manager.broadcast(json.dumps({
                "type": "process_update",
                "data": process_result.dict()
            }))

            return {
                "status": "success",
                "message": "Matching algorithm completed",
                "results": results.dict(),
                "process": process_result.dict(),
                "execution_info": cpp_results.get("execution_info", {})
            }
        finally:
            # 清理临时文件
            if os.path.exists(result_file_path):
                os.unlink(result_file_path)

    except Exception as e:
        algorithm_status["is_running"] = False
        algorithm_status["process"] = None
        logger.error(f"Error running matching algorithm: {str(e)}")
        logger.error(traceback.format_exc())

        # 通过WebSocket推送错误信息
        await manager.broadcast(json.dumps({
            "type": "error",
            "data": {"message": str(e)}
        }))

        raise HTTPException(status_code=500, detail=str(e))


@router.post("/upload")
async def upload_data_file(file_type: str = Form(...), file: UploadFile = File(...)):
    """
    上传数据文件
    """
    global uploaded_files

    # 验证文件类型
    allowed_types = ["shipment", "route", "network", "cooperation", "results"]
    if file_type not in allowed_types:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid file type. Allowed types: {allowed_types}"
        )

    # 确定文件名映射
    file_name_mapping = {
        "shipment": "shipment.csv",
        "route": "route.csv",
        "network": "network.csv",
        "cooperation": "cooperation_parameter.csv",
        "results": "stable_matching.csv"
    }

    try:
        # 确保数据目录存在
        os.makedirs(settings.DATA_DIR, exist_ok=True)

        # 构造目标文件路径
        target_file_name = file_name_mapping.get(file_type, f"{file_type}.csv")
        target_file_path = os.path.join(settings.DATA_DIR, target_file_name)

        # 读取上传的文件内容
        contents = await file.read()

        # 写入目标文件
        with open(target_file_path, "wb") as f:
            f.write(contents)

        # 存储文件路径
        uploaded_files[file_type] = target_file_path

        return {
            "status": "success",
            "message": f"File {file_type} uploaded successfully",
            "file_path": target_file_path
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.get("/data/info")
async def get_uploaded_data_info():
    """
    获取已上传数据信息
    """
    global uploaded_files

    info = {}
    for file_type, file_path in uploaded_files.items():
        try:
            if os.path.exists(file_path):
                file_size = os.path.getsize(file_path) / 1024  # KB
                info[file_type] = {
                    "file_path": file_path,
                    "size_kb": round(file_size, 2)
                }
            else:
                info[file_type] = {
                    "file_path": file_path,
                    "error": "File not found"
                }
        except Exception as e:
            info[file_type] = {
                "file_path": file_path,
                "error": str(e)
            }

    return info


@router.delete("/data/clear")
async def clear_uploaded_data():
    """
    清除已上传的数据
    """
    global uploaded_files, algorithm_status

    # 删除临时文件
    for file_path in uploaded_files.values():
        try:
            if os.path.exists(file_path):
                os.unlink(file_path)
        except Exception as e:
            print(f"Warning: Failed to delete file {file_path}: {e}")

    # 清空文件记录
    uploaded_files.clear()

    # 重置算法状态
    algorithm_status["is_running"] = False
    algorithm_status["progress"] = 0
    algorithm_status["message"] = "Ready"

    # 清空数据处理器中的数据
    data_processor.clear_data()

    return {
        "status": "success",
        "message": "All uploaded data cleared"
    }


@router.get("/status")
async def get_algorithm_status():
    """
    获取算法执行状态
    """
    global algorithm_status
    return algorithm_status


@router.post("/control")
async def control_algorithm(request: AlgorithmControlRequest):
    """
    控制算法执行
    """
    global algorithm_status

    action = request.action.lower()

    if action == "stop":
        # 这里应该实现停止算法的逻辑
        algorithm_status["is_running"] = False
        algorithm_status["message"] = "Algorithm stopped"
        return {
            "status": "success",
            "message": "Algorithm stopped"
        }

    elif action == "pause":
        return {
            "status": "error",
            "message": "Pause functionality is not supported by the algorithm"
        }

    elif action == "resume":
        return {
            "status": "error",
            "message": "Resume functionality is not supported by the algorithm"
        }

    else:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid action: {action}. Supported actions: stop, pause, resume"
        )
