import os
import shutil
import subprocess
import tempfile
from datetime import datetime
from sqlalchemy.orm import Session
from pathlib import Path
import zipfile
import uuid

from api.schemas.compile import CompileOptions
from db.models import CompilationHistory, CompilationFile, PlatformStats
from compiler.core import compile_py_to_so
from utils.file_handling import ensure_directory, get_file_size
from services.stats_service import update_platform_stats
from loguru import logger

# In-memory store for compilation progress and logs, keyed by task_id.
# (A production deployment would likely use Redis or similar instead,
# since this state is lost on restart and not shared across workers.)
compilation_progress = {}

def create_compilation_task(db: Session, task_id: str, options: CompileOptions, files):
    """Create all records for a new compilation task.

    Builds the task's output directory, persists a CompilationHistory row plus
    one CompilationFile row per uploaded source file, and seeds the in-memory
    progress tracker. Returns the refreshed CompilationHistory instance.
    """
    # Base output directory for this task.
    base_dir = ensure_directory(os.path.join("outputs", task_id))

    # Honor an optional caller-specified subdirectory beneath the base.
    target_dir = base_dir
    if options.output_dir:
        target_dir = os.path.join(base_dir, options.output_dir)
        ensure_directory(target_dir)

    # Persist the history row describing this task's settings.
    history = CompilationHistory(
        task_id=task_id,
        status="pending",
        optimize_level=options.optimize,
        python_version=options.python_version,
        platform=options.platform,
        keep_source=options.keep_source,
        zip_output=options.zip_output,
        delete_temp=options.delete_temp,
        extra_args=options.extra_args,
        output_dir=str(target_dir),
        created_at=datetime.utcnow(),
    )
    db.add(history)

    # One source-file record per upload.
    for upload in files:
        db.add(
            CompilationFile(
                history=history,
                original_filename=upload.filename,
                original_filesize=upload.size,
                is_source=True,
            )
        )

    db.commit()
    db.refresh(history)

    # Seed the in-memory progress tracker for this task.
    compilation_progress[task_id] = {"progress": 0, "log": "", "status": "pending"}

    return history

def save_uploaded_files(files, upload_dir):
    """Persist uploaded files into *upload_dir*.

    Each item in *files* is expected to expose ``filename`` and a readable
    ``file`` stream (e.g. FastAPI ``UploadFile`` — TODO confirm with caller).
    Filenames are reduced to their basename so a crafted name such as
    ``../../evil.py`` cannot escape the upload directory (path-traversal
    hardening; benign plain filenames are unaffected).
    """
    for file in files:
        # basename() strips any directory components from the untrusted name.
        safe_name = os.path.basename(file.filename)
        file_path = os.path.join(upload_dir, safe_name)
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
        logger.info(f"已保存上传文件: {file_path}")

def get_compilation_status(db: Session, task_id: str):
    """Return the current status/progress of a compilation task.

    Returns None when the task is unknown. If the task exists in the database
    but has no in-memory progress entry (e.g. after a process restart), a
    sensible entry is synthesized from the persisted status.
    """
    # Single lookup — the previous version queried the same row twice.
    history = db.query(CompilationHistory).filter(
        CompilationHistory.task_id == task_id
    ).first()
    if not history:
        return None

    if task_id not in compilation_progress:
        # Rebuild the in-memory entry for tasks that predate this process:
        # terminal tasks show 100%, anything else restarts from 0.
        compilation_progress[task_id] = {
            "progress": 100 if history.status in ["success", "failed"] else 0,
            "log": "",
            "status": history.status,
        }

    progress_entry = compilation_progress[task_id]
    return {
        "task_id": task_id,
        "status": history.status,
        "progress": progress_entry["progress"],
        "log": progress_entry["log"],
        "completed_at": history.completed_at,
        "error_message": history.error_message,
    }

def get_compilation_result(db: Session, task_id: str):
    """Fetch the final result of a compilation task.

    Returns None for an unknown task id; otherwise a dict with the task id,
    status, the produced output files (name + size), and completion time.
    """
    record = db.query(CompilationHistory).filter(
        CompilationHistory.task_id == task_id
    ).first()
    if record is None:
        return None

    # Only non-source rows represent compiler outputs.
    produced = db.query(CompilationFile).filter(
        CompilationFile.history_id == record.id,
        CompilationFile.is_source == False
    ).all()

    outputs = [
        {"filename": item.output_filename, "filesize": item.output_filesize}
        for item in produced
    ]

    return {
        "task_id": task_id,
        "status": record.status,
        "output_files": outputs,
        "completed_at": record.completed_at,
    }

def update_compilation_progress(task_id: str, progress: int, log_message: str = None, status: str = None):
    """Update the in-memory progress entry for a task.

    Sets the numeric progress, optionally appends a timestamped line to the
    task's log, and optionally overwrites its status. A default entry is
    created on first use for an untracked task.
    """
    entry = compilation_progress.setdefault(
        task_id, {"progress": 0, "log": "", "status": "pending"}
    )

    entry["progress"] = progress

    if log_message:
        stamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
        entry["log"] += f"[{stamp}] {log_message}\n"

    if status:
        entry["status"] = status

def compile_files_task(task_id: str, upload_dir: str, options: CompileOptions, db: Session):
    """Background task that compiles the uploaded Python files for *task_id*.

    Unpacks uploaded ZIP archives into a temporary work directory, copies the
    loose ``.py`` uploads alongside them, compiles every discovered ``.py``
    file to a ``.so`` via compile_py_to_so, records the results (optionally
    zipped into a single archive) in the database, and mirrors progress into
    the in-memory tracker via update_compilation_progress.

    Note: the *db* argument is deliberately replaced with a fresh session —
    background tasks must not share the request-scoped session.
    """
    history = None  # Bound early so the except-handler never hits a NameError.
    try:
        # Create an independent DB session for the background task.
        from db.database import SessionLocal
        db = SessionLocal()

        history = db.query(CompilationHistory).filter(
            CompilationHistory.task_id == task_id
        ).first()

        if not history:
            logger.error(f"编译任务不存在: {task_id}")
            db.close()
            return

        history.status = "processing"
        db.commit()
        update_compilation_progress(task_id, 5, "开始编译任务", "processing")

        # Everything saved directly under the upload directory.
        uploaded_files = [f for f in os.listdir(upload_dir) if os.path.isfile(os.path.join(upload_dir, f))]
        update_compilation_progress(task_id, 10, f"发现 {len(uploaded_files)} 个文件需要处理")

        zip_files = [f for f in uploaded_files if f.endswith(".zip")]
        py_files = [f for f in uploaded_files if f.endswith(".py")]

        # Scratch directory; removed automatically when the block exits.
        with tempfile.TemporaryDirectory() as work_dir:
            # Unpack uploaded archives into the scratch directory.
            # NOTE(review): extractall() on user-supplied ZIPs is susceptible
            # to path traversal ("zip slip"); member names should be validated
            # before extraction — flagged, not changed here.
            for zip_file in zip_files:
                zip_path = os.path.join(upload_dir, zip_file)
                with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                    zip_ref.extractall(work_dir)
                update_compilation_progress(
                    task_id, 15, f"已解压 ZIP 文件: {zip_file}"
                )

            # Copy loose .py uploads alongside the unpacked archive contents.
            for py_file in py_files:
                src_path = os.path.join(upload_dir, py_file)
                dst_path = os.path.join(work_dir, py_file)
                shutil.copy2(src_path, dst_path)

            # Collect every Python file, including those nested inside archives.
            all_py_files = []
            for root, dirs, files in os.walk(work_dir):
                for file in files:
                    if file.endswith(".py"):
                        all_py_files.append(os.path.join(root, file))

            update_compilation_progress(
                task_id, 20, f"共发现 {len(all_py_files)} 个Python文件需要编译"
            )

            if not all_py_files:
                raise Exception("未找到任何Python文件进行编译")

            output_dir = history.output_dir
            ensure_directory(output_dir)

            # Progress 20 -> 90 is spread evenly across the files.
            progress_per_file = 70 / len(all_py_files)
            current_progress = 20

            for i, py_file in enumerate(all_py_files):
                current_progress += progress_per_file
                update_compilation_progress(
                    task_id, min(int(current_progress), 90),
                    f"正在编译 {os.path.basename(py_file)} ({i+1}/{len(all_py_files)})"
                )

                try:
                    # The lambda reads current_progress at call time; callbacks
                    # fire synchronously within this iteration, so it sees the
                    # value set just above (late binding is not a problem here).
                    so_file = compile_py_to_so(
                        py_file,
                        output_dir=output_dir,
                        optimize_level=options.optimize,
                        python_version=options.python_version,
                        keep_source=options.keep_source,
                        delete_temp=options.delete_temp,
                        extra_args=options.extra_args,
                        log_callback=lambda msg: update_compilation_progress(task_id, int(current_progress), msg)
                    )

                    # Record the produced output file for this source.
                    file_record = CompilationFile(
                        history_id=history.id,
                        original_filename=os.path.basename(py_file),
                        original_filesize=get_file_size(py_file),
                        output_filename=os.path.basename(so_file),
                        output_filesize=get_file_size(so_file),
                        is_source=False
                    )
                    db.add(file_record)
                    db.commit()

                except Exception as e:
                    update_compilation_progress(
                        task_id, int(current_progress),
                        f"编译 {os.path.basename(py_file)} 失败: {str(e)}"
                    )
                    logger.error(f"编译文件失败: {str(e)}")
                    # Keep going unless this was the final file; then re-raise
                    # so the whole task is marked failed.
                    if i < len(all_py_files) - 1:
                        continue
                    raise

            # Optionally bundle all outputs into one ZIP archive.
            if options.zip_output and os.listdir(output_dir):
                update_compilation_progress(task_id, 95, "正在压缩输出文件")

                # Build the archive next to (not inside) output_dir so it does
                # not end up containing itself.
                zip_path = os.path.join(os.path.dirname(output_dir), f"output_{task_id}.zip")
                with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
                    for root, dirs, files in os.walk(output_dir):
                        for file in files:
                            file_path = os.path.join(root, file)
                            arcname = os.path.relpath(file_path, output_dir)
                            zipf.write(file_path, arcname)

                # Remove the raw outputs; only the archive is kept.
                for item in os.listdir(output_dir):
                    item_path = os.path.join(output_dir, item)
                    if os.path.isfile(item_path):
                        os.remove(item_path)
                    elif os.path.isdir(item_path):
                        shutil.rmtree(item_path)

                # Move the archive into the (now empty) output directory.
                shutil.move(zip_path, os.path.join(output_dir, os.path.basename(zip_path)))

                # The per-file output records are superseded by one ZIP record.
                db.query(CompilationFile).filter(
                    CompilationFile.history_id == history.id,
                    CompilationFile.is_source == False
                ).delete()

                zip_file_record = CompilationFile(
                    history_id=history.id,
                    output_filename=os.path.basename(zip_path),
                    output_filesize=get_file_size(os.path.join(output_dir, os.path.basename(zip_path))),
                    is_source=False
                )
                db.add(zip_file_record)
                db.commit()

        update_platform_stats(db, options.platform)

        # Mark the task as successfully completed.
        update_compilation_progress(task_id, 100, "所有文件编译完成", "success")
        history.status = "success"
        history.completed_at = datetime.utcnow()
        db.commit()

        logger.info(f"编译任务成功完成: {task_id}")

    except Exception as e:
        error_msg = str(e)
        logger.error(f"编译任务失败: {task_id}, 错误: {error_msg}")

        update_compilation_progress(
            task_id, 100, f"编译失败: {error_msg}", "failed"
        )

        # Recover the session before writing the failure state: after a failed
        # flush/commit the session is invalid and committing again would raise.
        db.rollback()

        # history is None when the failure occurred before the task row was
        # loaded (e.g. session creation failed) — nothing to update then.
        if history is not None:
            history.status = "failed"
            history.error_message = error_msg
            history.completed_at = datetime.utcnow()
            db.commit()

    finally:
        # *db* is always bound (it is a parameter), so close unconditionally;
        # the previous "'db' in locals()" guard was always true.
        db.close()
