from flask import Blueprint, request, current_app
import json
import threading
import time
import os
import shutil
from app.utils.flink import flink_request, get_flink_api_url, set_flink_api_url
from app.utils.logger import FlinkLogger, log_api_call, log_business_logic, log_flink_operation
from app.services.task_service import task_manager


bp = Blueprint("jars", __name__)

# Staging directory for JAR uploads before they are synced to Flink.
# Configurable via the JAR_UPLOAD_TEMP_DIR env var; defaults to /tmp/flink_jar_uploads.
JAR_UPLOAD_TEMP_DIR = os.environ.get('JAR_UPLOAD_TEMP_DIR', '/tmp/flink_jar_uploads')


def cleanup_old_temp_files(max_age_hours=24, temp_dir=None):
    """
    Delete staged temp files older than the given age.

    Args:
        max_age_hours: maximum file age in hours before deletion (default 24).
        temp_dir: directory to clean; defaults to JAR_UPLOAD_TEMP_DIR.
                  Exposed as a parameter for reuse and testability.

    Errors are logged, never raised: cleanup is best-effort.
    """
    if temp_dir is None:
        temp_dir = JAR_UPLOAD_TEMP_DIR

    if not os.path.exists(temp_dir):
        return

    try:
        now = time.time()
        max_age_seconds = max_age_hours * 3600
        deleted_count = 0

        for entry in os.listdir(temp_dir):
            file_path = os.path.join(temp_dir, entry)

            # Skip anything that is not a regular file: os.remove() fails on
            # directories and previously produced spurious warnings.
            if not os.path.isfile(file_path):
                continue

            try:
                # Age is measured from the file's last modification time.
                file_age = now - os.path.getmtime(file_path)
                if file_age > max_age_seconds:
                    os.remove(file_path)
                    deleted_count += 1
                    current_app.logger.info(f"已删除过期临时文件: {file_path} (文件年龄: {file_age/3600:.1f}小时)")
            except Exception as e:
                current_app.logger.warning(f"清理临时文件失败: {file_path}, 错误: {e}")

        if deleted_count > 0:
            current_app.logger.info(f"临时文件清理完成，共删除 {deleted_count} 个过期文件")
    except Exception as e:
        current_app.logger.error(f"清理临时文件目录失败: {e}")


def parse_entry_class(entry_data):
    """Extract a display name for a JAR's entry class.

    The entry metadata may arrive as a plain string, a JSON-encoded
    object/array, or already-parsed dict/list data. Returns "-" for empty
    input, the first truthy of name/className/class for structured data,
    and falls back to str(entry_data) when nothing can be parsed.
    """
    if not entry_data:
        return "-"

    try:
        if isinstance(entry_data, str):
            # A bare class name (no JSON bracket/brace) is returned verbatim.
            if not entry_data.strip().startswith(('[', '{')):
                return entry_data
            parsed = json.loads(entry_data)
        else:
            parsed = entry_data

        # For an array, only the first element is considered.
        candidate = None
        if isinstance(parsed, list) and parsed:
            candidate = parsed[0]
        elif isinstance(parsed, dict):
            candidate = parsed

        if candidate is None:
            return str(entry_data)

        for key in ('name', 'className', 'class'):
            value = candidate.get(key)
            if value:
                return value
        return "-"
    except (json.JSONDecodeError, TypeError, AttributeError):
        # Unparseable input: fall back to its string form.
        return str(entry_data)


@bp.get("")
@log_api_call("jars.list_jars")
def list_jars():
    """List the JARs registered on the Flink cluster.

    Query params: flink_env (optional cluster override), sort (only
    "uploaded" is honored) and order ("asc"/"desc", default "desc").
    Each jar's 'entry' field is flattened to a readable class name.
    """
    current_app.logger.info("Listing JAR files")
    flink_env = request.args.get("flink_env")

    # Sorting parameters (default: newest uploads first)
    sort_field = request.args.get("sort", "uploaded")
    sort_order = request.args.get("order", "desc").lower()

    original_url = None
    if flink_env:
        # Temporarily point the client at the requested cluster.
        original_url = get_flink_api_url()
        set_flink_api_url(flink_env)
    try:
        jar_entries = []
        ok, result = flink_request("GET", "/jars")
        if ok and isinstance(result, dict):
            for item in (result.get("files", []) or []):
                if isinstance(item, dict) and 'entry' in item:
                    # Flatten the entry-class JSON into a display name.
                    item['entry'] = parse_entry_class(item['entry'])
                jar_entries.append(item)

            if sort_field == "uploaded" and jar_entries:
                try:
                    jar_entries.sort(
                        key=lambda j: j.get('uploaded', 0) or 0,
                        reverse=(sort_order == "desc"),
                    )
                except Exception as e:
                    current_app.logger.info(f"Sort failed: {e}")
        else:
            current_app.logger.info(f"Flink API request failed: {result}")

        # No filters on this listing, so the global total equals the current total.
        total = len(jar_entries)
        return {
            "success": True,
            "message": "ok",
            "data": {
                "jars": jar_entries,
                "total": total,
                "total_all": total
            }
        }
    except Exception as e:
        current_app.logger.info(f"Error in list_jars: {str(e)}")
        return {"success": False, "error": str(e), "code": "SERVER_ERROR"}, 500
    finally:
        if original_url:
            set_flink_api_url(original_url)


@bp.post("/delete-all")
@log_api_call("jars.delete_all")
def delete_all():
    """Delete every JAR currently registered on the Flink cluster.

    Accepts flink_env from a JSON body or form data; returns counts of
    deleted and failed removals.
    """
    current_app.logger.info("Deleting all JAR files")
    # get_json(silent=True): request.json raises on non-JSON bodies, which
    # would break the intended form-data fallback below.
    payload = request.get_json(silent=True) or {}
    flink_env = payload.get("flink_env") or request.form.get("flink_env")
    original_url = None
    if flink_env:
        original_url = get_flink_api_url()
        set_flink_api_url(flink_env)
    try:
        ok, result = flink_request("GET", "/jars")
        deleted = 0
        failed = 0
        if ok and isinstance(result, dict):
            for jar in result.get("files", []) or []:
                jar_id = jar.get("id")
                if not jar_id:
                    # Cannot address a jar without its id.
                    failed += 1
                    continue
                ok_del, _ = flink_request("DELETE", f"/jars/{jar_id}")
                if ok_del:
                    deleted += 1
                else:
                    failed += 1
        return {"success": True, "message": "ok", "data": {"deleted": deleted, "failed": failed}}
    except Exception as e:
        return {"success": False, "error": str(e), "code": "SERVER_ERROR"}, 500
    finally:
        if original_url:
            set_flink_api_url(original_url)


@bp.post("/<jar_id>/delete")
@log_api_call("jars.delete_single")
def delete_single(jar_id):
    """Delete a single JAR (addressed by Flink jar id) from the cluster."""
    current_app.logger.info(f"Deleting single JAR file: {jar_id}")
    # get_json(silent=True): request.json raises on non-JSON bodies, which
    # would break the intended form-data fallback below.
    payload = request.get_json(silent=True) or {}
    flink_env = payload.get("flink_env") or request.form.get("flink_env")
    original_url = None
    if flink_env:
        original_url = get_flink_api_url()
        set_flink_api_url(flink_env)
    try:
        ok, _ = flink_request("DELETE", f"/jars/{jar_id}")
        if ok:
            return {"success": True, "message": "deleted"}
        return {"success": False, "error": "delete failed", "code": "FLINK_DELETE_FAILED"}, 500
    finally:
        if original_url:
            set_flink_api_url(original_url)


@bp.post("/upload-temp")
@log_api_call("jars.upload_temp")
def upload_temp():
    """Stage uploaded JAR files in the server temp directory (no Flink sync).

    Expects multipart form-data with files under field 'jarfiles'. Returns
    the staged filenames/paths/sizes plus the flink_env echoed back so the
    client can register them later.
    """
    current_app.logger.info("Uploading temporary JAR files")
    # NOTE: the previously imported werkzeug secure_filename was unused
    # (original names are deliberately preserved) and has been removed.

    # Flink environment: URL query parameter takes precedence over form field.
    flink_env = request.args.get("flink_env") or request.form.get("flink_env")

    temp_dir = JAR_UPLOAD_TEMP_DIR
    os.makedirs(temp_dir, exist_ok=True)

    # Best-effort purge of temp files older than 24 hours.
    try:
        cleanup_old_temp_files(max_age_hours=24)
    except Exception as e:
        current_app.logger.warning(f"清理旧临时文件时出错: {e}")

    files = request.files.getlist("jarfiles")
    if not files:
        return {"success": False, "error": "no files", "code": "NO_FILES"}, 400

    uploaded_files = []
    for fs in files:
        if fs.filename:
            # Keep the original file name; basename() strips any client-side
            # path components. Intentionally NOT secure_filename(), so the
            # exact JAR name is preserved for Flink.
            filename = os.path.basename(fs.filename)
            # Same file name overwrites the previous staged copy.
            temp_path = os.path.join(temp_dir, filename)

            try:
                fs.save(temp_path)
                uploaded_files.append({
                    "filename": filename,
                    "tempPath": temp_path,
                    "size": os.path.getsize(temp_path)
                })
                current_app.logger.info(f"已上传文件到临时目录: {temp_path}")
            except Exception as e:
                current_app.logger.info(f"Error saving file {fs.filename}: {str(e)}")
                return {"success": False, "error": f"保存文件失败: {str(e)}", "code": "SAVE_ERROR"}, 500

    return {
        "success": True,
        "message": "文件上传成功",
        "data": {
            "files": uploaded_files,
            "flinkEnv": flink_env
        }
    }

@bp.post("/register-to-flink")
@log_api_call("jars.register_to_flink")
@log_business_logic("register_jars_to_flink")
def register_to_flink():
    """Register previously staged temp files with the Flink cluster.

    Body (JSON): tempIds (filename prefixes in the temp dir), flinkEnv,
    cleanFirst (delete same-named cluster JARs before uploading).
    Each temp file is removed after its upload attempt.
    """
    current_app.logger.info("Registering temporary files to Flink cluster")

    # silent=True tolerates missing/invalid JSON and yields the 400 below
    # instead of an opaque error from Flask. Redundant local `import os` /
    # `import json` removed (both already imported at module level).
    data = request.get_json(silent=True) or {}
    temp_ids = data.get("tempIds", [])
    flink_env = data.get("flinkEnv")
    clean_first = data.get("cleanFirst", False)

    if not temp_ids:
        return {"success": False, "error": "no temp ids", "code": "NO_TEMP_IDS"}, 400

    if not flink_env:
        return {"success": False, "error": "no flink env", "code": "NO_FLINK_ENV"}, 400

    # Point the Flink client at the requested environment; restored in finally.
    original_url = get_flink_api_url()
    set_flink_api_url(flink_env)

    try:
        temp_dir = JAR_UPLOAD_TEMP_DIR

        # Resolve each temp id to an on-disk file (prefix match).
        files_to_upload = []
        for temp_id in temp_ids:
            matches = [f for f in os.listdir(temp_dir) if f.startswith(temp_id)]
            if matches:
                files_to_upload.append(matches[0])

        # Optionally delete cluster JARs sharing a name with the upload set.
        deleted_count = 0
        if clean_first and files_to_upload:
            try:
                ok, result = flink_request("GET", "/jars")
                if ok and isinstance(result, dict):
                    for jar in result.get("files", []) or []:
                        jar_id = jar.get("id")
                        jar_name = jar.get("name", "")
                        if jar_id and jar_name in files_to_upload:
                            try:
                                delete_ok, delete_result = flink_request("DELETE", f"/jars/{jar_id}")
                                if delete_ok:
                                    current_app.logger.info(f"已删除同名JAR包: {jar_name} (ID: {jar_id})")
                                    deleted_count += 1
                                else:
                                    current_app.logger.info(f"删除同名JAR包失败: {jar_name}, 错误: {delete_result}")
                            except Exception as e:
                                current_app.logger.info(f"删除同名JAR包时发生异常: {jar_name}, 错误: {str(e)}")
            except Exception as e:
                current_app.logger.info(f"Warning: Failed to clean existing same-name JARs: {str(e)}")

        # Upload each temp file to Flink. The requests import is hoisted out
        # of the loop (it was previously executed once per file).
        import requests as _r
        success_count = 0
        failed_count = 0
        registered_jars = []

        for temp_id in temp_ids:
            try:
                matches = [f for f in os.listdir(temp_dir) if f.startswith(temp_id)]
                if not matches:
                    current_app.logger.info(f"Temp file not found for ID: {temp_id}")
                    failed_count += 1
                    continue

                temp_file = matches[0]
                temp_path = os.path.join(temp_dir, temp_file)

                base = get_flink_api_url().rstrip('/')
                url = f"{base}/jars/upload"

                with open(temp_path, 'rb') as f:
                    files = {"jarfile": (temp_file, f, "application/x-java-archive")}
                    resp = _r.post(url, files=files, timeout=60)

                    if 200 <= resp.status_code < 300:
                        success_count += 1
                        registered_jars.append({
                            "tempId": temp_id,
                            "filename": temp_file,
                            "flinkResponse": resp.json() if resp.content else {}
                        })
                    else:
                        failed_count += 1
                        current_app.logger.info(f"Failed to upload {temp_file} to Flink: {resp.status_code}")

                # Remove the temp copy regardless of the upload outcome.
                try:
                    os.remove(temp_path)
                except Exception:
                    pass

            except Exception as e:
                current_app.logger.info(f"Error registering {temp_id}: {str(e)}")
                failed_count += 1

        return {
            "success": True,
            "message": f"成功同步 {success_count} 个JAR包到Flink",
            "data": {
                "registered": success_count,
                "failed": failed_count,
                "deleted": deleted_count,
                "jars": registered_jars
            }
        }

    except Exception as e:
        return {"success": False, "error": f"同步到Flink失败: {str(e)}", "code": "FLINK_ERROR"}, 500
    finally:
        if original_url:
            set_flink_api_url(original_url)

@bp.post("/register-single-to-flink")
@log_api_call("jars.register_single_to_flink")
def register_single_to_flink():
    """Register one staged temp file with the Flink cluster.

    Body (JSON): filename, flinkEnv, cleanFirst. The temp file is deleted
    after the attempt, whether it succeeds, fails, or raises.
    """
    current_app.logger.info("Registering single temporary file to Flink cluster")

    # silent=True tolerates missing/invalid JSON bodies; redundant local
    # `import os` removed (already imported at module level).
    data = request.get_json(silent=True) or {}
    filename = data.get("filename")
    flink_env = data.get("flinkEnv")
    clean_first = data.get("cleanFirst", False)

    if not filename:
        return {"success": False, "error": "no filename", "code": "NO_FILENAME"}, 400

    if not flink_env:
        return {"success": False, "error": "no flink env", "code": "NO_FLINK_ENV"}, 400

    # Point the Flink client at the requested environment; restored in finally.
    original_url = get_flink_api_url()
    set_flink_api_url(flink_env)

    try:
        # Optionally delete cluster JARs carrying the same name first.
        deleted_count = 0
        if clean_first:
            try:
                ok, result = flink_request("GET", "/jars")
                if ok and isinstance(result, dict):
                    for jar in result.get("files", []) or []:
                        jar_id = jar.get("id")
                        jar_name = jar.get("name", "")
                        if jar_id and jar_name == filename:
                            try:
                                delete_ok, delete_result = flink_request("DELETE", f"/jars/{jar_id}")
                                if delete_ok:
                                    current_app.logger.info(f"已删除同名JAR包: {jar_name} (ID: {jar_id})")
                                    deleted_count += 1
                                else:
                                    current_app.logger.info(f"删除同名JAR包失败: {jar_name}, 错误: {delete_result}")
                            except Exception as e:
                                current_app.logger.info(f"删除同名JAR包时发生异常: {jar_name}, 错误: {str(e)}")
            except Exception as e:
                current_app.logger.info(f"Warning: Failed to clean existing same-name JARs: {str(e)}")

        temp_dir = JAR_UPLOAD_TEMP_DIR

        # Locate the staged file directly by its name.
        temp_path = os.path.join(temp_dir, filename)
        if not os.path.exists(temp_path):
            # Fixed: include the missing filename instead of a broken placeholder.
            return {"success": False, "error": f"Temp file not found: {filename}", "code": "TEMP_FILE_NOT_FOUND"}, 404

        base = get_flink_api_url().rstrip('/')
        url = f"{base}/jars/upload"

        with open(temp_path, 'rb') as f:
            # Upload under the original file name.
            files = {"jarfile": (filename, f, "application/x-java-archive")}
            import requests as _r
            resp = _r.post(url, files=files, timeout=60)

            if 200 <= resp.status_code < 300:
                # Remove the temp copy immediately after a successful sync.
                try:
                    os.remove(temp_path)
                    current_app.logger.info(f"已删除临时文件: {temp_path}")
                except Exception as e:
                    current_app.logger.info(f"删除临时文件失败: {e}")

                return {
                    "success": True,
                    "message": f"成功同步 {filename} 到Flink",
                    "data": {
                        "filename": filename,
                        "deleted": deleted_count,
                        "flinkResponse": resp.json() if resp.content else {}
                    }
                }
            else:
                # Failed sync: still drop the temp copy.
                try:
                    os.remove(temp_path)
                    current_app.logger.info(f"已删除失败文件的临时文件: {temp_path}")
                except Exception as e:
                    current_app.logger.warning(f"删除临时文件失败: {e}")
                return {"success": False, "error": f"Failed to upload {filename} to Flink: {resp.status_code}", "code": "FLINK_UPLOAD_FAILED"}, 500

    except Exception as e:
        # Best-effort cleanup of the temp file on unexpected errors.
        try:
            temp_path = os.path.join(JAR_UPLOAD_TEMP_DIR, filename)
            if os.path.exists(temp_path):
                os.remove(temp_path)
                current_app.logger.info(f"已删除异常文件的临时文件: {temp_path}")
        except Exception as del_e:
            current_app.logger.warning(f"删除临时文件失败: {del_e}")
        return {"success": False, "error": f"同步到Flink失败: {str(e)}", "code": "FLINK_ERROR"}, 500
    finally:
        if original_url:
            set_flink_api_url(original_url)

@bp.post("/upload")
@log_api_call("jars.upload")
def upload():
    """Upload JAR files directly to the Flink cluster.

    Expects multipart form-data with files under field 'jarfiles'.
    Optional: flink_env (query param takes precedence over form field)
    and clean_first=true to delete same-named cluster JARs beforehand.
    """
    current_app.logger.info("Uploading JAR files directly to Flink")
    # flink_env: URL query parameter first, then the form field.
    flink_env = request.args.get("flink_env") or request.form.get("flink_env")
    # Whether to remove same-named JARs from the cluster before uploading.
    clean_first = request.args.get("clean_first", "false").lower() == "true"

    # Debug logging
    current_app.logger.info(f"Upload request - flink_env: {flink_env}, clean_first: {clean_first}")
    current_app.logger.info(f"Request files: {list(request.files.keys())}")
    current_app.logger.info(f"Request form: {dict(request.form)}")

    original_url = None
    if flink_env:
        original_url = get_flink_api_url()
        set_flink_api_url(flink_env)
    try:
        files = request.files.getlist("jarfiles")
        if not files:
            return {"success": False, "error": "no files", "code": "NO_FILES"}, 400

        upload_filenames = [fs.filename for fs in files if fs.filename]

        # Optionally remove cluster JARs whose names collide with the upload set.
        deleted_count = 0
        if clean_first and upload_filenames:
            try:
                ok, result = flink_request("GET", "/jars")
                if ok and isinstance(result, dict):
                    for jar in result.get("files", []) or []:
                        jar_id = jar.get("id")
                        jar_name = jar.get("name", "")
                        if jar_id and jar_name in upload_filenames:
                            try:
                                delete_ok, delete_result = flink_request("DELETE", f"/jars/{jar_id}")
                                if delete_ok:
                                    current_app.logger.info(f"已删除同名JAR包: {jar_name} (ID: {jar_id})")
                                    deleted_count += 1
                                else:
                                    current_app.logger.info(f"删除同名JAR包失败: {jar_name}, 错误: {delete_result}")
                            except Exception as e:
                                current_app.logger.info(f"删除同名JAR包时发生异常: {jar_name}, 错误: {str(e)}")
            except Exception as e:
                current_app.logger.info(f"Warning: Failed to clean existing same-name JARs: {str(e)}")

        # Hoisted out of the loop: the import is loop-invariant.
        import requests as _r
        success = 0
        failed = 0
        for fs in files:
            try:
                base = get_flink_api_url().rstrip('/')
                url = f"{base}/jars/upload"
                files_m = {"jarfile": (fs.filename, fs.stream, "application/x-java-archive")}
                resp = _r.post(url, files=files_m, timeout=60)
                if 200 <= resp.status_code < 300:
                    success += 1
                else:
                    failed += 1
            except Exception:
                # Per-file failure: count it and move on to the next file.
                failed += 1
        return {"success": True, "message": "ok", "data": {"success": success, "failed": failed, "deleted": deleted_count}}
    finally:
        if original_url:
            set_flink_api_url(original_url)


@bp.post("/scan-directory")
@log_api_call("jars.scan_directory")
def scan_directory():
    """Scan a server directory for deployable JAR artifacts.

    Recursively finds files matching yc-*-SNAPSHOT.jar, skipping
    original-* shaded leftovers and anything under yc-flink-common.
    Returns filename, absolute path, size, and the inferred module name.
    """
    current_app.logger.info("Scanning directory for JAR files")
    # Redundant local `import os` removed (already imported at module level).

    # silent=True: a missing/invalid JSON body falls through to the 400
    # below instead of raising inside data.get().
    data = request.get_json(silent=True) or {}
    directory_path = data.get("directoryPath")

    if not directory_path:
        return {"success": False, "error": "目录路径不能为空", "code": "NO_DIRECTORY"}, 400

    if not os.path.exists(directory_path):
        return {"success": False, "error": "目录不存在", "code": "DIRECTORY_NOT_FOUND"}, 404

    if not os.path.isdir(directory_path):
        return {"success": False, "error": "路径不是目录", "code": "NOT_A_DIRECTORY"}, 400

    try:
        matched_jars = []
        for root, dirs, files in os.walk(directory_path):
            # The yc-flink-common module is a shared library, not deployable.
            if 'yc-flink-common' in root:
                current_app.logger.info(f"跳过yc-flink-common模块: {root}")
                continue

            for file in files:
                # Only yc-*-SNAPSHOT.jar, excluding original-* shaded copies.
                if (file.startswith('yc-') and
                    file.endswith('-SNAPSHOT.jar') and
                    not file.startswith('original-')):

                    full_path = os.path.join(root, file)
                    try:
                        # Module name assumes a <module>/target/<jar> layout.
                        module = os.path.basename(os.path.dirname(os.path.dirname(full_path)))
                        matched_jars.append({
                            "filename": file,
                            "path": full_path,
                            "size": os.path.getsize(full_path),
                            "module": module
                        })
                        current_app.logger.info(f"找到匹配的JAR: {file} (模块: {module})")
                    except Exception as e:
                        current_app.logger.warning(f"处理文件时出错: {full_path}, 错误: {str(e)}")

        current_app.logger.info(f"扫描完成，找到 {len(matched_jars)} 个符合条件的JAR文件")

        return {
            "success": True,
            "message": f"找到 {len(matched_jars)} 个符合条件的JAR文件",
            "data": {
                "jars": matched_jars,
                "total": len(matched_jars)
            }
        }
    except Exception as e:
        current_app.logger.error(f"扫描目录时发生错误: {str(e)}")
        return {"success": False, "error": f"扫描目录失败: {str(e)}", "code": "SCAN_ERROR"}, 500


@bp.post("/copy-jars-to-temp")
@log_api_call("jars.copy_jars_to_temp")
def copy_jars_to_temp():
    """Copy scanned JAR files into the staging temp directory.

    Body (JSON): jarPaths — absolute paths of JARs found by /scan-directory.
    Returns both the copied files and any that could not be copied.
    """
    current_app.logger.info("Copying JAR files to temp directory")
    # Redundant local `import os` / `import shutil` removed (both are
    # already imported at module level).

    data = request.get_json(silent=True) or {}
    jar_paths = data.get("jarPaths", [])

    if not jar_paths:
        return {"success": False, "error": "没有指定要复制的文件", "code": "NO_FILES"}, 400

    temp_dir = JAR_UPLOAD_TEMP_DIR
    os.makedirs(temp_dir, exist_ok=True)

    copied_files = []
    failed_files = []

    for jar_path in jar_paths:
        if not os.path.exists(jar_path):
            current_app.logger.warning(f"文件不存在: {jar_path}")
            failed_files.append({"path": jar_path, "error": "文件不存在"})
            continue

        filename = os.path.basename(jar_path)
        temp_path = os.path.join(temp_dir, filename)

        try:
            shutil.copy2(jar_path, temp_path)
            # copy2 preserves the source mtime, which the age-based temp
            # cleanup reads — a freshly copied old build would look expired.
            # Reset the mtime so the staged copy's age starts now.
            os.utime(temp_path, None)
            copied_files.append({
                "filename": filename,
                "tempPath": temp_path,
                "size": os.path.getsize(temp_path)
            })
            current_app.logger.info(f"成功复制文件: {filename}")
        except Exception as e:
            current_app.logger.error(f"复制文件失败: {jar_path}, 错误: {str(e)}")
            failed_files.append({"path": jar_path, "error": str(e)})

    current_app.logger.info(f"复制完成: 成功 {len(copied_files)} 个, 失败 {len(failed_files)} 个")

    return {
        "success": True,
        "message": f"成功复制 {len(copied_files)} 个文件",
        "data": {
            "files": copied_files,
            "total": len(copied_files),
            "failed": failed_files,
            "failedCount": len(failed_files)
        }
    }


@bp.post("/upload-batch-async")
@log_api_call("jars.upload_batch_async")
def upload_batch_async():
    """Start an asynchronous batch upload of staged JARs to Flink.

    Body (JSON): filenames (staged temp file names), flinkEnv, cleanFirst.
    Creates a task via task_manager, runs the upload in a daemon thread,
    and immediately returns the task id; progress is polled via
    GET /task/<task_id> and cancellation via POST /task/<task_id>/cancel.
    """
    # Redundant local `import os` removed (already imported at module level).
    data = request.get_json(silent=True) or {}
    filenames = data.get("filenames", [])
    flink_env = data.get("flinkEnv")
    clean_first = data.get("cleanFirst", False)

    if not filenames:
        return {"success": False, "error": "没有选择文件", "code": "BAD_REQUEST"}, 400

    if not flink_env:
        return {"success": False, "error": "未指定Flink环境", "code": "BAD_REQUEST"}, 400

    # Create the tracking task up front so the client can poll immediately.
    task_id = task_manager.create_task(
        task_type='jar_upload',
        total_items=len(filenames),
        metadata={
            'filenames': filenames,
            'flink_env': flink_env,
            'clean_first': clean_first
        }
    )

    # Capture the real app object: the current_app proxy is unusable
    # outside the request context, i.e. inside the worker thread.
    app = current_app._get_current_object()

    def worker():
        """Background job: optionally clean same-named JARs, then upload each file."""
        with app.app_context():
            # Hoisted out of the per-file loop (was previously imported per file).
            import requests as _r
            temp_dir = JAR_UPLOAD_TEMP_DIR

            try:
                original_url = get_flink_api_url()
                set_flink_api_url(flink_env)

                # Optionally delete cluster JARs that share a name with the batch.
                deleted_count = 0
                if clean_first:
                    try:
                        ok, result = flink_request("GET", "/jars")
                        if ok and isinstance(result, dict):
                            for jar in result.get("files", []) or []:
                                jar_id = jar.get("id")
                                jar_name = jar.get("name", "")
                                if jar_id and jar_name in filenames:
                                    try:
                                        delete_ok, delete_result = flink_request("DELETE", f"/jars/{jar_id}")
                                        if delete_ok:
                                            current_app.logger.info(f"已删除同名JAR包: {jar_name} (ID: {jar_id})")
                                            deleted_count += 1
                                    except Exception as e:
                                        current_app.logger.warning(f"删除同名JAR包时发生异常: {jar_name}, 错误: {str(e)}")
                    except Exception as e:
                        current_app.logger.warning(f"清理同名JAR包失败: {str(e)}")

                cancelled = False
                for i, filename in enumerate(filenames):
                    # Honour cooperative cancellation between files.
                    if task_manager.is_task_cancelled(task_id):
                        current_app.logger.info(f"[任务{task_id}] 任务已被取消")
                        cancelled = True
                        # Mark the remaining files as cancelled and drop their temp copies.
                        for j in range(i, len(filenames)):
                            task_manager.update_progress(task_id, j + 1, {
                                'success': None,
                                'filename': filenames[j],
                                'message': '任务已取消',
                                'cancelled': True
                            })
                            try:
                                temp_path = os.path.join(temp_dir, filenames[j])
                                if os.path.exists(temp_path):
                                    os.remove(temp_path)
                                    current_app.logger.info(f"已删除取消任务的临时文件: {temp_path}")
                            except Exception as e:
                                current_app.logger.warning(f"删除临时文件失败: {e}")
                        break

                    # Fixed: log the actual filename instead of a broken placeholder.
                    current_app.logger.info(f"[任务{task_id}] 正在处理文件 {i+1}/{len(filenames)}: {filename}")

                    # Computed before the try so the except handler below can
                    # reference temp_path without risking an unbound name.
                    temp_path = os.path.join(temp_dir, filename)
                    try:
                        if not os.path.exists(temp_path):
                            error_msg = f"临时文件不存在: {filename}"
                            current_app.logger.warning(error_msg)
                            task_manager.update_progress(task_id, i + 1, {
                                'success': False,
                                'filename': filename,
                                'message': error_msg
                            })
                            continue

                        base = get_flink_api_url().rstrip('/')
                        url = f"{base}/jars/upload"

                        with open(temp_path, 'rb') as f:
                            files = {"jarfile": (filename, f, "application/x-java-archive")}
                            resp = _r.post(url, files=files, timeout=120)

                            if 200 <= resp.status_code < 300:
                                task_manager.update_progress(task_id, i + 1, {
                                    'success': True,
                                    'filename': filename,
                                    'message': '上传成功'
                                })

                                # Remove the staged copy after a successful upload.
                                try:
                                    os.remove(temp_path)
                                    current_app.logger.info(f"已删除临时文件: {temp_path}")
                                except Exception as e:
                                    current_app.logger.warning(f"删除临时文件失败: {e}")
                            else:
                                error_msg = f"上传失败: HTTP {resp.status_code}"
                                current_app.logger.warning(f"{filename}: {error_msg}")
                                task_manager.update_progress(task_id, i + 1, {
                                    'success': False,
                                    'filename': filename,
                                    'message': error_msg
                                })
                                # Upload failed: still drop the staged copy.
                                try:
                                    os.remove(temp_path)
                                    current_app.logger.info(f"已删除失败文件的临时文件: {temp_path}")
                                except Exception as e:
                                    current_app.logger.warning(f"删除临时文件失败: {e}")

                    except Exception as e:
                        error_msg = f"处理失败: {str(e)}"
                        current_app.logger.error(f"处理文件 {filename} 时出错: {str(e)}")
                        task_manager.update_progress(task_id, i + 1, {
                            'success': False,
                            'filename': filename,
                            'message': error_msg
                        })
                        # Unexpected error: still drop the staged copy.
                        try:
                            if os.path.exists(temp_path):
                                os.remove(temp_path)
                                current_app.logger.info(f"已删除异常文件的临时文件: {temp_path}")
                        except Exception as del_e:
                            current_app.logger.warning(f"删除临时文件失败: {del_e}")

                # Restore the original Flink endpoint for this process.
                if original_url:
                    set_flink_api_url(original_url)

                # Mark the task finished (cancelled when interrupted above).
                if cancelled:
                    task_manager.complete_task(task_id, 'cancelled')
                else:
                    task_manager.complete_task(task_id, 'completed')

            except Exception as e:
                current_app.logger.error(f"批量上传任务失败: {str(e)}")
                task_manager.complete_task(task_id, 'failed', str(e))

    # Run the upload in the background; daemon so it never blocks shutdown.
    thread = threading.Thread(target=worker, daemon=True)
    thread.start()

    return {
        "success": True,
        "message": "批量上传任务已启动",
        "data": {
            "task_id": task_id
        }
    }


@bp.get("/task/<task_id>")
@log_api_call("jars.get_task_status")
def get_task_status(task_id):
    """Look up a background task by id and report its current state.

    Returns the task record from the task manager, or a 404 payload with
    a machine-readable error code when the id is unknown.
    """
    found = task_manager.get_task(task_id)
    if not found:
        # Unknown (or expired) task id -> 404 with an error code the
        # frontend can branch on.
        return {"success": False, "error": "任务不存在", "code": "TASK_NOT_FOUND"}, 404
    return {"success": True, "message": "ok", "data": found}


@bp.post("/task/<task_id>/cancel")
@log_api_call("jars.cancel_task")
def cancel_task(task_id):
    """Request cancellation of a running background task.

    Delegates to the task manager; a falsy result means the task either
    does not exist or has already finished, which is reported as a 400.
    """
    was_cancelled = task_manager.cancel_task(task_id)
    if not was_cancelled:
        # Cancellation is only possible while the task is still running.
        return {"success": False, "error": "无法取消任务（任务不存在或已完成）", "code": "CANCEL_FAILED"}, 400
    return {"success": True, "message": "任务已取消"}


@bp.post("/cleanup-temp-files")
@log_api_call("jars.cleanup_temp_files")
def cleanup_temp_files_api():
    """Manually clean up expired files in the JAR upload temp directory.

    Optional JSON body: {"maxAgeHours": <number>} — files whose mtime is
    older than this many hours are deleted (default 24).

    Returns:
        A summary payload listing deleted and kept files. 400 when
        maxAgeHours is not numeric, 500 when scanning the directory fails.
    """
    # get_json(silent=True) returns None instead of raising 415/400 when the
    # request has no body (or a non-JSON one), so the default of 24 actually
    # applies to bodiless POSTs — request.json would abort the request here.
    payload = request.get_json(silent=True) or {}
    max_age_hours = payload.get("maxAgeHours", 24)

    # A non-numeric value would otherwise surface later as an unhandled
    # TypeError (500) in the age comparison; reject it up front.
    try:
        max_age_hours = float(max_age_hours)
    except (TypeError, ValueError):
        return {"success": False, "error": "maxAgeHours 必须是数字", "code": "INVALID_PARAM"}, 400

    temp_dir = JAR_UPLOAD_TEMP_DIR

    if not os.path.exists(temp_dir):
        return {
            "success": True,
            "message": "临时目录不存在",
            "data": {
                "deletedCount": 0,
                "tempDir": temp_dir
            }
        }

    try:
        current_time = time.time()
        max_age_seconds = max_age_hours * 3600
        deleted_files = []
        kept_files = []

        for filename in os.listdir(temp_dir):
            file_path = os.path.join(temp_dir, filename)

            # Skip subdirectories and other non-regular entries: os.remove
            # would fail on them and uploads only ever create plain files.
            if not os.path.isfile(file_path):
                continue

            try:
                file_stat = os.stat(file_path)
                file_mtime = file_stat.st_mtime
                file_size = file_stat.st_size
                file_age = current_time - file_mtime
                file_age_hours = file_age / 3600

                if file_age > max_age_seconds:
                    os.remove(file_path)
                    deleted_files.append({
                        "filename": filename,
                        "size": file_size,
                        "ageHours": round(file_age_hours, 2)
                    })
                    current_app.logger.info(f"已删除过期临时文件: {file_path} (年龄: {file_age_hours:.1f}小时)")
                else:
                    kept_files.append({
                        "filename": filename,
                        "size": file_size,
                        "ageHours": round(file_age_hours, 2)
                    })
            except Exception as e:
                # Best-effort cleanup: one bad file must not abort the scan.
                current_app.logger.warning(f"处理文件失败: {file_path}, 错误: {e}")

        return {
            "success": True,
            "message": f"临时文件清理完成，共删除 {len(deleted_files)} 个文件",
            "data": {
                "deletedCount": len(deleted_files),
                "keptCount": len(kept_files),
                "deletedFiles": deleted_files,
                "keptFiles": kept_files,
                "tempDir": temp_dir,
                "maxAgeHours": max_age_hours
            }
        }
    except Exception as e:
        current_app.logger.error(f"清理临时文件失败: {e}")
        return {"success": False, "error": f"清理临时文件失败: {str(e)}", "code": "CLEANUP_FAILED"}, 500


