# =============================================================================
# ComfyUI Flux API 服务
#
# S1: 环境准备 - 导入依赖包和构建基础镜像
# S2: 模型下载 - 从HuggingFace和远程URL下载模型文件
# S3: 服务配置 - 创建Modal应用和存储卷
# S4: UI服务 - 提供交互式Web界面
# S5: API服务 - 提供图像生成API接口
# =============================================================================
# 启动命令 ： modal deploy comfyapp_multi_flux.py
# =============================================================================

# --- S1: 环境准备阶段 ---
import json
import os
import subprocess
import uuid
from pathlib import Path
from typing import Dict

import modal
import modal.experimental
import requests

# S1.1: Build the base Docker image with the Python environment and core deps.
image = (
    modal.Image.debian_slim(python_version="3.11")
    .apt_install("git")
    .pip_install("fastapi[standard]==0.115.4")
    .pip_install("comfy-cli==1.5.1")
    # `-y` is required: without it pip asks for interactive confirmation,
    # which cannot be answered during a non-interactive image build.
    .run_commands("python -m pip uninstall -y llama-cpp-python")
    # Reinstall llama-cpp-python from the CUDA 12.4 wheel index so the
    # GGUF captioning node gets a GPU-enabled build.
    .run_commands("pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cu124")
    .pip_install("requests==2.32.3")
    .run_commands("comfy --skip-prompt install --fast-deps --nvidia --version 0.3.59")
)

# S1.2: Install ComfyUI custom-node extensions (WAS node suite + joycaption GGUF).
image = image.run_commands(
    "comfy node install --fast-deps was-node-suite-comfyui@1.0.2",
    "git clone https://github.com/judian17/ComfyUI-joycaption-beta-one-GGUF.git /root/comfy/ComfyUI/custom_nodes/ComfyUI-joycaption-beta-one-GGUF"
)

# S1.3: HuggingFace secret (provides HF_TOKEN for gated model repos).
hf_secret = modal.Secret.from_name("huggingface-secret")

# --- S2: 模型下载阶段 ---


def _ensure_symlink(src: str, dst: str) -> None:
    """Symlink src -> dst, creating parent dirs; no-op if dst already exists.

    Uses os.symlink instead of `ln -s` through `shell=True` so paths with
    spaces or non-ASCII characters (several model names here are Chinese)
    cannot break the shell command line. os.path.lexists is used so an
    existing (possibly dangling) link is not overwritten and does not error.
    """
    os.makedirs(os.path.dirname(dst), exist_ok=True)
    if not os.path.lexists(dst):
        os.symlink(src, dst)


def _download_hf_set(models, target_dir, label, name_key, hf_token=None):
    """Download a list of HuggingFace files and link them into target_dir.

    Args:
        models: dicts with "repo_id", "filename" and "local_name" keys.
        target_dir: ComfyUI model directory to link the files into.
        label: human-readable label used in progress/error log messages.
        name_key: which dict key ("filename" or "repo_id") to show in logs.
        hf_token: optional HuggingFace token for gated repositories.

    Failures are logged and skipped so one bad file does not abort the
    whole image build (same best-effort behavior as the original loops).
    """
    from huggingface_hub import hf_hub_download

    os.makedirs(target_dir, exist_ok=True)
    for model in models:
        try:
            print(f"  📦 下载{label}: {model[name_key]}")
            path = hf_hub_download(
                repo_id=model["repo_id"],
                filename=model["filename"],
                cache_dir="/cache",
                token=hf_token,
            )
            _ensure_symlink(path, os.path.join(target_dir, model["local_name"]))
        except Exception as e:
            print(f"❌ {label}下载失败 {model[name_key]}: {e}")


def _download_url_models(url_models, cache_dir="/cache"):
    """Download models from direct URLs into cache_dir and link into ComfyUI.

    A file already present in the cache is reused (only the symlink is
    created). A failed download removes the partial file so a later retry
    does not mistake it for a good cached copy.
    """
    os.makedirs(cache_dir, exist_ok=True)

    for model in url_models:
        final_model_path = os.path.join(
            "/root/comfy/ComfyUI/models", model["type"], model["filename"])
        cached_file_path = os.path.join(cache_dir, model["filename"])

        # Prefer an existing cached copy over re-downloading.
        if os.path.exists(cached_file_path):
            print(f"📦 缓存中发现模型 '{model['filename']}'，直接使用缓存")
            if not os.path.lexists(final_model_path):
                _ensure_symlink(cached_file_path, final_model_path)
                print(f"✅ 从缓存创建链接: '{model['filename']}'")
            else:
                print(f"✅ 模型 '{model['filename']}' 缓存和链接都已存在")
            continue

        print(f"⬇️ 缓存中未找到，开始下载 '{model['filename']}' ...")
        try:
            # Stream to disk in chunks. A (connect, read) timeout keeps a
            # dead connection from hanging the image build forever; it does
            # not bound the total download time of a healthy transfer.
            with requests.get(model["url"], stream=True,
                              allow_redirects=True, timeout=(30, 120)) as r:
                r.raise_for_status()
                with open(cached_file_path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)

            print(f"📥 下载完成，已保存到缓存: '{model['filename']}'")
            _ensure_symlink(cached_file_path, final_model_path)
            print(f"✅ 模型 '{model['filename']}' 下载并链接完成")

        except Exception as e:
            print(f"❌ URL下载失败 {model['url']}: {e}")
            # Clean up a possibly truncated file.
            if os.path.exists(cached_file_path):
                os.remove(cached_file_path)
                print(f"🧹 已清理不完整的缓存文件: '{model['filename']}'")


def hf_download():
    """
    S2: Download all required AI model files into the /cache volume and
    symlink them into the ComfyUI model directories.

    S2.1:   base Flux checkpoint (Comfy-Org/flux1-dev, public)
    S2.1.2: CLIP/T5 text encoders (gated repo — token required)
    S2.1.3: Flux VAE (gated repo — token required)
    S2.2:   LoRA models from HuggingFace
    S2.3:   LLAVA GGUF models for the joycaption custom node
    S2.4:   extra checkpoints/LoRAs from direct URLs
            NOTE(review): these are pre-signed Cloudflare R2 URLs with
            X-Amz-Expires — they expire and will need refreshing.
    """
    from huggingface_hub import hf_hub_download

    # S2.0: resolve the HuggingFace token injected by the Modal secret.
    hf_token = os.getenv("HF_TOKEN")
    print(f"🔑 S2.0: HuggingFace Token状态: {'已配置' if hf_token else '未配置'}")

    # S2.1: base Flux checkpoint (public repo, no token needed).
    print("📥 S2.1: 开始下载Flux基础模型...")
    flux_model = hf_hub_download(
        repo_id="Comfy-Org/flux1-dev",
        filename="flux1-dev-fp8.safetensors",
        cache_dir="/cache",
    )
    _ensure_symlink(
        flux_model,
        "/root/comfy/ComfyUI/models/checkpoints/flux1-dev-fp8.safetensors",
    )

    # S2.1.2: CLIP/T5 text encoders.
    print("📥 S2.1.2: 开始下载Clip模型文件...")
    clip_models = [
        {
            "repo_id": "stabilityai/stable-diffusion-3-medium",
            "filename": "text_encoders/clip_g.safetensors",
            "local_name": "clip_g.safetensors"
        },
        {
            "repo_id": "stabilityai/stable-diffusion-3-medium",
            "filename": "text_encoders/clip_l.safetensors",
            "local_name": "clip_l.safetensors"
        },
        {
            "repo_id": "stabilityai/stable-diffusion-3-medium",
            "filename": "text_encoders/t5xxl_fp8_e4m3fn.safetensors",
            "local_name": "t5xxl_fp8_e4m3fn.safetensors"
        }
    ]
    _download_hf_set(clip_models, "/root/comfy/ComfyUI/models/clip",
                     "Clip模型", "filename", hf_token)

    # S2.1.3: Flux VAE.
    print("📥 S2.1.3: 开始下载vae基础模型...")
    vae_model = hf_hub_download(
        repo_id="black-forest-labs/FLUX.1-dev",
        filename="ae.safetensors",
        cache_dir="/cache",
        token=hf_token
    )
    _ensure_symlink(vae_model, "/root/comfy/ComfyUI/models/vae/ae.safetensors")

    # S2.2: LoRA models from HuggingFace (public repos).
    print("📥 S2.2: 开始下载LoRA模型...")
    lora_models = [
        {
            "repo_id": "UmeAiRT/FLUX.1-dev-LoRA-Ume_Sky",
            "filename": "ume_sky_v2.safetensors",
            "local_name": "ume_sky_v2.safetensors"
        },
        {
            "repo_id": "Shakker-Labs/FLUX.1-dev-LoRA-Dark-Fantasy",
            "filename": "FLUX.1-dev-lora-Dark-Fantasy.safetensors",
            "local_name": "FLUX.1-dev-lora-Dark-Fantasy.safetensors"
        },
        {
            "repo_id": "aleksa-codes/flux-ghibsky-illustration",
            "filename": "lora_v2.safetensors",
            "local_name": "lora_v2.safetensors"
        }
    ]
    _download_hf_set(lora_models, "/root/comfy/ComfyUI/models/loras",
                     "LoRA", "repo_id")

    # S2.3: LLAVA GGUF models used by the joycaption custom node.
    print("📥 S2.3: 开始下载LLAVA GGUF模型...")
    llava_gguf_models = [
        {
            "repo_id": "concedo/llama-joycaption-beta-one-hf-llava-mmproj-gguf",
            "filename": "Llama-Joycaption-Beta-One-Hf-Llava-F16.gguf",
            "local_name": "Llama-Joycaption-Beta-One-Hf-Llava-F16.gguf"
        },
        {
            "repo_id": "concedo/llama-joycaption-beta-one-hf-llava-mmproj-gguf",
            "filename": "llama-joycaption-beta-one-llava-mmproj-model-f16.gguf",
            "local_name": "llama-joycaption-beta-one-llava-mmproj-model-f16.gguf"
        }
    ]
    _download_hf_set(llava_gguf_models, "/root/comfy/ComfyUI/models/llava_gguf",
                     "LLAVA GGUF模型", "filename")

    # S2.4: extra models from direct URLs (the original comments labeled
    # this step "S2.3" twice; the log message is kept unchanged).
    print("📥 S2.3: 开始从远程URL下载模型...")
    url_models = [
        # https://civitai.com/models/416205?modelVersionId=951641
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/994980/xuer20E7BBAAE584BF20E4B880E99D.rPLX.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22xuerOneCyanTenColor_fluxV10.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250907/us-east-1/s3/aws4_request&X-Amz-Date=20250907T092255Z&X-Amz-SignedHeaders=host&X-Amz-Signature=623d3344c404f1479831b1f9a6908d5e215059a052d28d9123a67be437223e75",
            "filename": "一青十色.safetensors",
            "type": "checkpoints"
        },
        # https://civitai.com/models/736706/epic-gorgeous-details?modelVersionId=863655
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/17651/flux1LoraFlywayEpic.NKkZ.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22flux.1_lora_flyway_Epic-Characters_v1.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250907/us-east-1/s3/aws4_request&X-Amz-Date=20250907T083947Z&X-Amz-SignedHeaders=host&X-Amz-Signature=4ae9dcbd8c0205fb258b7839bb5895a94db6831ab9bf87c10936f4eafd6c028a",
            "filename": "中世纪风格.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/443821/cyberrealistic-pony
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/933225/newFantasyCorev4FLUX.pt13.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22New_Fantasy_CoreV4_FLUX.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Az-Credential=e01358d793ad6966166af8b3064953ad/20250907/us-east-1/s3/aws4_request&X-Amz-Date=20250907T084003Z&X-Amz-SignedHeaders=host&X-Amz-Signature=45b8f8e990b9105872964a0d6a440b131bcdfcde0e0d8d0d5de29756b24b55d9",
            "filename": "奇幻幻想风.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/632900/the-space-marines-warhammer-40k-or-flux-pony-illustrious?modelVersionId=1198851
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/4768839/fluxthous40k.YPhQ.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22FluxThouS40k.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250907/us-east-1/s3/aws4_request&X-Amz-Date=20250907T084019Z&X-Amz-SignedHeaders=host&X-Amz-Signature=2c3aa9cd675bd52d2a190ffc033b6f98bf434d990b1053be3da5885e57571aa5",
            "filename": "中世纪铠甲风.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/714022/neonfantasy-style-flux-ponyxl?modelVersionId=959406
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/3182257/bustywomenV3.c0P4.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22BustyWomen-v3.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T121512Z&X-Amz-SignedHeaders=host&X-Amz-Signature=042811cfcc72b1b0dad382f504c2963c367137b5382a7a83351cf974afc25ab9",
            "filename": "异世界风格.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/416205?modelVersionId=951641
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/7156478/fluxlisimoV5LoraFLUX.lPnA.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22fluxlisimo_v5_lora_FLUX.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T123032Z&X-Amz-SignedHeaders=host&X-Amz-Signature=0154bafbf69aceac56ae628fa4581fbf5de619fb75f83fc492394e26d87b9de6",
            "filename": "提升细节.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/736706/epic-gorgeous-details?modelVersionId=863655
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/4821834/msFantasyFluxV3.ZUzM.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22MS_Fantasy_Flux_V3.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T122846Z&X-Amz-SignedHeaders=host&X-Amz-Signature=56293c78b11ced23a246413ae8162e6fcfc9e5a271445d01fd02ab1a8fe8c55f",
            "filename": "MS幻想风格.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/443821/cyberrealistic-pony
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/modelVersion/2029387/Dystopian_Mythology_Fantasy.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22Dystopian_Mythology_Fantasy.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T122735Z&X-Amz-SignedHeaders=host&X-Amz-Signature=5ffe9b52bd74e7792b34895f5e4d8f21a5f8f0294262555c14410964147193b4",
            "filename": "反乌托邦幻想.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/632900/the-space-marines-warhammer-40k-or-flux-pony-illustrious?modelVersionId=1198851
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/1490212/cheongsamF1Rank4Bf16.oUML.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22cheongsam_f1_rank4_bf16.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T121558Z&X-Amz-SignedHeaders=host&X-Amz-Signature=a6d1bda3a8b1d51317c1a90cb709646fabdd0ead8325f3966a4475dc831d9df1",
            "filename": "旗袍风.safetensors",
            "type": "loras"
        },
        # https://civitai.com/models/714022/neonfantasy-style-flux-ponyxl?modelVersionId=959406
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/3182257/bustywomenV3.c0P4.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22BustyWomen-v3.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T121512Z&X-Amz-SignedHeaders=host&X-Amz-Signature=042811cfcc72b1b0dad382f504c2963c367137b5382a7a83351cf974afc25ab9",
            "filename": "好身材.safetensors",
            "type": "loras"
        },
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/1247607/mechaII.mgu7.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22Mecha_II.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T123151Z&X-Amz-SignedHeaders=host&X-Amz-Signature=f3a2c4232cef7c8c6cdda210460dbe79e22f7c155ce077ed0ec8c98d3a50ee20",
            "filename": "机械风格.safetensors",
            "type": "loras"
        },
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/3879899/retroAnimeGITS95Style.uaNv.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22Retro_Anime_-_GITS_95_style_IL_v1.0.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T123317Z&X-Amz-SignedHeaders=host&X-Amz-Signature=2d8df89101f999b47fabbd712b1a77b94efda726a935dc955fd3aba79adb810c",
            "filename": "日漫风.safetensors",
            "type": "loras"
        },
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/3343899/yfgChatgpt4oStyle.qbhG.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22YFG-ChatGPT-4o-Style-v2e16.safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250909/us-east-1/s3/aws4_request&X-Amz-Date=20250909T123433Z&X-Amz-SignedHeaders=host&X-Amz-Signature=28ee2b9322ac58775c304df97c24ffff30241a7d8d40e9d742af2daee6b05e91",
            "filename": "暗黑电影.safetensors",
            "type": "loras"
        },
        {
            "url": "https://civitai-delivery-worker-prod.5ac0637cfd0766c97916cefa3764fbdf.r2.cloudflarestorage.com/model/532363/tensorxyGufengBDLora.Oyq1.safetensors?X-Amz-Expires=86400&response-content-disposition=attachment%3B%20filename%3D%22Tensorxy_Gufeng_BD_LoRA_v1..safetensors%22&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=e01358d793ad6966166af8b3064953ad/20250921/us-east-1/s3/aws4_request&X-Amz-Date=20250921T052055Z&X-Amz-SignedHeaders=host&X-Amz-Signature=4e42ff1a60f8e0aabd5ae41729bccc2f61d94b405dfb86b0050b462a8c30a1ab",
            "filename": "古风美女.safetensors",
            "type": "loras"
        }
    ]
    _download_url_models(url_models)


# --- S3: 服务配置阶段 ---
print("🔧 S3: 开始配置Modal服务...")

# S3.1: persistent volume backing the /cache model downloads.
vol = modal.Volume.from_name("hf-hub-cache", create_if_missing=True)

# S3.2: finish the image — HF transfer support, model downloads (run once
# at build time against the cache volume), and workflow/model files.
image = (
    image.pip_install("huggingface_hub[hf_transfer]==0.34.4")
    .env({"HF_HUB_ENABLE_HF_TRANSFER": "1"})
    .run_function(
        hf_download,
        volumes={"/cache": vol},
        secrets=[hf_secret]
    )
    .add_local_file(
        Path(__file__).parent / "workflow_api.json",
        "/root/workflow_api.json"
    )
    # Raw string for the Windows path: the original mixed escaped ("\\U")
    # and unescaped ("\D") backslashes — "\D" is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+, slated to become an error).
    # NOTE(review): this is a machine-specific local path; the deploy will
    # fail on any machine that does not have this file.
    .add_local_file(
        local_path=r"C:\Users\zzg\Downloads\F.1风格模型 _ Miboso光影纹理大师_V1.safetensors",
        remote_path="/root/comfy/ComfyUI/models/loras/F.1风格模型 _ Miboso光影纹理大师_V1.safetensors"
    )
)

# S3.3: the Modal application instance.
app = modal.App(name="example-comfyapp", image=image)

# --- S4: UI服务阶段 ---


@app.function(max_containers=1, gpu="L40S", volumes={"/cache": vol})
@modal.concurrent(max_inputs=10)
@modal.web_server(8000, startup_timeout=60)
def ui():
    """
    S4: Serve the interactive ComfyUI web interface.

    Spawns the ComfyUI web server as a child process listening on
    0.0.0.0:8000 and returns immediately (Modal's web_server decorator
    proxies traffic to that port). Up to 10 concurrent users are allowed.
    """
    print("🌐 S4: 启动ComfyUI交互式Web界面...")
    launch_cmd = "comfy launch -- --listen 0.0.0.0 --port 8000"
    # Popen (not run): the server must keep running after this function returns.
    subprocess.Popen(launch_cmd, shell=True)

# --- S5: API服务阶段 ---


@app.cls(scaledown_window=300, gpu="L40S", volumes={"/cache": vol})
@modal.concurrent(max_inputs=5)
class ComfyUI:
    """
    S5: ComfyUI API service.

    Exposes a RESTful image-generation endpoint backed by a ComfyUI server
    running in the background of the container. Handles up to 5 concurrent
    requests per container.
    """
    # Port the background ComfyUI server listens on.
    port: int = 8000

    @modal.enter()
    def launch_comfy_background(self):
        """
        S5.1: Start the ComfyUI server in the background on container boot.
        """
        print(f"🚀 S5.1: 启动ComfyUI后台服务，端口: {self.port}")
        cmd = f"comfy launch --background -- --port {self.port}"
        subprocess.run(cmd, shell=True, check=True)

    @modal.method()
    def infer(self, workflow_path: str = "/root/workflow_api.json"):
        """
        S5.2: Run one image-generation workflow and return the image bytes.

        Checks server health, runs the workflow via comfy-cli, then returns
        the bytes of the first output file whose name starts with the
        workflow's SaveImage ``filename_prefix``.

        Raises:
            FileNotFoundError: if no matching output image exists (the
                original silently returned None here, which surfaced later
                as a confusing error in the API layer).
        """
        print("🎨 S5.2: 开始执行图像生成推理...")

        # S5.2.1: ensure the background server is up before submitting work.
        self.poll_server_health()

        # S5.2.2: run the workflow and wait for completion (20 min timeout).
        cmd = f"comfy run --workflow {workflow_path} --wait --timeout 1200 --verbose"
        subprocess.run(cmd, shell=True, check=True)

        # S5.2.3: read the SaveImage node's filename prefix from the workflow.
        output_dir = "/root/comfy/ComfyUI/output"
        workflow = json.loads(Path(workflow_path).read_text())
        file_prefix = [
            node.get("inputs")
            for node in workflow.values()
            if node.get("class_type") == "SaveImage"
        ][0]["filename_prefix"]

        # S5.2.4: return the generated image bytes.
        for f in Path(output_dir).iterdir():
            if f.name.startswith(file_prefix):
                return f.read_bytes()
        raise FileNotFoundError(
            f"no output image with prefix {file_prefix!r} in {output_dir}")

    @modal.fastapi_endpoint(method="POST")
    def api(self, item: Dict):
        """
        S5.3: FastAPI endpoint — generate an image from a prompt.

        Expects a JSON body with a "prompt" key; writes a per-request
        workflow file keyed by a fresh client id and returns the generated
        image as a JPEG response.
        """
        from fastapi import Response

        print("📡 S5.3: 处理API请求...")

        # S5.3.1: load the workflow template shipped with the app.
        workflow_data = json.loads(
            (Path(__file__).parent / "workflow_api.json").read_text()
        )

        # S5.3.2: inject the user prompt.
        # NOTE(review): node ids "6" and "9" are hard-coded to the shipped
        # workflow_api.json — confirm if that workflow ever changes.
        workflow_data["6"]["inputs"]["text"] = item["prompt"]

        # S5.3.3: a unique client id doubles as the output filename prefix.
        client_id = uuid.uuid4().hex
        workflow_data["9"]["inputs"]["filename_prefix"] = client_id

        # S5.3.4: write the per-request workflow file. A context manager
        # guarantees the file is flushed and closed before infer reads it
        # (the original passed an open() result straight into json.dump,
        # leaking the handle and relying on refcounting for the flush).
        new_workflow_file = f"{client_id}.json"
        with open(new_workflow_file, "w") as wf:
            json.dump(workflow_data, wf)

        # S5.3.5: run inference and return the image.
        img_bytes = self.infer.local(new_workflow_file)
        return Response(img_bytes, media_type="image/jpeg")

    def poll_server_health(self) -> None:
        """
        S5.4: Health check — verify the local ComfyUI server responds.

        On failure, stop fetching new inputs so the unhealthy container is
        retired, then raise. Returns None (the original ``-> Dict``
        annotation was wrong: nothing was ever returned).
        """
        import socket
        # `import urllib` alone does NOT bind urllib.request/urllib.error;
        # the submodules must be imported explicitly or attribute access
        # fails with AttributeError.
        import urllib.error
        import urllib.request

        try:
            req = urllib.request.Request(
                f"http://127.0.0.1:{self.port}/system_stats"
            )
            urllib.request.urlopen(req, timeout=5)
            print("✅ ComfyUI服务健康检查通过")
        except (socket.timeout, urllib.error.URLError) as e:
            print("❌ ComfyUI服务健康检查失败，停止容器")
            modal.experimental.stop_fetching_inputs()
            # Chain the original cause for easier debugging in the logs.
            raise Exception(
                "ComfyUI server is not healthy, stopping container") from e