{
"id": "908d0bfb-e192-4627-9b57-147496e6e2dd",
"revision": 0,
"last_node_id": 82,
"last_link_id": 103,
"nodes": [
{
"id": 40,
"type": "DualCLIPLoader",
"pos": [
-1054.8211950403152,
313.2223343219331
],
"size": [
270,
130
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
64
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "DualCLIPLoader",
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "VAELoader",
"pos": [
-1054.8211950403152,
493.2223343219331
],
"size": [
270,
58
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
58
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "VAELoader",
"models": [
{
"name": "ae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors",
"directory": "vae"
}
]
},
"widgets_values": [
"ae.safetensors"
]
},
{
"id": 52,
"type": "CFGNorm",
"pos": [
2230,
230
],
"size": [
290,
60
],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 71
}
],
"outputs": [
{
"name": "patched_model",
"type": "MODEL",
"links": [
73
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.50",
"Node name for S&R": "CFGNorm",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"ue_properties": {
"widget_ue_connectable": {
"strength": true
}
}
},
"widgets_values": [
1
]
},
{
"id": 55,
"type": "ModelSamplingAuraFlow",
"pos": [
2230,
120
],
"size": [
290,
60
],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 95
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
71
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "ModelSamplingAuraFlow",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
3
]
},
{
"id": 59,
"type": "EmptySD3LatentImage",
"pos": [
2240,
1110
],
"size": [
270,
106
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": []
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
]
},
{
"id": 60,
"type": "VAEEncode",
"pos": [
1980,
1150
],
"size": [
140,
46
],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 78
},
{
"name": "vae",
"type": "VAE",
"link": 79
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
76
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.50",
"Node name for S&R": "VAEEncode",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"ue_properties": {
"widget_ue_connectable": {}
}
},
"widgets_values": []
},
{
"id": 61,
"type": "TextEncodeQwenImageEditPlus",
"pos": [
1710,
430
],
"size": [
400,
200
],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 80
},
{
"name": "vae",
"shape": 7,
"type": "VAE",
"link": 81
},
{
"name": "image1",
"shape": 7,
"type": "IMAGE",
"link": 82
},
{
"name": "image2",
"shape": 7,
"type": "IMAGE",
"link": null
},
{
"name": "image3",
"shape": 7,
"type": "IMAGE",
"link": null
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
75
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "TextEncodeQwenImageEditPlus"
},
"widgets_values": [
""
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 69,
"type": "VAEDecode",
"pos": [
2570,
120
],
"size": [
210,
46
],
"flags": {
"collapsed": false
},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 91
},
{
"name": "vae",
"type": "VAE",
"link": 92
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
77
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "VAEDecode",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": []
},
{
"id": 38,
"type": "UNETLoader",
"pos": [
-1054.8211950403152,
173.22233432193306
],
"size": [
270,
82
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
61
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "UNETLoader",
"models": [
{
"name": "flux1-krea-dev_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"flux1-krea-dev_fp8_scaled.safetensors",
"default"
]
},
{
"id": 53,
"type": "VAELoader",
"pos": [
1251.3350830078125,
769.044189453125
],
"size": [
330,
60
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"slot_index": 0,
"links": [
79,
81,
87,
92
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "VAELoader",
"models": [
{
"name": "qwen_image_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
"directory": "vae"
}
],
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
"qwen_image_vae.safetensors"
]
},
{
"id": 54,
"type": "CLIPLoader",
"pos": [
1246.7269287109375,
558.9865112304688
],
"size": [
330,
110
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"slot_index": 0,
"links": [
80,
86
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "CLIPLoader",
"models": [
{
"name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
"directory": "text_encoders"
}
],
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
"qwen_2.5_vl_7b_fp8_scaled.safetensors",
"qwen_image",
"default"
]
},
{
"id": 68,
"type": "TextEncodeQwenImageEditPlus",
"pos": [
1703.297607421875,
179.4862518310547
],
"size": [
400,
200
],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 86
},
{
"name": "vae",
"shape": 7,
"type": "VAE",
"link": 87
},
{
"name": "image1",
"shape": 7,
"type": "IMAGE",
"link": 98
},
{
"name": "image2",
"shape": 7,
"type": "IMAGE",
"link": null
},
{
"name": "image3",
"shape": 7,
"type": "IMAGE",
"link": null
},
{
"name": "prompt",
"type": "STRING",
"widget": {
"name": "prompt"
},
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
74
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.59",
"Node name for S&R": "TextEncodeQwenImageEditPlus"
},
"widgets_values": [
"Next Scene: The camera pushes in from behind the keeper, showing him gripping the rail as the storm rages and lightning illuminates his weathered face. realistic cinematic style"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [
-1078.043561000177,
654.8812058190549
],
"size": [
270,
120
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
51
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1280,
720,
1
]
},
{
"id": 71,
"type": "ImageScaleToTotalPixels",
"pos": [
1408.750732421875,
1016.1314697265625
],
"size": [
270,
82
],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 97
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
78,
82,
98
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.50",
"Node name for S&R": "ImageScaleToTotalPixels",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"ue_properties": {
"widget_ue_connectable": {
"upscale_method": true,
"megapixels": true
}
}
},
"widgets_values": [
"lanczos",
1
]
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [
-1671.9521416746438,
144.27145904592498
],
"size": [
520,
390
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Model links",
"properties": {},
"widgets_values": [
"## Model links\n\n**Diffusion Model**\n\n- [flux1-krea-dev_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors)\n\nIf you need the original weights, head to [black-forest-labs/FLUX.1-Krea-dev](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/), accept the agreement in the repo, then click the link below to download the models:\n\n- [flux1-krea-dev.safetensors](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/resolve/main/flux1-krea-dev.safetensors)\n\n**Text Encoder**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/blob/main/clip_l.safetensors)\n\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors) or [t5xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors)\n\n**VAE**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\n```\nComfyUI/\n├── models/\n│ ├── diffusion_models/\n│ │ └─── flux1-krea-dev_fp8_scaled.safetensors\n│ ├── text_encoders/\n│ │ ├── clip_l.safetensors\n│ │ └─── t5xxl_fp16.safetensors # or t5xxl_fp8_e4m3fn_scaled.safetensors\n│ └── vae/\n│ └── ae.safetensors\n```\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
91.50024745911058,
208.9554308869794
],
"size": [
210,
46
],
"flags": {
"collapsed": false
},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 58
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
9,
97
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [
53.35103539963648,
141.6342719840394
],
"size": [
200,
30
],
"flags": {
"collapsed": false
},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 66
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
63
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 74,
"type": "Text Load Line From File",
"pos": [
1652.8071816703605,
-52.12227061246014
],
"size": [
270,
174
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "multiline_text",
"shape": 7,
"type": "STRING",
"link": 103
},
{
"name": "index",
"type": "INT",
"widget": {
"name": "index"
},
"link": 100
}
],
"outputs": [
{
"name": "line_text",
"type": "STRING",
"links": [
101,
102
]
},
{
"name": "dictionary",
"type": "DICT",
"links": null
}
],
"properties": {
"cnr_id": "was-node-suite-comfyui",
"ver": "ea935d1044ae5a26efa54ebeb18fe9020af49a45",
"Node name for S&R": "Text Load Line From File"
},
"widgets_values": [
"",
"[filename]",
"TextBatch",
"index",
0
]
},
{
"id": 82,
"type": "Note",
"pos": [
438.7688374380804,
-145.55126237291242
],
"size": [
399.0292082823976,
353.23574924486223
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"you can make the list of your prompt in the text multiline and the incrementer will select each line for each of you run.\nyou need to restart at 0 when you have no more lines. ( seed )"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 77,
"type": "PreviewAny",
"pos": [
-340,
1420
],
"size": [
631.8195224440549,
130.5215140178923
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "source",
"type": "*",
"link": 101
}
],
"outputs": [],
"title": "NEXT SCENE PROMPT",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.64",
"Node name for S&R": "PreviewAny"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [
-350,
950
],
"size": [
640.8512471405897,
415.6048561340251
],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "SaveImage"
},
"widgets_values": [
"flux"
]
},
{
"id": 58,
"type": "SaveImage",
"pos": [
310,
950
],
"size": [
755.8363004499893,
591.7956866416107
],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 77
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "SaveImage",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
"nextscene"
]
},
{
"id": 65,
"type": "LoraLoaderModelOnly",
"pos": [
1234.2114882922415,
275.26656409464516
],
"size": [
310,
82
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
94
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.50",
"Node name for S&R": "LoraLoaderModelOnly",
"models": [
{
"name": "Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
"url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Edit-2509/Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
"directory": "loras"
}
],
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"ue_properties": {
"widget_ue_connectable": {
"lora_name": true,
"strength_model": true
}
}
},
"widgets_values": [
"Qwen-Image-Edit-2509-Lightning-4steps-V1.0-bf16.safetensors",
1
]
},
{
"id": 57,
"type": "UNETLoader",
"pos": [
1232.2099994831387,
129.63747452349656
],
"size": [
330,
90
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
85
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "UNETLoader",
"models": [
{
"name": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
"url": "https://huggingface.co/Comfy-Org/Qwen-Image-Edit_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_edit_2509_fp8_e4m3fn.safetensors",
"directory": "diffusion_models"
}
],
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
"qwen_image_edit_2509_fp8_e4m3fn.safetensors",
"default"
]
},
{
"id": 73,
"type": "LoraLoaderModelOnly",
"pos": [
1224.4778739012954,
415.52477852979905
],
"size": [
270,
82
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 94
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
95
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.64",
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"next-scene_lora-v2-3000.safetensors",
1
]
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [
-670.7454588082238,
177.3197207116143
],
"size": [
460.6943359375,
187.2991485595703
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 64
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
65,
66
]
}
],
"title": "PROMPT",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.47",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"A cinematic interior of a modern apartment at night. a man and a woman stand in the living room, mid-argument. the warm light from a floor lamp contrasts with the cold blue glow from the city outside the window. tension fills the space, their faces flushed with emotion, half-lit by shadows. cinematic realism, shallow depth of field."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 81,
"type": "Text Multiline",
"pos": [
856.0914498023825,
-165.02829581710353
],
"size": [
400,
200
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"links": [
103
]
}
],
"properties": {
"cnr_id": "was-node-suite-comfyui",
"ver": "ea935d1044ae5a26efa54ebeb18fe9020af49a45",
"Node name for S&R": "Text Multiline"
},
"widgets_values": [
"Next Scene: The camera moves closer to the woman, showing tears forming as she tries to speak but hesitates, her reflection faintly visible in the window behind her. realistic cinematic style\nNext Scene: The camera cuts to the man pacing back and forth, his hand in his hair, the city lights flickering behind him through the curtains. realistic cinematic style\nNext Scene: The camera moves to a medium shot between them, framing their silhouettes across the table, the soft lamp glow splitting them in two. realistic cinematic style\nNext Scene: The camera tilts down to the woman’s hand gripping her glass, trembling slightly before setting it down. realistic cinematic style\nNext Scene: The camera pans slowly as he stops and looks toward her, silence stretching between them, the hum of the city barely audible. realistic cinematic style\nNext Scene: The camera pulls back to a wide shot from the hallway, both figures motionless in the warm, fragile light. realistic cinematic style"
]
},
{
"id": 56,
"type": "KSampler",
"pos": [
2207.7734375,
510.546875
],
"size": [
300,
474
],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 73
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 74
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 75
},
{
"name": "latent_image",
"type": "LATENT",
"link": 76
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
91
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.48",
"Node name for S&R": "KSampler",
"enableTabs": false,
"tabWidth": 65,
"tabXOffset": 10,
"hasSecondTab": false,
"secondTabText": "Send Back",
"secondTabOffset": 80,
"secondTabWidth": 65,
"widget_ue_connectable": {}
},
"widgets_values": [
551042521370195,
"fixed",
8,
1,
"euler",
"simple",
1
]
},
{
"id": 31,
"type": "KSampler",
"pos": [
44.924703211736876,
371.35104434029375
],
"size": [
315,
262
],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 61
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 65
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 63
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
52
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.40",
"Node name for S&R": "KSampler"
},
"widgets_values": [
943556351579141,
"fixed",
30,
1,
"euler",
"simple",
1
]
},
{
"id": 76,
"type": "MasqueradeIncrementer",
"pos": [
869.1791853417404,
106.91182894128491
],
"size": [
270,
106
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [
100
]
}
],
"properties": {
"cnr_id": "masquerade-nodes-comfyui",
"ver": "432cb4d146a391b387a0cd25ace824328b5b61cf",
"Node name for S&R": "MasqueradeIncrementer"
},
"widgets_values": [
6,
"increment",
10
]
}
],
"links": [
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
51,
27,
0,
31,
3,
"LATENT"
],
[
52,
31,
0,
8,
0,
"LATENT"
],
[
58,
39,
0,
8,
1,
"VAE"
],
[
61,
38,
0,
31,
0,
"MODEL"
],
[
63,
42,
0,
31,
2,
"CONDITIONING"
],
[
64,
40,
0,
45,
0,
"CLIP"
],
[
65,
45,
0,
31,
1,
"CONDITIONING"
],
[
66,
45,
0,
42,
0,
"CONDITIONING"
],
[
71,
55,
0,
52,
0,
"MODEL"
],
[
73,
52,
0,
56,
0,
"MODEL"
],
[
74,
68,
0,
56,
1,
"CONDITIONING"
],
[
75,
61,
0,
56,
2,
"CONDITIONING"
],
[
76,
60,
0,
56,
3,
"LATENT"
],
[
77,
69,
0,
58,
0,
"IMAGE"
],
[
78,
71,
0,
60,
0,
"IMAGE"
],
[
79,
53,
0,
60,
1,
"VAE"
],
[
80,
54,
0,
61,
0,
"CLIP"
],
[
81,
53,
0,
61,
1,
"VAE"
],
[
82,
71,
0,
61,
2,
"IMAGE"
],
[
85,
57,
0,
65,
0,
"MODEL"
],
[
86,
54,
0,
68,
0,
"CLIP"
],
[
87,
53,
0,
68,
1,
"VAE"
],
[
91,
56,
0,
69,
0,
"LATENT"
],
[
92,
53,
0,
69,
1,
"VAE"
],
[
94,
65,
0,
73,
0,
"MODEL"
],
[
95,
73,
0,
55,
0,
"MODEL"
],
[
97,
8,
0,
71,
0,
"IMAGE"
],
[
98,
71,
0,
68,
2,
"IMAGE"
],
[
100,
76,
0,
74,
1,
"INT"
],
[
101,
74,
0,
77,
0,
"*"
],
[
102,
74,
0,
68,
5,
"STRING"
],
[
103,
81,
0,
74,
0,
"STRING"
]
],
"groups": [
{
"id": 1,
"title": "Step 1 - Load Models Here",
"bounding": [
-1064.8211950403152,
103.22233432193308,
300,
460
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Step 2 - Image Size",
"bounding": [
-1088.043561000177,
584.8812058190549,
300,
200
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Step 3 - Prompt",
"bounding": [
-738.4809656995598,
102.6588158981322,
611.5129083311813,
680.2799152758953
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Step1 - Load models",
"bounding": [
1220,
80,
370,
570
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Step 2 - Upload image for editing",
"bounding": [
1220,
680,
970,
550
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Step 4 - Prompt",
"bounding": [
1620,
80,
570,
570
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Step3 - Image Size",
"bounding": [
2220,
1030,
310,
200
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.8544135795549737,
"offset": [
848.3933584607535,
-834.9028248115114
]
},
"frontendVersion": "1.28.7",
"VHS_latentpreview": false,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": true,
"VHS_KeepIntermediate": true
},
"version": 0.4
}