renyuxi committed on
Commit
ff8f0b1
1 Parent(s): e8e7a23

upload comfyui workflows

Browse files
comfyui/Hyper-SD15-Nsteps-lora-workflow.json ADDED
@@ -0,0 +1,439 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "last_node_id": 20,
3
+ "last_link_id": 27,
4
+ "nodes": [
5
+ {
6
+ "id": 6,
7
+ "type": "CLIPTextEncode",
8
+ "pos": [
9
+ -31,
10
+ -31
11
+ ],
12
+ "size": {
13
+ "0": 422.84503173828125,
14
+ "1": 164.31304931640625
15
+ },
16
+ "flags": {},
17
+ "order": 4,
18
+ "mode": 0,
19
+ "inputs": [
20
+ {
21
+ "name": "clip",
22
+ "type": "CLIP",
23
+ "link": 23,
24
+ "slot_index": 0
25
+ }
26
+ ],
27
+ "outputs": [
28
+ {
29
+ "name": "CONDITIONING",
30
+ "type": "CONDITIONING",
31
+ "links": [
32
+ 17
33
+ ],
34
+ "slot_index": 0
35
+ }
36
+ ],
37
+ "properties": {
38
+ "Node name for S&R": "CLIPTextEncode"
39
+ },
40
+ "widgets_values": [
41
+ "a photo of a cat"
42
+ ]
43
+ },
44
+ {
45
+ "id": 19,
46
+ "type": "CLIPTextEncode",
47
+ "pos": [
48
+ 6,
49
+ 541
50
+ ],
51
+ "size": {
52
+ "0": 422.84503173828125,
53
+ "1": 164.31304931640625
54
+ },
55
+ "flags": {},
56
+ "order": 5,
57
+ "mode": 0,
58
+ "inputs": [
59
+ {
60
+ "name": "clip",
61
+ "type": "CLIP",
62
+ "link": 25,
63
+ "slot_index": 0
64
+ }
65
+ ],
66
+ "outputs": [
67
+ {
68
+ "name": "CONDITIONING",
69
+ "type": "CONDITIONING",
70
+ "links": [
71
+ 27
72
+ ],
73
+ "slot_index": 0
74
+ }
75
+ ],
76
+ "properties": {
77
+ "Node name for S&R": "CLIPTextEncode"
78
+ },
79
+ "widgets_values": [
80
+ ""
81
+ ]
82
+ },
83
+ {
84
+ "id": 20,
85
+ "type": "Note",
86
+ "pos": [
87
+ -509,
88
+ -66
89
+ ],
90
+ "size": {
91
+ "0": 322.0227355957031,
92
+ "1": 164.27613830566406
93
+ },
94
+ "flags": {},
95
+ "order": 0,
96
+ "mode": 0,
97
+ "properties": {
98
+ "text": ""
99
+ },
100
+ "widgets_values": [
101
+ "Use the LoRA corresponding to the number of inference steps to obtain the best inference effect.\n\nAttention: The scheduler type must be set to sgm_uniform!!! "
102
+ ],
103
+ "color": "#432",
104
+ "bgcolor": "#653"
105
+ },
106
+ {
107
+ "id": 8,
108
+ "type": "VAEDecode",
109
+ "pos": [
110
+ 426,
111
+ 145
112
+ ],
113
+ "size": {
114
+ "0": 210,
115
+ "1": 46
116
+ },
117
+ "flags": {},
118
+ "order": 7,
119
+ "mode": 0,
120
+ "inputs": [
121
+ {
122
+ "name": "samples",
123
+ "type": "LATENT",
124
+ "link": 7
125
+ },
126
+ {
127
+ "name": "vae",
128
+ "type": "VAE",
129
+ "link": 8
130
+ }
131
+ ],
132
+ "outputs": [
133
+ {
134
+ "name": "IMAGE",
135
+ "type": "IMAGE",
136
+ "links": [
137
+ 9
138
+ ],
139
+ "slot_index": 0
140
+ }
141
+ ],
142
+ "properties": {
143
+ "Node name for S&R": "VAEDecode"
144
+ }
145
+ },
146
+ {
147
+ "id": 9,
148
+ "type": "SaveImage",
149
+ "pos": [
150
+ 1274,
151
+ -187
152
+ ],
153
+ "size": {
154
+ "0": 391.4791564941406,
155
+ "1": 700.6646728515625
156
+ },
157
+ "flags": {},
158
+ "order": 8,
159
+ "mode": 0,
160
+ "inputs": [
161
+ {
162
+ "name": "images",
163
+ "type": "IMAGE",
164
+ "link": 9
165
+ }
166
+ ],
167
+ "properties": {},
168
+ "widgets_values": [
169
+ "ComfyUI"
170
+ ]
171
+ },
172
+ {
173
+ "id": 4,
174
+ "type": "CheckpointLoaderSimple",
175
+ "pos": [
176
+ -568,
177
+ 290
178
+ ],
179
+ "size": {
180
+ "0": 315,
181
+ "1": 98
182
+ },
183
+ "flags": {},
184
+ "order": 1,
185
+ "mode": 0,
186
+ "outputs": [
187
+ {
188
+ "name": "MODEL",
189
+ "type": "MODEL",
190
+ "links": [
191
+ 20
192
+ ],
193
+ "slot_index": 0
194
+ },
195
+ {
196
+ "name": "CLIP",
197
+ "type": "CLIP",
198
+ "links": [
199
+ 23,
200
+ 25
201
+ ],
202
+ "slot_index": 1
203
+ },
204
+ {
205
+ "name": "VAE",
206
+ "type": "VAE",
207
+ "links": [
208
+ 8
209
+ ],
210
+ "slot_index": 2
211
+ }
212
+ ],
213
+ "properties": {
214
+ "Node name for S&R": "CheckpointLoaderSimple"
215
+ },
216
+ "widgets_values": [
217
+ "v1-5-pruned-emaonly.ckpt"
218
+ ]
219
+ },
220
+ {
221
+ "id": 5,
222
+ "type": "EmptyLatentImage",
223
+ "pos": [
224
+ 463,
225
+ 707
226
+ ],
227
+ "size": {
228
+ "0": 315,
229
+ "1": 106
230
+ },
231
+ "flags": {},
232
+ "order": 2,
233
+ "mode": 0,
234
+ "outputs": [
235
+ {
236
+ "name": "LATENT",
237
+ "type": "LATENT",
238
+ "links": [
239
+ 26
240
+ ],
241
+ "slot_index": 0
242
+ }
243
+ ],
244
+ "properties": {
245
+ "Node name for S&R": "EmptyLatentImage"
246
+ },
247
+ "widgets_values": [
248
+ 512,
249
+ 512,
250
+ 1
251
+ ]
252
+ },
253
+ {
254
+ "id": 3,
255
+ "type": "KSampler",
256
+ "pos": [
257
+ 867,
258
+ 250
259
+ ],
260
+ "size": {
261
+ "0": 315,
262
+ "1": 262
263
+ },
264
+ "flags": {},
265
+ "order": 6,
266
+ "mode": 0,
267
+ "inputs": [
268
+ {
269
+ "name": "model",
270
+ "type": "MODEL",
271
+ "link": 22
272
+ },
273
+ {
274
+ "name": "positive",
275
+ "type": "CONDITIONING",
276
+ "link": 17
277
+ },
278
+ {
279
+ "name": "negative",
280
+ "type": "CONDITIONING",
281
+ "link": 27
282
+ },
283
+ {
284
+ "name": "latent_image",
285
+ "type": "LATENT",
286
+ "link": 26
287
+ }
288
+ ],
289
+ "outputs": [
290
+ {
291
+ "name": "LATENT",
292
+ "type": "LATENT",
293
+ "links": [
294
+ 7
295
+ ],
296
+ "slot_index": 0
297
+ }
298
+ ],
299
+ "properties": {
300
+ "Node name for S&R": "KSampler"
301
+ },
302
+ "widgets_values": [
303
+ 229623203641397,
304
+ "randomize",
305
+ 2,
306
+ 1,
307
+ "ddim",
308
+ "sgm_uniform",
309
+ 1
310
+ ]
311
+ },
312
+ {
313
+ "id": 15,
314
+ "type": "LoraLoaderModelOnly",
315
+ "pos": [
316
+ 55,
317
+ 380
318
+ ],
319
+ "size": {
320
+ "0": 315,
321
+ "1": 82
322
+ },
323
+ "flags": {},
324
+ "order": 3,
325
+ "mode": 0,
326
+ "inputs": [
327
+ {
328
+ "name": "model",
329
+ "type": "MODEL",
330
+ "link": 20
331
+ }
332
+ ],
333
+ "outputs": [
334
+ {
335
+ "name": "MODEL",
336
+ "type": "MODEL",
337
+ "links": [
338
+ 22
339
+ ],
340
+ "shape": 3,
341
+ "slot_index": 0
342
+ }
343
+ ],
344
+ "properties": {
345
+ "Node name for S&R": "LoraLoaderModelOnly"
346
+ },
347
+ "widgets_values": [
348
+ "Hyper-SD15-2steps-lora.safetensors",
349
+ 1
350
+ ]
351
+ }
352
+ ],
353
+ "links": [
354
+ [
355
+ 7,
356
+ 3,
357
+ 0,
358
+ 8,
359
+ 0,
360
+ "LATENT"
361
+ ],
362
+ [
363
+ 8,
364
+ 4,
365
+ 2,
366
+ 8,
367
+ 1,
368
+ "VAE"
369
+ ],
370
+ [
371
+ 9,
372
+ 8,
373
+ 0,
374
+ 9,
375
+ 0,
376
+ "IMAGE"
377
+ ],
378
+ [
379
+ 17,
380
+ 6,
381
+ 0,
382
+ 3,
383
+ 1,
384
+ "CONDITIONING"
385
+ ],
386
+ [
387
+ 20,
388
+ 4,
389
+ 0,
390
+ 15,
391
+ 0,
392
+ "MODEL"
393
+ ],
394
+ [
395
+ 22,
396
+ 15,
397
+ 0,
398
+ 3,
399
+ 0,
400
+ "MODEL"
401
+ ],
402
+ [
403
+ 23,
404
+ 4,
405
+ 1,
406
+ 6,
407
+ 0,
408
+ "CLIP"
409
+ ],
410
+ [
411
+ 25,
412
+ 4,
413
+ 1,
414
+ 19,
415
+ 0,
416
+ "CLIP"
417
+ ],
418
+ [
419
+ 26,
420
+ 5,
421
+ 0,
422
+ 3,
423
+ 3,
424
+ "LATENT"
425
+ ],
426
+ [
427
+ 27,
428
+ 19,
429
+ 0,
430
+ 3,
431
+ 2,
432
+ "CONDITIONING"
433
+ ]
434
+ ],
435
+ "groups": [],
436
+ "config": {},
437
+ "extra": {},
438
+ "version": 0.4
439
+ }
comfyui/Hyper-SDXL-Nsteps-lora-workflow.json ADDED
@@ -0,0 +1,439 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "last_node_id": 20,
3
+ "last_link_id": 27,
4
+ "nodes": [
5
+ {
6
+ "id": 9,
7
+ "type": "SaveImage",
8
+ "pos": [
9
+ 1449,
10
+ -79
11
+ ],
12
+ "size": {
13
+ "0": 391.4791564941406,
14
+ "1": 700.6646728515625
15
+ },
16
+ "flags": {},
17
+ "order": 8,
18
+ "mode": 0,
19
+ "inputs": [
20
+ {
21
+ "name": "images",
22
+ "type": "IMAGE",
23
+ "link": 9
24
+ }
25
+ ],
26
+ "properties": {},
27
+ "widgets_values": [
28
+ "ComfyUI"
29
+ ]
30
+ },
31
+ {
32
+ "id": 6,
33
+ "type": "CLIPTextEncode",
34
+ "pos": [
35
+ -31,
36
+ -31
37
+ ],
38
+ "size": {
39
+ "0": 422.84503173828125,
40
+ "1": 164.31304931640625
41
+ },
42
+ "flags": {},
43
+ "order": 4,
44
+ "mode": 0,
45
+ "inputs": [
46
+ {
47
+ "name": "clip",
48
+ "type": "CLIP",
49
+ "link": 23,
50
+ "slot_index": 0
51
+ }
52
+ ],
53
+ "outputs": [
54
+ {
55
+ "name": "CONDITIONING",
56
+ "type": "CONDITIONING",
57
+ "links": [
58
+ 17
59
+ ],
60
+ "slot_index": 0
61
+ }
62
+ ],
63
+ "properties": {
64
+ "Node name for S&R": "CLIPTextEncode"
65
+ },
66
+ "widgets_values": [
67
+ "a photo of a cat"
68
+ ]
69
+ },
70
+ {
71
+ "id": 8,
72
+ "type": "VAEDecode",
73
+ "pos": [
74
+ 463,
75
+ 96
76
+ ],
77
+ "size": {
78
+ "0": 210,
79
+ "1": 46
80
+ },
81
+ "flags": {},
82
+ "order": 7,
83
+ "mode": 0,
84
+ "inputs": [
85
+ {
86
+ "name": "samples",
87
+ "type": "LATENT",
88
+ "link": 7
89
+ },
90
+ {
91
+ "name": "vae",
92
+ "type": "VAE",
93
+ "link": 8
94
+ }
95
+ ],
96
+ "outputs": [
97
+ {
98
+ "name": "IMAGE",
99
+ "type": "IMAGE",
100
+ "links": [
101
+ 9
102
+ ],
103
+ "slot_index": 0
104
+ }
105
+ ],
106
+ "properties": {
107
+ "Node name for S&R": "VAEDecode"
108
+ }
109
+ },
110
+ {
111
+ "id": 4,
112
+ "type": "CheckpointLoaderSimple",
113
+ "pos": [
114
+ -568,
115
+ 290
116
+ ],
117
+ "size": {
118
+ "0": 315,
119
+ "1": 98
120
+ },
121
+ "flags": {},
122
+ "order": 0,
123
+ "mode": 0,
124
+ "outputs": [
125
+ {
126
+ "name": "MODEL",
127
+ "type": "MODEL",
128
+ "links": [
129
+ 20
130
+ ],
131
+ "slot_index": 0
132
+ },
133
+ {
134
+ "name": "CLIP",
135
+ "type": "CLIP",
136
+ "links": [
137
+ 23,
138
+ 25
139
+ ],
140
+ "slot_index": 1
141
+ },
142
+ {
143
+ "name": "VAE",
144
+ "type": "VAE",
145
+ "links": [
146
+ 8
147
+ ],
148
+ "slot_index": 2
149
+ }
150
+ ],
151
+ "properties": {
152
+ "Node name for S&R": "CheckpointLoaderSimple"
153
+ },
154
+ "widgets_values": [
155
+ "sd_xl_base_1.0.safetensors"
156
+ ]
157
+ },
158
+ {
159
+ "id": 15,
160
+ "type": "LoraLoaderModelOnly",
161
+ "pos": [
162
+ 55,
163
+ 380
164
+ ],
165
+ "size": {
166
+ "0": 315,
167
+ "1": 82
168
+ },
169
+ "flags": {},
170
+ "order": 3,
171
+ "mode": 0,
172
+ "inputs": [
173
+ {
174
+ "name": "model",
175
+ "type": "MODEL",
176
+ "link": 20
177
+ }
178
+ ],
179
+ "outputs": [
180
+ {
181
+ "name": "MODEL",
182
+ "type": "MODEL",
183
+ "links": [
184
+ 22
185
+ ],
186
+ "shape": 3,
187
+ "slot_index": 0
188
+ }
189
+ ],
190
+ "properties": {
191
+ "Node name for S&R": "LoraLoaderModelOnly"
192
+ },
193
+ "widgets_values": [
194
+ "Hyper-SDXL-2steps-lora.safetensors",
195
+ 1
196
+ ]
197
+ },
198
+ {
199
+ "id": 19,
200
+ "type": "CLIPTextEncode",
201
+ "pos": [
202
+ 6,
203
+ 541
204
+ ],
205
+ "size": {
206
+ "0": 422.84503173828125,
207
+ "1": 164.31304931640625
208
+ },
209
+ "flags": {},
210
+ "order": 5,
211
+ "mode": 0,
212
+ "inputs": [
213
+ {
214
+ "name": "clip",
215
+ "type": "CLIP",
216
+ "link": 25,
217
+ "slot_index": 0
218
+ }
219
+ ],
220
+ "outputs": [
221
+ {
222
+ "name": "CONDITIONING",
223
+ "type": "CONDITIONING",
224
+ "links": [
225
+ 27
226
+ ],
227
+ "slot_index": 0
228
+ }
229
+ ],
230
+ "properties": {
231
+ "Node name for S&R": "CLIPTextEncode"
232
+ },
233
+ "widgets_values": [
234
+ ""
235
+ ]
236
+ },
237
+ {
238
+ "id": 3,
239
+ "type": "KSampler",
240
+ "pos": [
241
+ 903,
242
+ 244
243
+ ],
244
+ "size": {
245
+ "0": 315,
246
+ "1": 262
247
+ },
248
+ "flags": {},
249
+ "order": 6,
250
+ "mode": 0,
251
+ "inputs": [
252
+ {
253
+ "name": "model",
254
+ "type": "MODEL",
255
+ "link": 22
256
+ },
257
+ {
258
+ "name": "positive",
259
+ "type": "CONDITIONING",
260
+ "link": 17
261
+ },
262
+ {
263
+ "name": "negative",
264
+ "type": "CONDITIONING",
265
+ "link": 27
266
+ },
267
+ {
268
+ "name": "latent_image",
269
+ "type": "LATENT",
270
+ "link": 26
271
+ }
272
+ ],
273
+ "outputs": [
274
+ {
275
+ "name": "LATENT",
276
+ "type": "LATENT",
277
+ "links": [
278
+ 7
279
+ ],
280
+ "slot_index": 0
281
+ }
282
+ ],
283
+ "properties": {
284
+ "Node name for S&R": "KSampler"
285
+ },
286
+ "widgets_values": [
287
+ 526612100585553,
288
+ "randomize",
289
+ 2,
290
+ 1,
291
+ "ddim",
292
+ "sgm_uniform",
293
+ 1
294
+ ]
295
+ },
296
+ {
297
+ "id": 5,
298
+ "type": "EmptyLatentImage",
299
+ "pos": [
300
+ 463,
301
+ 707
302
+ ],
303
+ "size": {
304
+ "0": 315,
305
+ "1": 106
306
+ },
307
+ "flags": {},
308
+ "order": 1,
309
+ "mode": 0,
310
+ "outputs": [
311
+ {
312
+ "name": "LATENT",
313
+ "type": "LATENT",
314
+ "links": [
315
+ 26
316
+ ],
317
+ "slot_index": 0
318
+ }
319
+ ],
320
+ "properties": {
321
+ "Node name for S&R": "EmptyLatentImage"
322
+ },
323
+ "widgets_values": [
324
+ 1024,
325
+ 1024,
326
+ 1
327
+ ]
328
+ },
329
+ {
330
+ "id": 20,
331
+ "type": "Note",
332
+ "pos": [
333
+ -509,
334
+ -66
335
+ ],
336
+ "size": [
337
+ 322.0227373046864,
338
+ 164.27613183593667
339
+ ],
340
+ "flags": {},
341
+ "order": 2,
342
+ "mode": 0,
343
+ "properties": {
344
+ "text": ""
345
+ },
346
+ "widgets_values": [
347
+ "Use the LoRA corresponding to the number of inference steps to obtain the best inference effect.\n\nAttention: The scheduler type must be set to sgm_uniform!!! "
348
+ ],
349
+ "color": "#432",
350
+ "bgcolor": "#653"
351
+ }
352
+ ],
353
+ "links": [
354
+ [
355
+ 7,
356
+ 3,
357
+ 0,
358
+ 8,
359
+ 0,
360
+ "LATENT"
361
+ ],
362
+ [
363
+ 8,
364
+ 4,
365
+ 2,
366
+ 8,
367
+ 1,
368
+ "VAE"
369
+ ],
370
+ [
371
+ 9,
372
+ 8,
373
+ 0,
374
+ 9,
375
+ 0,
376
+ "IMAGE"
377
+ ],
378
+ [
379
+ 17,
380
+ 6,
381
+ 0,
382
+ 3,
383
+ 1,
384
+ "CONDITIONING"
385
+ ],
386
+ [
387
+ 20,
388
+ 4,
389
+ 0,
390
+ 15,
391
+ 0,
392
+ "MODEL"
393
+ ],
394
+ [
395
+ 22,
396
+ 15,
397
+ 0,
398
+ 3,
399
+ 0,
400
+ "MODEL"
401
+ ],
402
+ [
403
+ 23,
404
+ 4,
405
+ 1,
406
+ 6,
407
+ 0,
408
+ "CLIP"
409
+ ],
410
+ [
411
+ 25,
412
+ 4,
413
+ 1,
414
+ 19,
415
+ 0,
416
+ "CLIP"
417
+ ],
418
+ [
419
+ 26,
420
+ 5,
421
+ 0,
422
+ 3,
423
+ 3,
424
+ "LATENT"
425
+ ],
426
+ [
427
+ 27,
428
+ 19,
429
+ 0,
430
+ 3,
431
+ 2,
432
+ "CONDITIONING"
433
+ ]
434
+ ],
435
+ "groups": [],
436
+ "config": {},
437
+ "extra": {},
438
+ "version": 0.4
439
+ }