renyuxi committed on
Commit
c9d3150
1 Parent(s): 7dbf2e9

support 1-step unet inference for comfyui

comfyui/ComfyUI-HyperSDXL1StepUnetScheduler/__init__.py ADDED
@@ -0,0 +1,2 @@
+from .node import NODE_CLASS_MAPPINGS
+__all__ = ['NODE_CLASS_MAPPINGS']
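
(For reference: ComfyUI imports each package it finds under `ComfyUI/custom_nodes/` at startup and registers the classes listed in the package's `NODE_CLASS_MAPPINGS`, so copying the `ComfyUI-HyperSDXL1StepUnetScheduler` folder into `custom_nodes/` is the only installation step required.)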
comfyui/ComfyUI-HyperSDXL1StepUnetScheduler/node.py ADDED
@@ -0,0 +1,29 @@
+import torch
+
+
+class HyperSDXL1StepUnetScheduler:
+    @classmethod
+    def INPUT_TYPES(s):
+        return {"required":
+                    {"model": ("MODEL",),
+                     "steps": ("INT", {"default": 1, "min": 1, "max": 10}),
+                     }
+                }
+
+    RETURN_TYPES = ("SIGMAS",)
+    CATEGORY = "sampling/custom_sampling/schedulers"
+    FUNCTION = "get_sigmas"
+
+    def get_sigmas(self, model, steps):
+        # `steps` is exposed as a widget but unused: the 1-step Unet is
+        # trained to denoise from timestep 800 rather than 999, so the
+        # schedule is always a single sigma at t=800 plus the terminal zero.
+        timesteps = torch.tensor([800])
+        sigmas = model.model.model_sampling.sigma(timesteps)
+        sigmas = torch.cat([sigmas, sigmas.new_zeros([1])])
+        return (sigmas,)
+
+
+NODE_CLASS_MAPPINGS = {
+    "HyperSDXL1StepUnetScheduler": HyperSDXL1StepUnetScheduler,
+}
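
For context, a minimal standalone sketch (not part of the commit) of the schedule this node emits. It assumes SDXL's stock "scaled linear" beta schedule (beta_start 0.00085, beta_end 0.012, 1000 training timesteps); inside ComfyUI, the equivalent lookup is done by `model.model.model_sampling.sigma()` on the loaded checkpoint, so the values below are an approximation of what the node returns:

```python
import torch

# Recreate the discrete sigma table for SDXL's scaled-linear beta schedule
# (an assumption here; ComfyUI reads these values from the checkpoint config).
betas = torch.linspace(0.00085 ** 0.5, 0.012 ** 0.5, 1000) ** 2
alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)
sigmas = ((1.0 - alphas_cumprod) / alphas_cumprod) ** 0.5

# The node returns [sigma(800), 0.0]: start from noise at timestep 800 and
# denoise all the way to sigma 0, instead of the usual start at 999.
schedule = torch.stack([sigmas[800], torch.tensor(0.0)])
print(schedule)
```

A two-element SIGMAS tensor means SamplerCustom performs exactly one denoising step, from noise at sigma(800) down to sigma 0, hence one Unet forward pass per image in the workflow below.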
comfyui/Hyper-SDXL-1step-Unet-workflow.json ADDED
@@ -0,0 +1,502 @@
+{
+  "last_node_id": 24,
+  "last_link_id": 39,
+  "nodes": [
+    {
+      "id": 8,
+      "type": "VAEDecode",
+      "pos": [
+        967,
+        130
+      ],
+      "size": {
+        "0": 210,
+        "1": 46
+      },
+      "flags": {},
+      "order": 8,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "samples",
+          "type": "LATENT",
+          "link": 35
+        },
+        {
+          "name": "vae",
+          "type": "VAE",
+          "link": 8
+        }
+      ],
+      "outputs": [
+        {
+          "name": "IMAGE",
+          "type": "IMAGE",
+          "links": [
+            9
+          ],
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "VAEDecode"
+      }
+    },
+    {
+      "id": 19,
+      "type": "CLIPTextEncode",
+      "pos": [
+        6,
+        541
+      ],
+      "size": {
+        "0": 422.84503173828125,
+        "1": 164.31304931640625
+      },
+      "flags": {},
+      "order": 6,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "clip",
+          "type": "CLIP",
+          "link": 25,
+          "slot_index": 0
+        }
+      ],
+      "outputs": [
+        {
+          "name": "CONDITIONING",
+          "type": "CONDITIONING",
+          "links": [
+            31
+          ],
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "CLIPTextEncode"
+      },
+      "widgets_values": [
+        ""
+      ]
+    },
+    {
+      "id": 5,
+      "type": "EmptyLatentImage",
+      "pos": [
+        463,
+        707
+      ],
+      "size": {
+        "0": 315,
+        "1": 106
+      },
+      "flags": {},
+      "order": 0,
+      "mode": 0,
+      "outputs": [
+        {
+          "name": "LATENT",
+          "type": "LATENT",
+          "links": [
+            34
+          ],
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "EmptyLatentImage"
+      },
+      "widgets_values": [
+        1024,
+        1024,
+        1
+      ]
+    },
+    {
+      "id": 21,
+      "type": "SamplerCustom",
+      "pos": [
+        570,
+        -189
+      ],
+      "size": {
+        "0": 355.20001220703125,
+        "1": 230
+      },
+      "flags": {},
+      "order": 7,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "model",
+          "type": "MODEL",
+          "link": 36
+        },
+        {
+          "name": "positive",
+          "type": "CONDITIONING",
+          "link": 30
+        },
+        {
+          "name": "negative",
+          "type": "CONDITIONING",
+          "link": 31
+        },
+        {
+          "name": "sampler",
+          "type": "SAMPLER",
+          "link": 29
+        },
+        {
+          "name": "sigmas",
+          "type": "SIGMAS",
+          "link": 38
+        },
+        {
+          "name": "latent_image",
+          "type": "LATENT",
+          "link": 34
+        }
+      ],
+      "outputs": [
+        {
+          "name": "output",
+          "type": "LATENT",
+          "links": null,
+          "shape": 3
+        },
+        {
+          "name": "denoised_output",
+          "type": "LATENT",
+          "links": [
+            35
+          ],
+          "shape": 3,
+          "slot_index": 1
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "SamplerCustom"
+      },
+      "widgets_values": [
+        true,
+        448506039228027,
+        "randomize",
+        1
+      ]
+    },
+    {
+      "id": 22,
+      "type": "KSamplerSelect",
+      "pos": [
+        145,
+        -198
+      ],
+      "size": {
+        "0": 315,
+        "1": 58
+      },
+      "flags": {},
+      "order": 1,
+      "mode": 0,
+      "outputs": [
+        {
+          "name": "SAMPLER",
+          "type": "SAMPLER",
+          "links": [
+            29
+          ],
+          "shape": 3,
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "KSamplerSelect"
+      },
+      "widgets_values": [
+        "lcm"
+      ]
+    },
+    {
+      "id": 9,
+      "type": "SaveImage",
+      "pos": [
+        1274,
+        -187
+      ],
+      "size": {
+        "0": 391.4791564941406,
+        "1": 700.6646728515625
+      },
+      "flags": {},
+      "order": 9,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "images",
+          "type": "IMAGE",
+          "link": 9
+        }
+      ],
+      "properties": {},
+      "widgets_values": [
+        "ComfyUI"
+      ]
+    },
+    {
+      "id": 24,
+      "type": "HyperSDXL1StepUnetScheduler",
+      "pos": [
+        104,
+        225
+      ],
+      "size": {
+        "0": 315,
+        "1": 58
+      },
+      "flags": {},
+      "order": 4,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "model",
+          "type": "MODEL",
+          "link": 39,
+          "slot_index": 0
+        }
+      ],
+      "outputs": [
+        {
+          "name": "SIGMAS",
+          "type": "SIGMAS",
+          "links": [
+            38
+          ],
+          "shape": 3,
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "HyperSDXL1StepUnetScheduler"
+      },
+      "widgets_values": [
+        1
+      ]
+    },
+    {
+      "id": 20,
+      "type": "Note",
+      "pos": [
+        -471,
+        -169
+      ],
+      "size": {
+        "0": 322.0227355957031,
+        "1": 164.27613830566406
+      },
+      "flags": {},
+      "order": 2,
+      "mode": 0,
+      "properties": {
+        "text": ""
+      },
+      "widgets_values": [
+ "Use the Hyper-SDXL Unet for 1-step inference. \nIt requires the `HyperSDXL1StepUnetScheduler` to denoise from 800 timestep rather than 999.\n\n\nAttention: `HyperSDXL1StepUnetScheduler` only support our 1-step Unet inference!"
+      ],
+      "color": "#432",
+      "bgcolor": "#653"
+    },
+    {
+      "id": 6,
+      "type": "CLIPTextEncode",
+      "pos": [
+        -31,
+        -31
+      ],
+      "size": {
+        "0": 422.84503173828125,
+        "1": 164.31304931640625
+      },
+      "flags": {},
+      "order": 5,
+      "mode": 0,
+      "inputs": [
+        {
+          "name": "clip",
+          "type": "CLIP",
+          "link": 23,
+          "slot_index": 0
+        }
+      ],
+      "outputs": [
+        {
+          "name": "CONDITIONING",
+          "type": "CONDITIONING",
+          "links": [
+            30
+          ],
+          "slot_index": 0
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "CLIPTextEncode"
+      },
+      "widgets_values": [
+        "a photo of a dog"
+      ]
+    },
+    {
+      "id": 4,
+      "type": "CheckpointLoaderSimple",
+      "pos": [
+        -568,
+        290
+      ],
+      "size": {
+        "0": 315,
+        "1": 98
+      },
+      "flags": {},
+      "order": 3,
+      "mode": 0,
+      "outputs": [
+        {
+          "name": "MODEL",
+          "type": "MODEL",
+          "links": [
+            36,
+            39
+          ],
+          "slot_index": 0
+        },
+        {
+          "name": "CLIP",
+          "type": "CLIP",
+          "links": [
+            23,
+            25
+          ],
+          "slot_index": 1
+        },
+        {
+          "name": "VAE",
+          "type": "VAE",
+          "links": [
+            8
+          ],
+          "slot_index": 2
+        }
+      ],
+      "properties": {
+        "Node name for S&R": "CheckpointLoaderSimple"
+      },
+      "widgets_values": [
+        "Hyper-SDXL-1step-Unet-Comfyui.fp16.safetensors"
+      ]
+    }
+  ],
+  "links": [
+    [
+      8,
+      4,
+      2,
+      8,
+      1,
+      "VAE"
+    ],
+    [
+      9,
+      8,
+      0,
+      9,
+      0,
+      "IMAGE"
+    ],
+    [
+      23,
+      4,
+      1,
+      6,
+      0,
+      "CLIP"
+    ],
+    [
+      25,
+      4,
+      1,
+      19,
+      0,
+      "CLIP"
+    ],
+    [
+      29,
+      22,
+      0,
+      21,
+      3,
+      "SAMPLER"
+    ],
+    [
+      30,
+      6,
+      0,
+      21,
+      1,
+      "CONDITIONING"
+    ],
+    [
+      31,
+      19,
+      0,
+      21,
+      2,
+      "CONDITIONING"
+    ],
+    [
+      34,
+      5,
+      0,
+      21,
+      5,
+      "LATENT"
+    ],
+    [
+      35,
+      21,
+      1,
+      8,
+      0,
+      "LATENT"
+    ],
+    [
+      36,
+      4,
+      0,
+      21,
+      0,
+      "MODEL"
+    ],
+    [
+      38,
+      24,
+      0,
+      21,
+      4,
+      "SIGMAS"
+    ],
+    [
+      39,
+      4,
+      0,
+      24,
+      0,
+      "MODEL"
+    ]
+  ],
+  "groups": [],
+  "config": {},
+  "extra": {},
+  "version": 0.4
+}
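
Taken together: CheckpointLoaderSimple loads `Hyper-SDXL-1step-Unet-Comfyui.fp16.safetensors` and feeds its MODEL output to both SamplerCustom and HyperSDXL1StepUnetScheduler; KSamplerSelect supplies the `lcm` sampler; and SamplerCustom denoises a 1024x1024 empty latent at CFG 1 (which is why the negative prompt is left empty) before the result is VAE-decoded and saved, i.e. a single Unet call per image.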