{
"last_node_id": 16,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [
1235.7215957031258,
577.1878720703122
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [
1238,
170
],
"size": {
"0": 315,
"1": 130
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
14
]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": [
"nearest-exact",
1152,
1152,
"disabled"
]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [
1961,
125
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
17
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
374,
171
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4,
12
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
377,
381
],
"size": {
"0": 425.27801513671875,
"1": 180.6060791015625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6,
13
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"bad hands, text, watermark\n"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
435,
600
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
2
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [
768,
768,
1
]
},
{
"id": 11,
"type": "KSampler",
"pos": [
1585,
114
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
15
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [
2203,
123
],
"size": {
"0": 407.53717041015625,
"1": 468.13226318359375
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
845,
172
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7,
10
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [
24,
315
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
18,
23
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
19,
20
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
21,
22
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"v2-1_768-ema-pruned.safetensors"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1495.7215957031258,
576.1878720703122
],
"size": [
232.9403301043692,
282.4336258387117
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
}
],
"links": [
[
2,
5,
0,
3,
3,
"LATENT"
],
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
10,
3,
0,
10,
0,
"LATENT"
],
[
12,
6,
0,
11,
1,
"CONDITIONING"
],
[
13,
7,
0,
11,
2,
"CONDITIONING"
],
[
14,
10,
0,
11,
3,
"LATENT"
],
[
15,
11,
0,
13,
0,
"LATENT"
],
[
17,
13,
0,
12,
0,
"IMAGE"
],
[
18,
16,
0,
3,
0,
"MODEL"
],
[
19,
16,
1,
6,
0,
"CLIP"
],
[
20,
16,
1,
7,
0,
"CLIP"
],
[
21,
16,
2,
8,
1,
"VAE"
],
[
22,
16,
2,
13,
1,
"VAE"
],
[
23,
16,
0,
11,
0,
"MODEL"
]
],
"groups": [
{
"title": "Txt2Img",
"bounding": [
-1,
30,
1211,
708
],
"color": "#a1309b"
},
{
"title": "Save Intermediate Image",
"bounding": [
1225,
500,
516,
196
],
"color": "#3f789e"
},
{
"title": "Hires Fix",
"bounding": [
1224,
29,
710,
464
],
"color": "#b58b2a"
},
{
"title": "Save Final Image",
"bounding": [
1949,
31,
483,
199
],
"color": "#3f789e"
}
],
"config": {},
"extra": {},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}