diff --git a/!action-sdxl-V0.5.civitai.info b/!action-sdxl-V0.5.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..fc362e653987c7874a67afb5b4e48fc7e45300c6 --- /dev/null +++ b/!action-sdxl-V0.5.civitai.info @@ -0,0 +1,12079 @@ +{ + "id": 133814, + "modelId": 122813, + "name": "V0.5", + "createdAt": "2023-08-04T22:34:29.238Z", + "updatedAt": "2023-08-04T22:44:18.162Z", + "status": "Published", + "publishedAt": "2023-08-04T22:44:18.158Z", + "trainedWords": [ + "zdyna_pose" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 4198, + "ratingCount": 390, + "rating": 4.99, + "thumbsUpCount": 589 + }, + "model": { + "name": "Action SDXL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 97584, + "sizeKB": 166557.58984375, + "name": "!action-sdxl-V0.5.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-04T22:40:43.094Z", + "hashes": { + "AutoV1": "D04FB835", + "AutoV2": "5F6DAB6A3F", + "SHA256": "5F6DAB6A3FCD45F729197D24B5D4303B3DEE33B881253C8047B9AA1F15388DA2", + "CRC32": "5FCDA9DC", + "BLAKE3": "52ACCC054CF691F9F860C86868EBB732BF9E2F3F4EFFC503587BD072D551EE55" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/133814" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0c86eff8-658e-4e56-ade2-4e07ef258430/width=450/1876540.jpeg", + "nsfw": "Soft", + "width": 1152, + "height": 1728, + "hash": "UBAe5Ox]Obpeu6x^%hx__4x]H=RjI8ofRNR4", + "type": "image", + "metadata": { + "hash": "UBAe5Ox]Obpeu6x^%hx__4x]H=RjI8ofRNR4", + "size": 2560156, + "width": 1152, + "height": 1728 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "sepia" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 425046296258221, + "end_at_step": 37, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + 
"10", + 0 + ], + "sampler_name": "euler", + "start_at_step": 37, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "cgi, 3d" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 0.6, + "strength_model": 0.6 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 9, + "seed": 1075896118322412, + "model": [ + "4", + 0 + ], + "steps": 10, + "denoise": 0.5500000000000002, + "negative": [ + "63", + 0 + ], + "positive": [ + "62", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": "KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "62": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a dramatic pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects, volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + }, + "class_type": "CLIPTextEncode" + }, + "63": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "cgi, 3d, photo" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, 
+ 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 76, + 49, + 1, + 62, + 0, + "CLIP" + ], + [ + 77, + 49, + 1, + 63, + 0, + "CLIP" + ], + [ + 78, + 62, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 79, + 63, + 0, + 54, + 2, + "CONDITIONING" + ], + [ + 80, + 4, + 0, + 54, + 0, + "MODEL" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." 
+ ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + }, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - 
REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 37, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 6, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." 
+ ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 7, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 19, + "pos": [ + 1124, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 8, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1152, + 1 + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48, + 76, + 77 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 0.6, + 0.6 + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 9, + 
"title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 10, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 37, + "fixed" + ] + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting" + ] + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "sepia" + ] + }, + { + "id": 62, + "pos": [ + 770, + 926 + ], + "mode": 0, + "size": { + "0": 438.6286926269531, + "1": 153.9615936279297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 76, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 78 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a dramatic pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects, volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + ] + }, + { + "id": 63, + "pos": [ + 769, + 1119 + ], + "mode": 0, + "size": { + "0": 439.6286926269531, + "1": 148.1879119873047 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 77, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 79 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d, photo" + ] + }, + { + "id": 4, + "pos": [ + 
-563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 16, + "pos": [ + 769, + 694 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d" + ] + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 348.63818359375, + "1": 180.3082733154297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 19, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 425046296258221, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 37, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1630, + 930 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + 
"link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 80, + "name": "model", + "type": "MODEL" + }, + { + "link": 78, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 79, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 1075896118322412, + "randomize", + 10, + 9, + "euler_ancestral", + "normal", + 0.5500000000000002 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscaled Image", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Refined Image", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 80, + "last_node_id": 63 + } + }, + "steps": 40, + "width": 768, + "height": 1152, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "sepia", + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/46a5b83a-9138-437e-9511-51efccfb26ec/width=450/1876535.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1728, + "hash": "U9FFgFzT05~p?HM{PVSe]iKQ02J;-=%L-o-V", + "type": "image", + "metadata": { + "hash": "U9FFgFzT05~p?HM{PVSe]iKQ02J;-=%L-o-V", + "size": 2408976, + "width": 1152, + "height": 1728 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + 
}, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 44317258845217, + "end_at_step": 37, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "euler", + "start_at_step": 37, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 0.6, + "strength_model": 0.6 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 9, + "seed": 779155592775634, + "model": [ + "4", + 0 + ], + "steps": 10, + "denoise": 0.5000000000000001, + "negative": [ + "63", + 0 + ], + "positive": [ + "62", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": "KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", 
+ 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "62": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "digital painting of a female superhero casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + }, + "class_type": "CLIPTextEncode" + }, + "63": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "flame fire" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 76, + 49, + 1, + 62, + 0, + "CLIP" + ], + [ + 77, + 49, + 1, + 63, + 0, + "CLIP" + ], + [ + 78, + 62, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 79, + 63, + 0, + 54, + 2, + "CONDITIONING" + ], + [ + 80, + 4, + 0, + 54, + 0, + "MODEL" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. 
This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." + ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + }, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + 
"outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 19, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + ] + }, + { + "id": 16, + "pos": [ + 769, + 657 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": 
"KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 37, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 6, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 7, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." 
+ ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 19, + "pos": [ + 1124, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 8, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1152, + 1 + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48, + 76, + 77 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 0.6, + 0.6 + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 
82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 10, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 37, + "fixed" + ] + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + ] + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 62, + "pos": [ + 770, + 926 + ], + "mode": 0, + "size": { + "0": 438.6286926269531, + "1": 153.9615936279297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 76, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 78 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting of a female superhero casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + ] + }, + { + "id": 63, + "pos": [ + 769, + 1119 + ], + "mode": 0, + "size": { + "0": 439.6286926269531, + "1": 148.1879119873047 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 77, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 79 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "flame fire" + ] + }, + { + "id": 4, + "pos": [ + -563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 
1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 80, + "name": "model", + "type": "MODEL" + }, + { + "link": 78, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 79, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 779155592775634, + "randomize", + 10, + 9, + "euler_ancestral", + "normal", + 0.5000000000000001 + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 44317258845217, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 37, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1630, + 930 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + "link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting 
Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Group", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 80, + "last_node_id": 63 + } + }, + "steps": 40, + "width": 768, + "height": 1152, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e9f621c5-89da-4993-bace-e602077483e9/width=450/1876534.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1472, + "hash": "UJEo;_}=afVYAHS~s:VYD*9v9akC-:rq$%tR", + "type": "image", + "metadata": { + "hash": "UJEo;_}=afVYAHS~s:VYD*9v9akC-:rq$%tR", + "size": 2456669, + "width": 1152, + "height": 1472 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dab6c85e-a435-43e6-bda8-97f218a27011/width=450/1876536.jpeg", + "nsfw": "Soft", + "width": 1152, + "height": 2016, + "hash": "UED0N5rs_400PCxvE0e-0fE0I9^+xvWX-Us=", + "type": "image", + "metadata": { + "hash": "UED0N5rs_400PCxvE0e-0fE0I9^+xvWX-Us=", + "size": 2848717, + "width": 1152, + "height": 2016 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1344, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "sepia" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 998063890160206, + "end_at_step": 33, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + 
"sampler_name": "euler", + "start_at_step": 33, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "cgi, 3d" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 9, + "seed": 386231378085028, + "model": [ + "4", + 0 + ], + "steps": 10, + "denoise": 0.5000000000000001, + "negative": [ + "63", + 0 + ], + "positive": [ + "62", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": "KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "62": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a dramatic pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects, volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + }, + "class_type": "CLIPTextEncode" + }, + "63": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "cgi, 3d, photo" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, 
+ "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 76, + 49, + 1, + 62, + 0, + "CLIP" + ], + [ + 77, + 49, + 1, + 63, + 0, + "CLIP" + ], + [ + 78, + 62, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 79, + 63, + 0, + 54, + 2, + "CONDITIONING" + ], + [ + 80, + 4, + 0, + 54, + 0, + "MODEL" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." 
+ ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + }, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - 
REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 33, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 6, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." 
+ ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 7, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 19, + "pos": [ + 1124, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 8, + "title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting" + ] + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 
152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "sepia" + ] + }, + { + "id": 62, + "pos": [ + 770, + 926 + ], + "mode": 0, + "size": { + "0": 438.6286926269531, + "1": 153.9615936279297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 76, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 78 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a dramatic pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects, volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + ] + }, + { + "id": 63, + "pos": [ + 769, + 1119 + ], + "mode": 0, + "size": { + "0": 439.6286926269531, + "1": 148.1879119873047 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 77, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 79 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d, photo" + ] + }, + { + "id": 4, + "pos": [ + -563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 9, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 80, + "name": "model", + "type": "MODEL" + }, + { + "link": 78, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 79, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 386231378085028, + "randomize", + 10, + 9, + "euler_ancestral", + "normal", + 0.5000000000000001 + ] + }, + { + "id": 16, + "pos": [ + 769, + 694 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + 
"outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d" + ] + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 348.63818359375, + "1": 180.3082733154297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 19, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, wind, dust, paper, confetti and debris flying in the air, stormy sky, raining" + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48, + 76, + 77 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 1, + 1 + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 10, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 33, + "fixed" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 11, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1344, + 1 + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + 
"type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 998063890160206, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 33, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1630, + 930 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + "link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscaled Image", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Refined Image", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 80, + "last_node_id": 63 + } + }, + "steps": 40, + "width": 768, + "height": 1344, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, foreshortening, man in trenchcoat standing above viewer in a zdyna_pose pointing a pistol at viewer in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "sepia", + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d1fafbae-6794-4a32-a680-7c2265b426b5/width=450/1876537.jpeg", + "nsfw": "Soft", + "width": 1152, + "height": 2016, + "hash": "ULH2ci-T?as+ys9ZNHf89|Rj9Gf,?bV[V@%L", + "type": "image", + "metadata": { + "hash": "ULH2ci-T?as+ys9ZNHf89|Rj9Gf,?bV[V@%L", + "size": 3114384, + "width": 1152, + "height": 2016 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + }, + 
"class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1344, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, female soldier posing in a zdyna_pose running through a forest in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "gun weapon" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 203822501806021, + "end_at_step": 33, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "euler", + "start_at_step": 33, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "digital painting of a female soldier running in a dense forest with motion blur, blurred foreground, wind dust paper shrapnel and debris flying in the air, bright morning sunlight shining through the tree canopy and volumetric lighting, cinematic lighting and color grading" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "cgi, 3d, photo" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 0.6, + "strength_model": 0.6 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 9, + "seed": 801044258762705, + "model": [ + "4", + 0 + ], + "steps": 10, + "denoise": 0.6000000000000002, + "negative": [ + "63", + 0 + ], + "positive": [ + "62", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": "KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + 
"filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "62": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "from below, dutch angle, foreshortening, digital painting of a female soldier running in a dense forest with motion blur, blurred foreground, wind dust paper shrapnel and debris flying in the air, bright morning sunlight shining through the tree canopy and volumetric lighting, cinematic lighting and color grading" + }, + "class_type": "CLIPTextEncode" + }, + "63": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "cgi, 3d, photo" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 76, + 49, + 1, + 62, + 0, + "CLIP" + ], + [ + 77, + 49, + 1, + 63, + 0, + "CLIP" + ], + [ + 78, + 62, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 79, + 63, + 0, + 54, + 2, + "CONDITIONING" + ], + [ + 80, + 4, + 0, + 54, + 0, + "MODEL" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. 
This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." + ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + }, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + 
"outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 33, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 6, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. 
Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 7, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48, + 76, + 77 + ], + 
"shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 0.6, + 0.6 + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 8, + "title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, female soldier posing in a zdyna_pose running through a forest in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + ] + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "gun weapon" + ] + }, + { + "id": 63, + "pos": [ + 769, + 1119 + ], + "mode": 0, + "size": { + "0": 439.6286926269531, + "1": 148.1879119873047 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 77, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 79 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d, photo" + ] + }, + { + "id": 4, + "pos": [ + -563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 9, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 16, + "pos": [ + 769, + 694 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + 
"slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "cgi, 3d, photo" + ] + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 348.63818359375, + "1": 180.3082733154297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 19, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting of a female soldier running in a dense forest with motion blur, blurred foreground, wind dust paper shrapnel and debris flying in the air, bright morning sunlight shining through the tree canopy and volumetric lighting, cinematic lighting and color grading" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 10, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1344, + 1 + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 11, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 33, + "fixed" + ] + }, + { + "id": 62, + "pos": [ + 770, + 926 + ], + "mode": 0, + "size": { + "0": 438.6286926269531, + "1": 153.9615936279297 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 76, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 78 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "from below, dutch angle, foreshortening, digital painting of a female soldier running in a dense forest with motion blur, blurred foreground, wind dust paper shrapnel and debris flying in the air, bright morning sunlight shining through the tree canopy and volumetric lighting, cinematic lighting and color grading" + ] + }, + { + "id": 19, + "pos": [ + 1125, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" 
+ }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 203822501806021, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 33, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1628, + 929 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + "link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 80, + "name": "model", + "type": "MODEL" + }, + { + "link": 78, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 79, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 801044258762705, + "randomize", + 10, + 9, + "euler_ancestral", + "normal", + 0.6000000000000002 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscaled Image", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Refined Image", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 80, + "last_node_id": 63 + } + }, + "steps": 40, + "width": 768, + "height": 1344, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, female soldier posing in a 
zdyna_pose running through a forest in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "gun weapon", + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8ba57955-1a5a-46b6-ac39-2bbcc9fe97ed/width=450/1876532.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1728, + "hash": "U7GIfnqZ00%%804T.9D$=_Mc0eD4IXtSyX={", + "type": "image", + "metadata": { + "hash": "U7GIfnqZ00%%804T.9D$=_Mc0eD4IXtSyX={", + "size": 2523068, + "width": 1152, + "height": 1728 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 1102447933008394, + "end_at_step": 37, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "euler", + "start_at_step": 37, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + 
"class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 0.6, + "strength_model": 0.6 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 9, + "seed": 796955245168101, + "model": [ + "4", + 0 + ], + "steps": 10, + "denoise": 0.5000000000000001, + "negative": [ + "63", + 0 + ], + "positive": [ + "62", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": "KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "62": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "digital painting of a female superhero casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + }, + "class_type": "CLIPTextEncode" + }, + "63": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "flame fire" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 76, + 49, + 1, + 62, + 0, + "CLIP" + ], + [ + 77, + 49, + 1, + 63, + 0, + "CLIP" + ], + [ + 78, + 
62, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 79, + 63, + 0, + 54, + 2, + "CONDITIONING" + ], + [ + 80, + 4, + 0, + 54, + 0, + "MODEL" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." 
+ ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + }, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 19, + "name": 
"clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + ] + }, + { + "id": 16, + "pos": [ + 769, + 657 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 37, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 6, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. 
Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 7, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 19, + "pos": [ + 1124, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + 
"order": 8, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1152, + 1 + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48, + 76, + 77 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 0.6, + 0.6 + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 10, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 37, + "fixed" + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 1102447933008394, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 37, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1630, + 930 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + 
"link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + ] + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 62, + "pos": [ + 770, + 926 + ], + "mode": 0, + "size": [ + 438.62869960379885, + 153.96159338106554 + ], + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 76, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 78 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting of a female superhero casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + ] + }, + { + "id": 63, + "pos": [ + 769, + 1119 + ], + "mode": 0, + "size": [ + 439.62869960379885, + 148.18791697079632 + ], + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 77, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 79 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "flame fire" + ] + }, + { + "id": 4, + "pos": [ + -563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45, + 80 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + 
"XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 80, + "name": "model", + "type": "MODEL" + }, + { + "link": 78, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 79, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 796955245168101, + "randomize", + 10, + 9, + "euler_ancestral", + "normal", + 0.5000000000000001 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Group", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 80, + "last_node_id": 63 + } + }, + "steps": 40, + "width": 768, + "height": 1152, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/495555c7-0bea-4cca-83e8-62cd2de371d0/width=450/1876533.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1472, + "hash": "UIE:PK^h4oEM~q%LIAIUS6S5Dii_pIR*V@az", + "type": "image", + "metadata": { + "hash": "UIE:PK^h4oEM~q%LIAIUS6S5Dii_pIR*V@az", + "size": 2022512, + "width": 1152, + "height": 1472 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3f841dea-1767-4dfb-99b6-095d6ec3b49d/width=450/1876531.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1728, + "hash": "UKHL9+^1O@_3~p?atlIUI=g4E1M{X99ZE1R+", + "type": "image", + "metadata": { + "hash": "UKHL9+^1O@_3~p?atlIUI=g4E1M{X99ZE1R+", + "size": 2202609, + "width": 1152, + "height": 1728 + }, + "availability": 
"Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae_0.9.safetensors" + ], + "Model": "XL/sd_xl_base_1.0", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "XL/sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 768, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 40, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 1123645071018022, + "end_at_step": 37, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "12", + 0 + ], + "steps": 40, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "normal", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "euler", + "start_at_step": 37, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "XL/sd_xl_refiner_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "action-sdxl-hero" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "XL/!action-sdxl-V0.5.safetensors", + "strength_clip": 0.6, + "strength_model": 0.6 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "vae_name": "sdxl_vae_0.9.safetensors" + }, + "class_type": "VAELoader" + }, + "52": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "10", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "53": { + "inputs": { + "images": [ + "52", + 0 + ] + }, + "class_type": "PreviewImage" + }, + "54": { + "inputs": { + "cfg": 6, + "seed": 85133334682496, + "model": [ + "12", + 0 + ], + "steps": 6, + "denoise": 0.35, + "negative": [ + "61", + 0 + ], + "positive": [ + "60", + 0 + ], + "scheduler": "normal", + "latent_image": [ + "59", + 0 + ], + "sampler_name": "euler_ancestral" + }, + "class_type": 
"KSampler" + }, + "55": { + "inputs": { + "vae": [ + "50", + 0 + ], + "pixels": [ + "17", + 0 + ] + }, + "class_type": "VAEEncode" + }, + "56": { + "inputs": { + "vae": [ + "50", + 0 + ], + "samples": [ + "54", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "57": { + "inputs": { + "images": [ + "56", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "samples": [ + "55", + 0 + ], + "scale_by": 1.5, + "upscale_method": "nearest-exact" + }, + "class_type": "LatentUpscaleBy" + }, + "60": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "digital painting, dutch angle, from below, female superhero in fighting stance casting lightning from her outstretched arms in a blue suit with orange capewith a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + }, + "class_type": "CLIPTextEncode" + }, + "61": { + "inputs": { + "clip": [ + "12", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 14, + 12, + 0, + 11, + 0, + "MODEL" + ], + [ + 19, + 12, + 1, + 15, + 0, + "CLIP" + ], + [ + 20, + 12, + 1, + 16, + 0, + "CLIP" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 47, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 48, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 49, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 50, + 50, + 0, + 17, + 1, + "VAE" + ], + [ + 51, + 50, + 0, + 52, + 1, + "VAE" + ], + [ + 52, + 10, + 0, + 52, + 0, + "LATENT" + ], + [ + 53, + 52, + 0, + 53, + 0, + "IMAGE" + ], + [ + 54, + 17, + 0, + 55, + 0, + "IMAGE" + ], + [ + 55, + 50, + 0, + 55, + 1, + "VAE" + ], + [ + 60, + 54, + 0, + 56, + 0, + "LATENT" + ], + [ + 61, + 50, + 0, + 56, + 1, + "VAE" + ], + [ + 62, + 56, + 0, + 57, + 0, + "IMAGE" + ], + [ + 63, + 55, + 0, + 59, + 0, + "LATENT" + ], + [ + 64, + 59, + 0, + 54, + 3, + "LATENT" + ], + [ + 65, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 71, + 12, + 0, + 54, + 0, + "MODEL" + ], + [ + 72, + 12, + 1, + 60, + 0, + "CLIP" + ], + [ + 73, + 12, + 1, + 61, + 0, + "CLIP" + ], + [ + 74, + 60, + 0, + 54, + 1, + "CONDITIONING" + ], + [ + 75, + 61, + 0, + 54, + 2, + "CONDITIONING" + ] + ], + "nodes": [ + { + "id": 36, + "pos": [ + -547.435085420599, + -83.41178857372952 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 0, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. 
\n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + -555.6248838808347, + 346.6448286818297 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 41, + "pos": [ + -537.4124915743454, + 1105.4474989519647 + ], + "mode": 0, + "size": { + "0": 322.8022766113281, + "1": 163.92898559570312 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 2, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG.\n\nUsing the better 0.9 VAE here." + ] + }, + { + "id": 50, + "pos": [ + -540.4124915743454, + 916.447498951965 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "color": "#332922", + "flags": {}, + "order": 3, + "bgcolor": "#593930", + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 50, + 51, + 55, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae_0.9.safetensors" + ] + }, + { + "id": 4, + "pos": [ + -563.4350854205987, + -233.41178857372964 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 46 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 12, + "pos": [ + -565.6248838808347, + 195.64482868182944 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 14, + 71 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 19, + 20, + 72, + 73 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "XL/sd_xl_refiner_1.0.safetensors" + ] + 
}, + { + "id": 42, + "pos": [ + 85.52377753723336, + 872.2042730290383 + ], + "mode": 0, + "size": { + "0": 259.1498107910156, + "1": 228.0334930419922 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 17, + "pos": [ + -536.4740797149254, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": { + "collapsed": true + }, + "order": 23, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 50, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 54 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 52, + "pos": [ + -365.4740797149253, + 1026.6492009476544 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#223", + "flags": { + "collapsed": true + }, + "order": 20, + "inputs": [ + { + "link": 52, + "name": "samples", + "type": "LATENT" + }, + { + "link": 51, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 53, + "pos": [ + 352, + 96 + ], + "mode": 0, + "size": { + "0": 369.5992736816406, + "1": 326.2545166015625 + }, + "type": "PreviewImage", + "color": "#223", + "flags": {}, + "order": 22, + "inputs": [ + { + "link": 53, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#335", + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 7, + "pos": [ + 380, + 470 + ], + "mode": 0, + "size": { + "0": 343.5692138671875, + "1": 152.20408630371094 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 6, + "pos": [ + 23, + 467 + ], + "mode": 0, + "size": { + "0": 339.08404541015625, + "1": 157.1740264892578 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 47, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading" + ] + }, + { + "id": 15, + "pos": [ + 764, + 474 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, 
+ "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 19, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "zdyna_pose, digital painting, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting, orange and teal color grading" + ] + }, + { + "id": 16, + "pos": [ + 769, + 657 + ], + "mode": 0, + "size": { + "0": 340, + "1": 140 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 20, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 11, + "pos": [ + 766, + 91 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#323", + "flags": {}, + "order": 21, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 14, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 65, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 40, + 8, + "euler", + "normal", + 37, + 10000, + "disable" + ] + }, + { + "id": 40, + "pos": [ + 118, + -443 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 7, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. 
Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 48, + "pos": [ + 450, + 970 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#233", + "flags": {}, + "order": 8, + "bgcolor": "#355", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." 
+ ] + }, + { + "id": 60, + "pos": [ + 762, + 926 + ], + "mode": 0, + "size": { + "0": 467.8367919921875, + "1": 150.28089904785156 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 72, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 74 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "digital painting, dutch angle, from below, female superhero in fighting stance casting lightning from her outstretched arms in a blue suit with orange capewith a tall city in the background in a low camera angle actions scene, foreground objects, wind, dust, paper, confetti and debris flying in the air volumetric lighting, bright background lighting" + ] + }, + { + "id": 61, + "pos": [ + 765, + 1114 + ], + "mode": 0, + "size": { + "0": 462.8668518066406, + "1": 146.4709014892578 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 73, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 75 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 56, + "pos": [ + 1630, + 1020 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "color": "#432", + "flags": {}, + "order": 28, + "inputs": [ + { + "link": 60, + "name": "samples", + "type": "LATENT" + }, + { + "link": 61, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 62 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 59, + "pos": [ + 1580, + 1160 + ], + "mode": 0, + "size": { + "0": 269.4000244140625, + "1": 86.66625213623047 + }, + "type": "LatentUpscaleBy", + "color": "#432", + "flags": {}, + "order": 26, + "inputs": [ + { + "link": 63, + "name": "samples", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 64 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "LatentUpscaleBy" + }, + "widgets_values": [ + "nearest-exact", + 1.5 + ] + }, + { + "id": 57, + "pos": [ + 1892.9643219513464, + 89.30937549607815 + ], + "mode": 0, + "size": { + "0": 990.9550170898438, + "1": 1179.1431884765625 + }, + "type": "SaveImage", + "flags": {}, + "order": 29, + "inputs": [ + { + "link": 62, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 19, + "pos": [ + 1124, + 87 + ], + "mode": 0, + "size": { + "0": 727.26904296875, + "1": 744.083984375 + }, + "type": "SaveImage", + "color": "#323", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#535", + "properties": {}, + "widgets_values": [ + "action-sdxl-hero" + ] + }, + { + "id": 5, + "pos": [ + 65.52377753723313, + 722.2042730290377 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 9, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": 
"EmptyLatentImage" + }, + "widgets_values": [ + 768, + 1152, + 1 + ] + }, + { + "id": 49, + "pos": [ + -542.6685800589798, + 621.9482242943083 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "color": "#2a363b", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 46, + "name": "clip", + "type": "CLIP" + } + ], + "bgcolor": "#3f5159", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 49 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 47, + 48 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "XL/!action-sdxl-V0.5.safetensors", + 0.6, + 0.6 + ] + }, + { + "id": 45, + "pos": [ + 444, + 724 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 10, + "title": "steps", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 40, + "fixed" + ] + }, + { + "id": 47, + "pos": [ + 450, + 840 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#233", + "flags": {}, + "order": 11, + "title": "end_at_step", + "bgcolor": "#355", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 37, + "fixed" + ] + }, + { + "id": 10, + "pos": [ + 39.585034524037376, + 92.33500626534348 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 19, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 49, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 52, + 65 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 1123645071018022, + "fixed", + 40, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 37, + "enable" + ] + }, + { + "id": 55, + "pos": [ + 1630, + 930 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEEncode", + "color": "#432", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 54, + "name": "pixels", + "type": "IMAGE" + }, + { + "link": 55, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 63 + ], + "shape": 3, + "slot_index": 0 + } + 
], + "properties": { + "Node name for S&R": "VAEEncode" + } + }, + { + "id": 54, + "pos": [ + 1250, + 960 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "color": "#432", + "flags": {}, + "order": 27, + "inputs": [ + { + "link": 71, + "name": "model", + "type": "MODEL" + }, + { + "link": 74, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 75, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 64, + "name": "latent_image", + "type": "LATENT" + } + ], + "bgcolor": "#653", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 85133334682496, + "randomize", + 6, + 6, + "euler_ancestral", + "normal", + 0.35 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -574, + -313, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + -585, + 106, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 45, + 648, + 339, + 464 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + -571, + 846, + 378, + 434 + ] + }, + { + "color": "#8AA", + "title": "Step Control", + "bounding": [ + 434, + 648, + 243, + 458 + ] + }, + { + "color": "#3f789e", + "title": "Load in LoRA", + "bounding": [ + -560, + 541, + 353, + 230 + ] + }, + { + "color": "#88A", + "title": "Starting Base Image", + "bounding": [ + 12, + 11, + 731, + 626 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale", + "bounding": [ + 1877, + 9, + 1023, + 1277 + ] + }, + { + "color": "#a1309b", + "title": "Group", + "bounding": [ + 753, + 10, + 1113, + 835 + ] + }, + { + "color": "#b58b2a", + "title": "Upscale Prompt", + "bounding": [ + 752, + 850, + 1116, + 428 + ] + } + ], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 61 + } + }, + "steps": 40, + "width": 768, + "height": 1152, + "models": [ + "XL/sd_xl_base_1.0.safetensors", + "XL/sd_xl_refiner_1.0.safetensors" + ], + "prompt": "zdyna_pose, dutch angle, from below, female superhero in a zdyna_pose casting lightning from her outstretched arms with a tall city in the background in a low camera angle actions scene, foreground objects dust and volumetric lighting, bright cinematic lighting, orange and teal color grading", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "XL/!action-sdxl-V0.5.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/133814" +} \ No newline at end of file diff --git a/!action-sdxl-V0.5.preview.png b/!action-sdxl-V0.5.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..066a05ce9d4fe5092371bb928693b2aaf5528629 Binary files /dev/null and b/!action-sdxl-V0.5.preview.png differ diff --git a/!action-sdxl-V0.5.safetensors b/!action-sdxl-V0.5.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1db7c6dee495f5e21759046edf8b2e89411b50c3 --- /dev/null +++ b/!action-sdxl-V0.5.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f6dab6a3fcd45f729197d24b5d4303b3dee33b881253c8047b9aa1f15388da2 +size 170554972 diff --git a/.gitattributes b/.gitattributes index 
a6344aac8c09253b3b630fb776ae94478aa0275b..aaba46aa11aa8161066c2cf25f2c342de21d3fed 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +aesthetic_anime_v1s.preview.png filter=lfs diff=lfs merge=lfs -text +CyborgMechExosuit_v1.5.preview.png filter=lfs diff=lfs merge=lfs -text +dataviz_style_xl_v1.preview.png filter=lfs diff=lfs merge=lfs -text +game_icon_v1.0.preview.png filter=lfs diff=lfs merge=lfs -text +sdxl_photorealistic_slider_v1-0.preview.png filter=lfs diff=lfs merge=lfs -text +zavy-ctsmtrc-sdxl.preview.png filter=lfs diff=lfs merge=lfs -text diff --git a/CLAYMATE_V2.03_.civitai.info b/CLAYMATE_V2.03_.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..105ff0ce03c3475357eff98b23e4de314cb74f4b --- /dev/null +++ b/CLAYMATE_V2.03_.civitai.info @@ -0,0 +1,501 @@ +{ + "id": 236248, + "modelId": 208168, + "name": "v2.0", + "createdAt": "2023-11-23T13:48:48.231Z", + "updatedAt": "2023-11-23T13:53:52.427Z", + "status": "Published", + "publishedAt": "2023-11-23T13:53:52.425Z", + "trainedWords": [ + "claymation" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

I don't know if I can consider this a V2 or just a variation of the overall style with the same purpose; I'll let you decide.

I like to put claymation as the first token; without it, the result is more stop-motion-like.

Like V1, you can use various words to steer the style in the direction you need.

It should work well at weight 1, but you can use higher or lower values.

", + "stats": { + "downloadCount": 2343, + "ratingCount": 524, + "rating": 5, + "thumbsUpCount": 528 + }, + "model": { + "name": "CLAYMATE - Claymation Style for SDXL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 182497, + "sizeKB": 891173.02734375, + "name": "CLAYMATE_V2.03_.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-11-23T13:56:07.674Z", + "hashes": { + "AutoV1": "10F5063F", + "AutoV2": "FC1790DDEC", + "SHA256": "FC1790DDEC807108BB4125748C37B5B33C23806F2B5F74D1C5B8B19BF68B2284", + "CRC32": "67B507C5", + "BLAKE3": "5D3EECFA85595AA17DAA8C5301149D8E7E8A9B0FF4F6F8D9988B3A942CB36826", + "AutoV3": "9813D045AFC7C2F141C2F71B7AA36411EA54E15D7F9E66FB94E6466F2A61524D" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/236248" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9d87f5fe-a355-4dd0-b3a2-96c281c635f2/width=450/3857714.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U4BM@r$%*6kB0BofrZM|05WYM+XMKISwxHof", + "type": "image", + "metadata": { + "hash": "U4BM@r$%*6kB0BofrZM|05WYM+XMKISwxHof", + "size": 1680958, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3321214724, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, a Musk deer in Mossy Forest landscape at Small hours ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/16bc2b66-0e0f-4ad3-aad4-8846720aad32/width=450/3857712.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UgE:7Ljbtnt8TOWCR-a#RPoLaIWAX9f5t7bI", + "type": "image", + "metadata": { + "hash": "UgE:7Ljbtnt8TOWCR-a#RPoLaIWAX9f5t7bI", + "size": 1197463, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3608067367, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, The Truman Show - A man discovers his entire life has been a reality TV show. 
", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c84a138f-2ef5-4e39-8f81-f9bb9f5bbcb4/width=450/3857711.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U8EeoW0L~9w]Kg9aELoe8|t7M|NHInR+jYRj", + "type": "image", + "metadata": { + "hash": "U8EeoW0L~9w]Kg9aELoe8|t7M|NHInR+jYRj", + "size": 1467232, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3036275614, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, Death: The Grim Reaper stands amidst withering flowers, signifying transformation, rebirth, and change. ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a77b447d-1b7a-4d1d-971c-2f43edf9e316/width=450/3857685.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UBHJao=_0_tT0bI;$Knn0yNaRkRi6FnjxubW", + "type": "image", + "metadata": { + "hash": "UBHJao=_0_tT0bI;$Knn0yNaRkRi6FnjxubW", + "size": 1454686, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3444737364, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, a Stock car in The Mirror Maze landscape at Dusk ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 14, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7f4b39ff-a368-4562-9e7f-2c824e38ec9f/width=450/3857689.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U5E1]c]ju1Pn00~V=yv}+t0#V{m,9brEoc.8", + "type": "image", + "metadata": { + "hash": "U5E1]c]ju1Pn00~V=yv}+t0#V{m,9brEoc.8", + "size": 1416612, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3545575572, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "stopmotion, Romeo and Juliet, Two young lovers from feuding families defy societal expectations and face tragic consequences. 
", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 14, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7ef9c177-f7dc-4f67-95d9-d28bf269a736/width=450/3857707.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U6E_v:}s{9%100SeNFV]0MxtELaL0yjYnmWE", + "type": "image", + "metadata": { + "hash": "U6E_v:}s{9%100SeNFV]0MxtELaL0yjYnmWE", + "size": 1454388, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 3897238050, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, Gorgon: Snake-haired, Petrifying, Serpentine, Monstrous, Medusa-like, Stone-gazing, Cursed, Serpent-headed ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8410115a-9602-4dd0-aa1e-9b044e3cd955/width=450/3857703.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UAAJHh^-5A5WDzNYRhM^5_NH$*$kaFr:InNG", + "type": "image", + "metadata": { + "hash": "UAAJHh^-5A5WDzNYRhM^5_NH$*$kaFr:InNG", + "size": 1334732, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 11862246, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, a Wolf playing guitar in a club, rock band, concert ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7f41182f-511c-482a-9385-97e62d9fe695/width=450/3857715.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U4E3L-C3_h%NHjD*%2KQyssDIA9]Gd?H-MDj", + "type": "image", + "metadata": { + "hash": "U4E3L-C3_h%NHjD*%2KQyssDIA9]Gd?H-MDj", + "size": 1282521, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 1685805037, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, best quality, masterpiece, a Danish boy in Atlantis landscape at Tea time ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/54b32945-2807-457a-8d75-0668f4674329/width=450/3857698.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U6DIah9^m+jb00E*IUoI02I;~Uoe-;-UxZNb", + "type": "image", + "metadata": { + "hash": "U6DIah9^m+jb00E*IUoI02I;~Uoe-;-UxZNb", + "size": 1421841, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 1105308206, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, Kasha: Corpse-eating cat, Cat-like, Fire-wielding, Supernatural, Monstrous, Yokai, Ethereal, Otherworldly. ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/83158020-783b-4b02-87ef-7294b28429ee/width=450/3857697.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U6B2s9~C9cE400EKxWxU0sEOtO%2-:-mNF5S", + "type": "image", + "metadata": { + "hash": "U6B2s9~C9cE400EKxWxU0sEOtO%2-:-mNF5S", + "size": 1311444, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 1520523878, + "Model": "juggernautXL_version5", + "steps": 24, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "claymation, a Dominican kid playing guitar in a club, rock band, concert ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "Euler a", + "cfgScale": 10, + "resources": [ + { + "name": "CLAYMATE_V2.03_", + "type": "lora", + "weight": 1 + }, + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "painting", + "\"CLAYMATE_V2.03_": "73b7b029f113\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/236248" +} \ No newline at end of file diff --git a/CLAYMATE_V2.03_.preview.png b/CLAYMATE_V2.03_.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..fb1f40237729737c28d5e17416775436e652bdc9 Binary files /dev/null and b/CLAYMATE_V2.03_.preview.png differ diff --git a/CLAYMATE_V2.03_.safetensors b/CLAYMATE_V2.03_.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e5fdbf23542dedb54e7ef9af690e4627348adbda --- /dev/null +++ b/CLAYMATE_V2.03_.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc1790ddec807108bb4125748c37b5b33c23806f2b5f74d1c5b8b19bf68b2284 +size 912561180 diff --git a/Character Design.civitai.info b/Character Design.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..0fe30066e405ac14ac137fe806e97d01c81240f8 --- /dev/null +++ b/Character Design.civitai.info @@ -0,0 +1,460 @@ +{ + "id": 153726, + "modelId": 97499, + "name": "SD XL", + "createdAt": "2023-09-02T15:10:20.963Z", + "updatedAt": "2023-09-02T15:37:55.286Z", + "status": "Published", + "publishedAt": "2023-09-04T03:00:00.000Z", + "trainedWords": [ + "Character Design page" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 2380, + "ratingCount": 215, + "rating": 5, + 
"thumbsUpCount": 355 + }, + "model": { + "name": "Character Design", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 114756, + "sizeKB": 223129.86328125, + "name": "Character Design.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-09-02T15:21:21.635Z", + "hashes": { + "AutoV1": "E445A01F", + "AutoV2": "BD46B0CA22", + "SHA256": "BD46B0CA2218E84E45BA1D3C54575FEEB2D8F8E83D72376A2AF3C00D39CE2AC4", + "CRC32": "DC617980", + "BLAKE3": "1EF6E7387004DF6C0455B6189B43A343E5CC26EB2FF7976DD0320D091DC34E95" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/153726" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/198973b7-31e5-4499-b7ff-d0ec06d1b787/width=450/2321871.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UANmH9wf?^-B8wW=RkM|.8bvMdkW%gVt%2ni", + "type": "image", + "metadata": { + "hash": "UANmH9wf?^-B8wW=RkM|.8bvMdkW%gVt%2ni", + "size": 1677457, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 2649030130, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - Character design for a sword-wielding boy School uniform attire Red scarf or neckerchief Monster Boy game style Eight-directional movement Cartoonish art style Playful or adventurous expression Colorful and vibrant design Exaggerated or stylized features Dynamic and action-packed poses Sword or blade weapon Whimsical or fantasy influence Unique or magical abilities Expressive eyes and facial features Iconic hairstyle or headgear Detailed and fluid animation Charming and lovable personality.", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e581e12f-95de-4d4e-a560-0d5f1ce5a5f3/width=450/2321880.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UALEj8pJ?anQ0eMx%M%f~qj=RjjFNHxtM{D%", + "type": "image", + "metadata": { + "hash": "UALEj8pJ?anQ0eMx%M%f~qj=RjjFNHxtM{D%", + "size": 1449707, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 506138998, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - children's book-illustration style character sheet illustrations of a of a 14 year old girl with long black hair, wearing a blue hoodie, light-blue pants with pink shoes, hannah barbera style. The character is a The illustrations should be simple, cute, and in full color with a flat color style. 
he character should be depicted from different angles on a white background no outline", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e93ef371-096c-4e14-9b2f-72bd0ac0a1e3/width=450/2321874.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U4HB-~?I}9tA~qWUMcR:?^s9IVaepen5bYxt", + "type": "image", + "metadata": { + "hash": "U4HB-~?I}9tA~qWUMcR:?^s9IVaepen5bYxt", + "size": 1379385, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 1579745644, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - A set of sketches or illustrations featuring a character named BlueBoy, who has blue skin and wears orange clothing. The character should be simple to draw, but should have distinctive and memorable features that help to make him stand out. Please generate a variety of poses and expressions for the character, and feel free to experiment with different clothing designs or accessories to help further develop his personality and style.\"", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/32794f74-8516-4b70-b255-9e5976ba0ec3/width=450/2321890.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UJLD$Ds.WZ%2DORiflaht8NGxuV@_4Rjofoe", + "type": "image", + "metadata": { + "hash": "UJLD$Ds.WZ%2DORiflaht8NGxuV@_4Rjofoe", + "size": 1576625, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 827947307, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - a character design sheet of a long purple-haired tall prehistoric cartoon female character, with a sharp face with violet eyes and a scar going from her cheek to her left eye. She's dresses in dark gnawed robes but her legs and arms are visible. 
Cartoon style, front, side and back views", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/959bae69-f619-492b-b982-a1c4e56f0819/width=450/2321884.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UEI#MuxaPVx]}=niENi_-ojYIUe.-RbboyoL", + "type": "image", + "metadata": { + "hash": "UEI#MuxaPVx]}=niENi_-ojYIUe.-RbboyoL", + "size": 1308100, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 668510346, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - A set cartoon character design of a litle girl with short blonde and curly hair, character design, fullbody, with the view from front, side, back and 3/ 4 of the character, medieval and boots character sheet", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8dd0badb-0881-4328-98c0-45c52216384e/width=450/2321879.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UIGa::t7NdxZ~9odR*t6%fn*oebHxujbj[oL", + "type": "image", + "metadata": { + "hash": "UIGa::t7NdxZ~9odR*t6%fn*oebHxujbj[oL", + "size": 1217385, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 661868014, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - elves character concept girl thin cute 360 degrees full turnaround full height simple cartoon style less detail full body front view back view side view minimalist blonde hair 2d", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9a553177-5fef-4c88-81fb-4fff47c983b6/width=450/2321881.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U9Kc*D-Ukrxa#*w]X-WALftRrqjsC,Xm$La}", + "type": "image", + "metadata": { + "hash": "U9Kc*D-Ukrxa#*w]X-WALftRrqjsC,Xm$La}", + "size": 1337209, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 4261164321, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - Front view+ side view,+rear view+A character design for a little boy wearing a Chinese lion dance costume+disney design style+character design+Unification of three view clothing", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + 
"name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4f20b488-03fe-4017-aa9c-92fd56e15e05/width=450/2321886.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UAKnh^xv?bInxu-;D%of~qWBM{%M?bWV%Mt7", + "type": "image", + "metadata": { + "hash": "UAKnh^xv?bInxu-;D%of~qWBM{%M?bWV%Mt7", + "size": 1179548, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 37611008, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - Q version of the game character sketch on the same screen with three views front view left view back view monochrome pencil watercolor style", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5bb1520a-1166-4220-b6b6-3723e49f7253/width=450/2321885.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U7Ef$[PC1n^Q:%S$Bq#lcYRkicxtGwV@+ZtR", + "type": "image", + "metadata": { + "hash": "U7Ef$[PC1n^Q:%S$Bq#lcYRkicxtGwV@+ZtR", + "size": 1026807, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 3197160424, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - Cute cartoon character, embellished with white lines in red colors, and paired with blue colors. 
Cool casual clothing for characters", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/996094ff-615e-4d73-9043-3b64ccad7a75/width=450/2321888.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U5L4m3~pRn_M;1%fMyxtVsj@t7Vs}pVso}WB", + "type": "image", + "metadata": { + "hash": "U5L4m3~pRn_M;1%fMyxtVsj@t7Vs}pVso}WB", + "size": 1607169, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 2651506493, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Character Design - A character design sheet for a cute gremlin engineer girl daul wielding pistols for a super hero movie", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Character Design": "bf566fe0981a\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/153726" +} \ No newline at end of file diff --git a/Character Design.preview.png b/Character Design.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..e3879ac6b392d4417435db5a75c18e5eb2e44a24 Binary files /dev/null and b/Character Design.preview.png differ diff --git a/Character Design.safetensors b/Character Design.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..10de05d47bd335664ccdb27e3d931eca58f6fd8c --- /dev/null +++ b/Character Design.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd46b0ca2218e84e45ba1d3c54575feeb2d8f8e83d72376a2af3c00d39ce2ac4 +size 228484980 diff --git a/ClassipeintXL2.1.civitai.info b/ClassipeintXL2.1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..680b0ea9b9429f89e1412a49fdc5fe93dff6844e --- /dev/null +++ b/ClassipeintXL2.1.civitai.info @@ -0,0 +1,421 @@ +{ + "id": 356771, + "modelId": 127139, + "name": "v2.1", + "createdAt": "2024-02-23T07:42:39.751Z", + "updatedAt": "2024-02-23T07:48:42.144Z", + "status": "Published", + "publishedAt": "2024-02-23T07:48:42.140Z", + "trainedWords": [ + "oil painting" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

what v2.0 should have been.

", + "stats": { + "downloadCount": 1709, + "ratingCount": 48, + "rating": 5, + "thumbsUpCount": 138 + }, + "model": { + "name": "ClassipeintXL (oil paint / oil painting style)", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 284603, + "sizeKB": 129751.6875, + "name": "ClassipeintXL2.1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-23T07:46:00.566Z", + "hashes": { + "AutoV1": "D57AFE46", + "AutoV2": "74B377EE27", + "SHA256": "74B377EE27855418A95935852F570F0078A9A7A82CFA4DDC81568FC52ADC87FD", + "CRC32": "98938DE0", + "BLAKE3": "5DA8489998E956D29FA6934FCAF7B4AC03AB7ABC951A778F6DB420FC6D30FA85", + "AutoV3": "BAEA2347AD6107BE75FAAEF690E252AAED957D4D6F2037409BF6B942C41E3F56" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/356771" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2c70e47c-0445-483a-8062-04d65e5ca311/width=450/6998858.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U7FiAF004:_2bxADxuxZ00-:xGad~pIUIAt6", + "type": "image", + "metadata": { + "hash": "U7FiAF004:_2bxADxuxZ00-:xGad~pIUIAt6", + "size": 1557381, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 261562292751499, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":261562292751499,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":261562292751499,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes 
sorrow flowers and crystalline structures photo-real pastel galaxy stars\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":261562292751499,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[261562292751499,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on 
values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "atmospheric oil painting of a hyper-real portrait of a hauntingly beautiful pale pastel rainbow scaled sky serpent with huge watery eyes sorrow flowers and crystalline structures photo-real pastel galaxy stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a1bbd13a-b8a5-4f09-b202-cd0c42e90775/width=450/6998855.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U9G[TV0L57pJuP9F%M.802%1-:Rjm+tlRiZ$", + "type": "image", + "metadata": { + "hash": "U9G[TV0L57pJuP9F%M.802%1-:Rjm+tlRiZ$", + "size": 1354955, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 159891185045576, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":159891185045576,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":159891185045576,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":159891185045576,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key 
lighting\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node 
name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[159891185045576,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "minimalist closeup portrait of an artist at work painting on her canvas easel magical worlds glowing magical energies double exposure bright background high key lighting", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e261478e-51cb-4ef4-a685-0248e9065ccc/width=450/6998856.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U8B3sT-T00E3~qWB4nW?4:I;xt%14nRk?bxY", + "type": "image", + "metadata": { + "hash": "U8B3sT-T00E3~qWB4nW?4:I;xt%14nRk?bxY", + "size": 1526888, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 552687378305160, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"a barn owl with emerging from the shadows of a nighttime 
forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":552687378305160,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"a barn owl with emerging from the shadows of a nighttime forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":552687378305160,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"a barn owl with emerging from the shadows of a nighttime forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":552687378305160,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"a barn owl with emerging from the shadows of a nighttime 
forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node 
name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"a barn owl with emerging from the shadows of a nighttime forest\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[552687378305160,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"a barn owl with emerging from the shadows of a nighttime forest\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "a barn owl with emerging from the shadows of a nighttime forest", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/603313ed-c469-46cf-acd7-3166b71e927b/width=450/6998874.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UGGugp~qRP-nEn?vozMx%#yE-;M_o#kXjFRQ", + "type": "image", + "metadata": { + "hash": "UGGugp~qRP-nEn?vozMx%#yE-;M_o#kXjFRQ", + "size": 1533165, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 53164479698143, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora 
Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":53164479698143,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":53164479698143,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":53164479698143,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's 
Basilica\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[53164479698143,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "Pope Francis in (Fallout Power Armor:1.3) papal armor white and gold leading his cardinal-ninja soldiers in the courtyard of St Peter's Basilica", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/24f7fbad-d930-46ea-9c63-a9634ce43d62/width=450/6998857.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UBF5$}I]%#xu~WNeyDIo00M{IoR*4:jFIAay", + "type": "image", + "metadata": { + "hash": "UBF5$}I]%#xu~WNeyDIo00M{IoR*4:jFIAay", + "size": 1574629, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1069680060791466, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket 
ship\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":1069680060791466,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1069680060791466,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1069680060791466,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 
1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[1069680060791466,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent 
Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "oil painting portrait female space ship captain baroque-style uniform retro-futurism sleek leather and chrome haughty and beautiful background launching pad for a shiny ornate fantastic rocket ship", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1279e901-0719-4dd6-871b-a22926f2c199/width=450/6998859.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UFGIGn9bad%2WS4:Mw.7^+IUiv%M~Vadt7R-", + "type": "image", + "metadata": { + "hash": "UFGIGn9bad%2WS4:Mw.7^+IUiv%M~Vadt7R-", + "size": 1394012, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 340731584629603, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"oil painting an adorable quokka taking a walk in a big city\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"},\"56\":{\"inputs\":{\"seed\":340731584629603,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting an adorable quokka taking a walk in a big city\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":340731584629603,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting an adorable quokka taking a walk in a big city\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":340731584629603,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting an adorable quokka taking a walk in a big city\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select 
SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"oil painting an adorable quokka taking a walk in a big city\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[340731584629603,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"oil painting an adorable quokka taking a walk in a big 
city\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "oil painting an adorable quokka taking a walk in a big city", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1a06a32b-00e7-42de-943b-33cc984e622a/width=450/6998861.jpeg", + "nsfw": "Soft", + "width": 832, + "height": 1216, + "hash": "U7F=Hpt,0L?FR~EL~U%LF{oJ$%t6_2-:E29u", + "type": "image", + "metadata": { + "hash": "U7F=Hpt,0L?FR~EL~U%LF{oJ$%t6_2-:E29u", + "size": 1523951, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 632619306340897, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper \",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":632619306340897,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper \",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":632619306340897,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper 
\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":632619306340897,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper \",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper \"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[632619306340897,\"randomize\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper 
\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "closeup portrait oil painting of a beautiful model in haute couture gown in front of ornate victorian wallpaper ", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/17d6267a-6a4d-41cc-919c-9497f1f613c6/width=450/6998860.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UREVi?%1MyIV~osmRkR+kVjFoet6Rjoef6oe", + "type": "image", + "metadata": { + "hash": "UREVi?%1MyIV~osmRkR+kVjFoet6Rjoef6oe", + "size": 1462327, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 107373864517411, + "vaes": [], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":107373864517411,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":107373864517411,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy 
Mann\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":107373864517411,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[107373864517411,\"fixed\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on 
values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "retro future 1890 steampunk Boston crowded downtown diverse population by Ismail Inceoglu and Jeremy Mann", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c0a5b9cc-60f5-48ed-b789-a79561428180/width=450/6998862.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U9Fi7400V?_3%2I=M{xaE-D*%Mad~q9FxutR", + "type": "image", + "metadata": { + "hash": "U9Fi7400V?_3%2I=M{xaE-D*%Mad~q9FxutR", + "size": 1670679, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 28200826290889, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":28200826290889,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":28200826290889,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":28200826290889,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with 
humans\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node 
name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[28200826290889,\"fixed\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "oil painting of an android made of reflective chrome and kintsugi cracked white porcelain beautiful lighting background busy street sidewalk bustling with humans", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a68fed44-396a-4df2-ac65-79f18bd1e9c9/width=450/6998863.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UTKA[:~U~UbbxuxtjYNH~UD*Rkt6s,j[R-ad", + "type": "image", + "metadata": { + "hash": "UTKA[:~U~UbbxuxtjYNH~UD*Rkt6s,j[R-ad", + "size": 1683275, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 715289434412470, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric 
glow\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":715289434412470,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric glow\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":715289434412470,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric glow\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"Classipeint/ClassipeintXL2b\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":715289434412470,\"steps\":26,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric 
glow\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ClassipeintXL2.1-sized.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node 
name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric glow\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ClassipeintXL2.1-sized.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[715289434412470,\"fixed\",26,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"Classipeint/ClassipeintXL2b\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric glow\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 26, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "isometric diagram of a boxy flux capacitor with exposed vacuum tubes and an oscilloscope and digital display wiring electric glow", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/356771" +} \ No newline at end of file diff --git a/ClassipeintXL2.1.preview.png b/ClassipeintXL2.1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..6fe8fb22723a59014527cba2a8dd2dba36a5530e Binary files /dev/null and b/ClassipeintXL2.1.preview.png differ diff --git a/ClassipeintXL2.1.safetensors b/ClassipeintXL2.1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..05251f8adb6619fea3cd803d03d2f3126b4f4ae3 --- /dev/null +++ b/ClassipeintXL2.1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:74b377ee27855418a95935852f570f0078a9a7a82cfa4ddc81568fc52adc87fd +size 132865728 diff --git a/Cyber_Background_sdxl.civitai.info b/Cyber_Background_sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..23ed23a955e075f5f7b00b4eda1e9c300b440ed3 --- /dev/null +++ b/Cyber_Background_sdxl.civitai.info @@ -0,0 +1,498 @@ +{ + "id": 141343, + "modelId": 102838, + "name": "SDXL", + "createdAt": "2023-08-15T16:59:39.447Z", + "updatedAt": 
"2023-08-15T17:07:53.980Z", + "status": "Published", + "publishedAt": "2023-08-15T17:07:53.979Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 2363, + "ratingCount": 163, + "rating": 4.99, + "thumbsUpCount": 316 + }, + "model": { + "name": "Cyber Background", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 103884, + "sizeKB": 166546.65234375, + "name": "Cyber_Background_sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-15T17:05:48.084Z", + "hashes": { + "AutoV1": "BFA01131", + "AutoV2": "E536C06AB0", + "SHA256": "E536C06AB0EC42175BBB3F11EC2139E16A95916D7DACCDC73DE1EE9EA30B3928", + "CRC32": "3BA72165", + "BLAKE3": "AE1AA4E92450472404081536F586640E6CC7AD477123EA1F612E7D224C692713" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/141343" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/27d73f06-f88c-4604-9564-ddeba9955b4d/width=450/2044766.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "UNE]lgNxJRn%}ZWB$NjuNfaxxDsoOFNwxFay", + "type": "image", + "metadata": { + "hash": "UNE]lgNxJRn%}ZWB$NjuNfaxxDsoOFNwxFay", + "size": 1562248, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336986, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Quantum lighthouse, a beacon that guides travelers through the labyrinthine pathways of alternate dimensions. , cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1a4d7429-5658-4407-9d0f-90c0039ede04/width=450/2044738.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "UF9~R1NbJ7}Ho#$RxHWUJ7$j$kJ7Nc$k$+NG", + "type": "image", + "metadata": { + "hash": "UF9~R1NbJ7}Ho#$RxHWUJ7$j$kJ7Nc$k$+NG", + "size": 1562279, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336984, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Gravitational dunes, shifting hills of gravity anomalies that defy conventional physics. 
, cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c3e5080a-b911-4673-8494-908347df0ff7/width=450/2044734.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "UCBVV1^QIo57~BxajFEM0fAB%2$%0L5Q%2=y", + "type": "image", + "metadata": { + "hash": "UCBVV1^QIo57~BxajFEM0fAB%2$%0L5Q%2=y", + "size": 1556517, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336983, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Techno-wasteland, a desolate landscape of rusted robotics and malfunctioning machinery. , cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2152a127-b1dd-4e13-8609-374f7e2a1a82/width=450/2044735.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U78E6-Nf|yoM^8a1^lkDU^w3sCv%E_tRIofl", + "type": "image", + "metadata": { + "hash": "U78E6-Nf|yoM^8a1^lkDU^w3sCv%E_tRIofl", + "size": 1484673, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 936616798, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a cyberpunk interior design , A subterranean transit hub features maglev trains that navigate through tunnels adorned with luminescent mineral formations. 
, cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"a cyberpunk interior design , __scifi_enviroment_2__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9a5b07e2-ba39-4909-902a-5253068195bf/width=450/2044732.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U5BVMA|50J=6xjsFK+Im00Ec7]K11ZWF[boc", + "type": "image", + "metadata": { + "hash": "U5BVMA|50J=6xjsFK+Im00Ec7]K11ZWF[boc", + "size": 1697324, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 936616799, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a cyberpunk interior design , A colossal tree of interconnected servers forms the heart of a digital realm, where avatars navigate a cybernetic dreamscape. , cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"a cyberpunk interior design , __scifi_enviroment_2__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/831b67a8-1a5d-4bf7-b0ea-3609e776d2cb/width=450/2044731.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U6AvInGFo#q^ysFt=zFb1XJ7S~Bi-=M_}bn5", + "type": "image", + "metadata": { + "hash": "U6AvInGFo#q^ysFt=zFb1XJ7S~Bi-=M_}bn5", + "size": 1686254, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 936616802, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a cyberpunk interior design , In a labyrinthine space station, gravity-defying walkways lead to chambers adorned with holographic art from across the galaxy. 
, cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"a cyberpunk interior design , __scifi_enviroment_2__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6755c482-06fd-4366-9a77-65dfdeaf79f7/width=450/2044736.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U17dRWt,0O01*J=w58rWa%%1ELs7~9Te#659", + "type": "image", + "metadata": { + "hash": "U17dRWt,0O01*J=w58rWa%%1ELs7~9Te#659", + "size": 1682678, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 1784207081, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a cyberpunk interior design , A clockwork city is powered by an enormous, rotating mechanism at its core, with gears and cogs driving its fantastical existence. , cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"a cyberpunk interior design , __scifi_enviroment_2__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8b1a492e-511b-4eb3-9048-28ddb2653023/width=450/2044764.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U7573]%gu%%0%z%gx]adk:tlg4T0uiR+ROpI", + "type": "image", + "metadata": { + "hash": "U7573]%gu%%0%z%gx]adk:tlg4T0uiR+ROpI", + "size": 1619766, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336985, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Subterranean hydroplex, an underwater colony with bioluminescent coral gardens and pressurized domes. 
, cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fa5ce71d-05f3-459d-9beb-fa4171fa867d/width=450/2044765.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U87ehsY6D4T{yZtkNGjG8_tQyYm,$vS4xaRj", + "type": "image", + "metadata": { + "hash": "U87ehsY6D4T{yZtkNGjG8_tQyYm,$vS4xaRj", + "size": 1689210, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336987, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Celestial aviary, floating islands where biomechanical birds glide through clouds of luminescent gas. , cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9b74d64f-7100-4380-878c-5c4c6f06fc88/width=450/2044775.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "U9AbSar|Fsn%}1$-xIWURQ$kjFW,1pwN,vw}", + "type": "image", + "metadata": { + "hash": "U9AbSar|Fsn%}1$-xIWURQ$kjFW,1pwN,vw}", + "size": 1522184, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2470336988, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "(a cyberpunk interior design ), Magnetic stormfront, an electrifying tempest of charged particles crackling across a dark sky. 
, cyberpunk style environment ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 9, + "resources": [ + { + "name": "Cyber_Background_sdxl", + "type": "lora", + "weight": 1 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"(a cyberpunk interior design ), __scifi_enviroment__ , cyberpunk style environment \"", + "\"Cyber_Background_sdxl": "2b5575fcf23a\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/141343" +} \ No newline at end of file diff --git a/Cyber_Background_sdxl.preview.png b/Cyber_Background_sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..97bcc170faf97900e13b9bdad702e69b8d4bd195 Binary files /dev/null and b/Cyber_Background_sdxl.preview.png differ diff --git a/Cyber_Background_sdxl.safetensors b/Cyber_Background_sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0e096c64b3ed0479fbc62bde7ccc64e80d23661b --- /dev/null +++ b/Cyber_Background_sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e536c06ab0ec42175bbb3f11ec2139e16a95916d7daccdc73de1ee9ea30b3928 +size 170543772 diff --git a/CyborgMechExosuit_v1.5.civitai.info b/CyborgMechExosuit_v1.5.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..a4ef3c32f62c014fb0bfb2c3d45d7bb42a519a0f --- /dev/null +++ b/CyborgMechExosuit_v1.5.civitai.info @@ -0,0 +1,229 @@ +{ + "id": 364354, + "modelId": 318695, + "name": "v1.5", + "createdAt": "2024-02-27T19:34:46.570Z", + "updatedAt": "2024-02-27T19:43:37.234Z", + "status": "Published", + "publishedAt": "2024-02-27T19:43:37.233Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

I tried to refine the previous version slightly. This one should allow for even more consistent features for the cyborg Lora. Hope you enjoy this version as well.

You can now easily use a strength of 1.0 with this Lora.
I made sure not to fry it this time!

This will certainly not be the last version either, as I am looking for more material to train the Lora on to improve its quality and versatility.

Again, there are no real trigger words, but words like exosuit, tactical gear, futuristic, and cyberpunk work best.

", + "stats": { + "downloadCount": 944, + "ratingCount": 67, + "rating": 4.97, + "thumbsUpCount": 136 + }, + "model": { + "name": "CyborgMechExosuit", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 291204, + "sizeKB": 223096.59765625, + "name": "CyborgMechExosuit_v1.5.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-27T19:41:25.279Z", + "hashes": { + "AutoV1": "3184D606", + "AutoV2": "0DBE8E4249", + "SHA256": "0DBE8E42492371B84C827091091B1DB924E6DCF15472A94CB90965D8B18188E9", + "CRC32": "0B9C1740", + "BLAKE3": "DA81D05E7E8062F1F3792B4D7058753DC2B17AE8EFCF15BDC7CB43F8D1F5A6CA", + "AutoV3": "C280992B4E6FD79973A4D3C00976E603CBAE6C04696BB32B6F10FA31CFB9AD70" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/364354" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3eda8d95-fa14-49bb-85b1-a575f39f909e/width=450/7194547.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UA8Nw$MdAC%M?wRi9Y%MyYM{WBoz%hRPf8t7", + "type": "image", + "metadata": { + "hash": "UA8Nw$MdAC%M?wRi9Y%MyYM{WBoz%hRPf8t7", + "size": 701584, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/34ee2272-a8d0-4545-8556-ecd5a709210f/width=450/7194534.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UGBN7#_N?boz.8t6DiITX9Mx9FD%?v-;WBkC", + "type": "image", + "metadata": { + "hash": "UGBN7#_N?boz.8t6DiITX9Mx9FD%?v-;WBkC", + "size": 708872, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f21d90ac-9066-49b5-82b2-9127a48b2d16/width=450/7194542.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U6A,%#4oOmxv_3krI9Nc00^+idD$%%McWZnh", + "type": "image", + "metadata": { + "hash": "U6A,%#4oOmxv_3krI9Nc00^+idD$%%McWZnh", + "size": 693712, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1758cde7-5ac4-43d7-9992-d1d8ebf883db/width=450/7194550.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U98g:B%h9ux]_Nx^E1t7XUx]tRkCx^tRxvjZ", + "type": "image", + "metadata": { + "hash": "U98g:B%h9ux]_Nx^E1t7XUx]tRkCx^tRxvjZ", + "size": 714238, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1ee3ee15-241d-4334-889f-d7af2f883437/width=450/7194555.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U99ajUt84m9Y?^RPIAkBITIUnl%g?vtRDiRj", + "type": "image", + "metadata": { + "hash": "U99ajUt84m9Y?^RPIAkBITIUnl%g?vtRDiRj", + "size": 694636, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/034218bd-87af-410f-8a8f-336cdb2169aa/width=450/7194536.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UBAwk{M{E1%g_4yDIUx]9E?wM{M{_4ocNHRP", + "type": "image", + "metadata": { + "hash": "UBAwk{M{E1%g_4yDIUx]9E?wM{M{_4ocNHRP", + "size": 646664, + "width": 
1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8fa97bea-f126-4d41-9d99-0ae157c0f228/width=450/7194552.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U98#B2tl4n.8?wtRD$xuIUoz%NRikCogx]t7", + "type": "image", + "metadata": { + "hash": "U98#B2tl4n.8?wtRD$xuIUoz%NRikCogx]t7", + "size": 698128, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f8ae3059-d335-4148-9de3-3f4d0c2ddbdd/width=450/7194533.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UFBM_ZD%xYEL?wS0ahNF9uJPxItQ%ME1-pS%", + "type": "image", + "metadata": { + "hash": "UFBM_ZD%xYEL?wS0ahNF9uJPxItQ%ME1-pS%", + "size": 624788, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6206f26c-081c-4bbd-97c2-675770221553/width=450/7194535.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UMA^g[t8IURi_NRjM{kC%NM_M{t7-=RjM{kC", + "type": "image", + "metadata": { + "hash": "UMA^g[t8IURi_NRjM{kC%NM_M{t7-=RjM{kC", + "size": 642482, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6a1a563f-361b-4c12-8850-af3cd85da3f7/width=450/7194545.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U67L7cR*yDMx%%x]ITMx9Zx]I9t8T0M{Mwoz", + "type": "image", + "metadata": { + "hash": "U67L7cR*yDMx%%x]ITMx9Zx]I9t8T0M{Mwoz", + "size": 679928, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + } + ], + "downloadUrl": "https://civitai.com/api/download/models/364354" +} \ No newline at end of file diff --git a/CyborgMechExosuit_v1.5.preview.png b/CyborgMechExosuit_v1.5.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..e8b779f75c56edab10b834361722b9c9c214a3ee --- /dev/null +++ b/CyborgMechExosuit_v1.5.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea2022d89687bc131fdcfd2519c36fd9a10725a706a6193b8587c3fa1c2860bc +size 3156250 diff --git a/CyborgMechExosuit_v1.5.safetensors b/CyborgMechExosuit_v1.5.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..04e2f68a92db51a4349fef0b79ac01882fb0f6a8 --- /dev/null +++ b/CyborgMechExosuit_v1.5.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0dbe8e42492371b84c827091091b1db924e6dcf15472a94cb90965d8b18188e9 +size 228450916 diff --git a/DetailedEyes_V3.civitai.info b/DetailedEyes_V3.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..f8e56442720bf3720d5ddd02d481c3b79ae15ac1 --- /dev/null +++ b/DetailedEyes_V3.civitai.info @@ -0,0 +1,143 @@ +{ + "id": 145907, + "modelId": 120723, + "name": "v3.0", + "createdAt": "2023-08-22T07:09:33.933Z", + "updatedAt": "2023-08-22T07:21:27.733Z", + "status": "Published", + "publishedAt": "2023-08-22T07:21:27.730Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

add more eye details

", + "stats": { + "downloadCount": 17855, + "ratingCount": 662, + "rating": 4.98, + "thumbsUpCount": 1185 + }, + "model": { + "name": "DetailedEyes_XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 108044, + "sizeKB": 90967.7734375, + "name": "DetailedEyes_V3.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-22T07:10:42.364Z", + "hashes": { + "AutoV1": "E53F3469", + "AutoV2": "2C1C3F889F", + "SHA256": "2C1C3F889F30F8C61EC73561C1E875EB8CE245F42C0A114B108EE70F61D51B76", + "CRC32": "B905D08C", + "BLAKE3": "ADCF318E16CD58F53129C3BED833509D143614B7A789CCFF98BE7B7F79F5F802" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/145907" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cebd0283-6c71-45e2-b204-c9fa3b6bea1f/width=450/2148743.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1182, + "hash": "UbHd~fRkj?oM~qo0j[j[.9a}a}ae_3oeayj]", + "type": "image", + "metadata": { + "hash": "UbHd~fRkj?oM~qo0j[j[.9a}a}ae_3oeayj]", + "size": 3225665, + "width": 2048, + "height": 1182 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b9c0826c-3047-403f-a01b-077e502c8cad/width=450/2148754.jpeg", + "nsfw": "None", + "width": 1616, + "height": 1326, + "hash": "UaI;xFt6ofxu?vt7WBoLyDkCoLWB~qofkCNG", + "type": "image", + "metadata": { + "hash": "UaI;xFt6ofxu?vt7WBoLyDkCoLWB~qofkCNG", + "size": 2359418, + "width": 1616, + "height": 1326 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e69c72bf-0c6c-47bd-abf5-04a6dbb7c30c/width=450/2148811.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1182, + "hash": "UWI}I#xXS4t6~qR*oKbH.SWEkBRk_3bIWVof", + "type": "image", + "metadata": { + "hash": "UWI}I#xXS4t6~qR*oKbH.SWEkBRk_3bIWVof", + "size": 3297122, + "width": 2048, + "height": 1182 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/18b48789-0836-47cc-ad70-289d2bf23b2d/width=450/2148847.jpeg", + "nsfw": "Soft", + "width": 1632, + "height": 1329, + "hash": "UgF~jptQkCxu_NkCj?kC?bofayWB?bV@a#WB", + "type": "image", + "metadata": { + "hash": "UgF~jptQkCxu_NkCj?kC?bofayWB?bV@a#WB", + "size": 2865665, + "width": 1632, + "height": 1329 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5ba6c99d-9864-4094-9c25-4410e625d9c0/width=450/2148890.jpeg", + "nsfw": "None", + "width": 1632, + "height": 1329, + "hash": "UcKJ[WjYj]s;uPbHs,t7?uW=R*I;^+smbHs8", + "type": "image", + "metadata": { + "hash": "UcKJ[WjYj]s;uPbHs,t7?uW=R*I;^+smbHs8", + "size": 2752373, + "width": 1632, + "height": 1329 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + } + ], + "downloadUrl": "https://civitai.com/api/download/models/145907" +} \ No newline at end of file diff --git a/DetailedEyes_V3.preview.png b/DetailedEyes_V3.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..957656a56e4907854a935f48fcc7b5cabafdb085 Binary files /dev/null and b/DetailedEyes_V3.preview.png differ diff --git a/DetailedEyes_V3.safetensors b/DetailedEyes_V3.safetensors new file 
mode 100644 index 0000000000000000000000000000000000000000..92eb0ddf839dcae56e657d8e00696f03979967c6 --- /dev/null +++ b/DetailedEyes_V3.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c1c3f889f30f8c61ec73561c1e875eb8ce245f42c0a114b108ee70f61d51b76 +size 93151000 diff --git a/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.civitai.info b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..57d63d65c0c34a73fc070963d047ace886b6afe9 --- /dev/null +++ b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.civitai.info @@ -0,0 +1,488 @@ +{ + "id": 287607, + "modelId": 255143, + "name": "v1.0", + "createdAt": "2024-01-05T18:03:25.219Z", + "updatedAt": "2024-02-20T04:05:21.282Z", + "status": "Published", + "publishedAt": "2024-01-05T18:06:21.626Z", + "trainedWords": [ + "Dreamyvibes Artstyle" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "", + "stats": { + "downloadCount": 3866, + "ratingCount": 313, + "rating": 5, + "thumbsUpCount": 630 + }, + "model": { + "name": "DreamyVibes Artsyle - SDXL LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 225787, + "sizeKB": 445786.08203125, + "name": "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-01-05T18:06:02.393Z", + "hashes": { + "AutoV1": "36B415B3", + "AutoV2": "1A25FDDF84", + "SHA256": "1A25FDDF841FC61E7C31D7CF2EB0B627604CE5C431854381966FB4390E0566C5", + "CRC32": "35E210FB", + "BLAKE3": "37D6326D4413512A3745338CB48D780FF77D7F3314018366BD66AB0934AC0135", + "AutoV3": "878E2D40FDC5E786E986B05F4E1756A3717D3EA7CE26D0A3C00AE61B93D45703" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/287607" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/eceec0cf-c814-464f-aaea-fef00a727f51/width=450/5196439.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAFF.**C02rE5b=|8_9GXyxUQp04+v%2Kb%G", + "type": "image", + "metadata": { + "hash": "UAFF.**C02rE5b=|8_9GXyxUQp04+v%2Kb%G", + "size": 1645324, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 3548597835, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": " Dreamyvibes artstyle, A VW van rounds a curve in the road in a woodsy new york state backroad.", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "1> Dreamyvibes artstyle, A VW van rounds a curve in the road in a woodsy new york state backroad. 
\"", + "\"Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9ce73035-57da-4e39-bd35-d0bd3922b856/width=450/5196559.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UIH2WR~p0000K,SirBD%xbs+M{S%SijYf5R*", + "type": "image", + "metadata": { + "hash": "UIH2WR~p0000K,SirBD%xbs+M{S%SijYf5R*", + "size": 1597295, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 2176943881, + "Model": "juggernautXL_version5", + "steps": 30, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "Dreamyvibes artstyle, a lost dog looks up at a small pottery store on the side of a road near the beach. ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/81c5dda5-bff5-475e-8b33-d53c4889ca5f/width=450/5196440.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UJH0||,s4:t7.AI@D%t7A{$f]~s:OtWBwHEO", + "type": "image", + "metadata": { + "hash": "UJH0||,s4:t7.AI@D%t7A{$f]~s:OtWBwHEO", + "size": 1454687, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 4273748483, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": " Dreamyvibes artstyle, sunset in seattle", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "1> Dreamyvibes artstyle, sunset in seattle \"", + "\"Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7c233aba-a0cc-4939-be62-72c76b6bc14a/width=450/5196608.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UKFE=c%19aa}-;xZnjWC}=kBENs-$~NbWBoL", + "type": "image", + "metadata": { + "hash": "UKFE=c%19aa}-;xZnjWC}=kBENs-$~NbWBoL", + "size": 1461325, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 154707384, + "Model": "OpenDalleV1.1", + "steps": 30, + "hashes": { + "model": "673887ace8" + }, + "prompt": "Dreamyvibes artstyle, two foxes examine a closed gas station late at night. 
", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "673887ace8", + "name": "OpenDalleV1.1", + "type": "model" + } + ], + "Model hash": "673887ace8", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4de1a910-ba82-4575-95c4-0b6ee5af3b86/width=450/5196467.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UPIpPa5UE2%K~DNKI:-T,^soxZNHs=W:n+V]", + "type": "image", + "metadata": { + "hash": "UPIpPa5UE2%K~DNKI:-T,^soxZNHs=W:n+V]", + "size": 1497010, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 4273748480, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": " Dreamyvibes artstyle, sunset in seattle", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "1> Dreamyvibes artstyle, sunset in seattle \"", + "\"Dreamyvibes artstyle SDXL - Reddit - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7d658178-70fd-42d9-9780-3e1a4a6bf420/width=450/5197179.jpeg", + "nsfw": "None", + "width": 1352, + "height": 768, + "hash": "UBD9MR0h00xB7Q^iVBIU,mELNu-o9ajY%2bc", + "type": "image", + "metadata": { + "hash": "UBD9MR0h00xB7Q^iVBIU,mELNu-o9ajY%2bc", + "size": 1506051, + "width": 1352, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1352x768", + "seed": 3839275686, + "Model": "realvisxlV30_v30Bakedvae", + "steps": 29, + "hashes": { + "model": "364cecf630" + }, + "prompt": "Dreamyvibes artstyle, a scene from the movie Star Wars featuring princess leia ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6.5, + "resources": [ + { + "hash": "364cecf630", + "name": "realvisxlV30_v30Bakedvae", + "type": "model" + } + ], + "Model hash": "364cecf630", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9c285813-2a78-4f30-9af8-2ba3f32e80cb/width=450/5196678.jpeg", + "nsfw": "None", + "width": 1144, + "height": 896, + "hash": "UCF;pf03RRrw#aRP-oE20J};ERNY74f{n.xW", + "type": "image", + "metadata": { + "hash": "UCF;pf03RRrw#aRP-oE20J};ERNY74f{n.xW", + "size": 1655540, + "width": 1144, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1144x896", + "seed": 2767991726, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Dreamyvibes artstyle, an epic photograph of earth taken from the crest of the dark side of the moon ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + 
"Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/068b6d73-142b-4e79-9de1-18f33e3bb465/width=450/5196776.jpeg", + "nsfw": "None", + "width": 1144, + "height": 896, + "hash": "U6D8z.0#1m^N02t3~AM~0L=x9Y9]}5I[9u=^", + "type": "image", + "metadata": { + "hash": "U6D8z.0#1m^N02t3~AM~0L=x9Y9]}5I[9u=^", + "size": 1480527, + "width": 1144, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1144x896", + "seed": 272917092, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Dreamyvibes artstyle, a bonfire set near a tree with a giant hole, after midnight in a distant section of lost woods. Snowy but dark and only moonlight illuminating the area. ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle, a bonfire set near a tree with a giant hole", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e3d7a638-cdc1-4040-ba95-64543bb69715/width=450/5196607.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAEy6:0i0h?GaPZ#M|of00~9oNS%1*x]-TNy", + "type": "image", + "metadata": { + "hash": "UAEy6:0i0h?GaPZ#M|of00~9oNS%1*x]-TNy", + "size": 1578072, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 154707382, + "Model": "OpenDalleV1.1", + "steps": 30, + "hashes": { + "model": "673887ace8" + }, + "prompt": "Dreamyvibes artstyle, two foxes examine a closed gas station late at night. ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6, + "resources": [ + { + "hash": "673887ace8", + "name": "OpenDalleV1.1", + "type": "model" + } + ], + "Model hash": "673887ace8", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/842addff-70cc-4035-9e7b-10b619c31ad5/width=450/5197583.jpeg", + "nsfw": "None", + "width": 1160, + "height": 1024, + "hash": "UOHdt7xv0gs.}[XTENniVZJAWUaK59Wrxss+", + "type": "image", + "metadata": { + "hash": "UOHdt7xv0gs.}[XTENniVZJAWUaK59Wrxss+", + "size": 1864418, + "width": 1160, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1160x1024", + "seed": 1243494235, + "Model": "copaxTimelessxlSDXL1_v8", + "steps": 29, + "hashes": { + "model": "a771b2b5e8" + }, + "prompt": "Dreamyvibes artstyle, Sunset with unique colors seen from the top of Mt. Everest. 
Climbers are seen in the distance. snow. ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "Template": "\"Dreamyvibes artstyle", + "VAE hash": "ff724e9611", + "cfgScale": 6.5, + "resources": [ + { + "hash": "a771b2b5e8", + "name": "copaxTimelessxlSDXL1_v8", + "type": "model" + } + ], + "Model hash": "a771b2b5e8", + "Negative Template": "", + "Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "1>\"", + "\"Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle": "878e2d40fdc5\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/287607" +} \ No newline at end of file diff --git a/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.preview.png b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..f3e825e5fd313727b50e0a1f1c802cb1ecb565fc Binary files /dev/null and b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.preview.png differ diff --git a/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.safetensors b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f5bd8ae69220557e87e22627088a19c586303e14 --- /dev/null +++ b/Dreamyvibes artstyle SDXL - Trigger with dreamyvibes artstyle.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a25fddf841fc61e7c31d7cf2eb0b627604ce5c431854381966fb4390e0566c5 +size 456484948 diff --git a/EldritchImpressionismXL1.5.civitai.info b/EldritchImpressionismXL1.5.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..d30ac118ded2506bda76785eb893412d50ee194e --- /dev/null +++ b/EldritchImpressionismXL1.5.civitai.info @@ -0,0 +1,421 @@ +{ + "id": 360775, + "modelId": 312341, + "name": "v1.5", + "createdAt": "2024-02-25T20:54:27.579Z", + "updatedAt": "2024-02-25T21:22:02.908Z", + "status": "Published", + "publishedAt": "2024-02-25T21:22:02.902Z", + "trainedWords": [ + "impressionist painting" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "
more reliable style and more versatile subject matter
", + "stats": { + "downloadCount": 1419, + "ratingCount": 94, + "rating": 5, + "thumbsUpCount": 265 + }, + "model": { + "name": "Eldritch Impressionism oil painting", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 288137, + "sizeKB": 68226.8671875, + "name": "EldritchImpressionismXL1.5.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-25T21:00:52.443Z", + "hashes": { + "AutoV1": "62AA66BB", + "AutoV2": "77FA845838", + "SHA256": "77FA8458383F20B2C714A363152D6A6ADBE03973106BEB37D4288CC16AAE8670", + "CRC32": "6BDD8134", + "BLAKE3": "3928A4D753C21C2FC89B3BECFE802AC1245C3655B98F19B18AEA2F8CF7099CD9", + "AutoV3": "2B11115D510A65C8DEF18ED1C700D9018DF555EF556AAA034F1F748E80C0A02F" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/360775" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9551c561-13eb-4a12-a82e-d6b17aa11f45/width=450/7113129.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UdI;VB$x$zkC~WayR-oLpJj[NdjaR.WXs-f6", + "type": "image", + "metadata": { + "hash": "UdI;VB$x$zkC~WayR-oLpJj[NdjaR.WXs-f6", + "size": 1391977, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 944234937538672, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting color and temperature study of a bale of hay on a grassy field at sunset\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"},\"56\":{\"inputs\":{\"seed\":944234937538672,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting color and temperature study of a bale of hay on a grassy field at sunset\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":944234937538672,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting color and temperature study of a bale of hay on a grassy field at sunset\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":944234937538672,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting color and temperature study of a bale of hay on a grassy field at sunset\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select 
SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting color and temperature study of a bale of hay on a grassy field at sunset\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[944234937538672,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting color and temperature study of a bale of hay on a grassy field at 
sunset\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting color and temperature study of a bale of hay on a grassy field at sunset", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4e976e69-fbf7-45ab-9d5e-421647ab99ea/width=450/7113138.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U2E.qc*000~nD402.8-:4n-OIpS$2|ogxAaJ", + "type": "image", + "metadata": { + "hash": "U2E.qc*000~nD402.8-:4n-OIpS$2|ogxAaJ", + "size": 1481092, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 225796104383937, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":225796104383937,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":225796104383937,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum 
wall\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":225796104383937,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[225796104383937,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on 
values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting a museum-goer appreciating Monet's paintings of Rouen cathedral in an ornate frame on the museum wall", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2e1203b2-5cb8-436d-b225-69a3b5fb22d2/width=450/7113127.jpeg", + "nsfw": "Soft", + "width": 832, + "height": 1216, + "hash": "U47-c-00Aw-U~qMxXTay4nx]-oRk4:t8={M|", + "type": "image", + "metadata": { + "hash": "U47-c-00Aw-U~qMxXTay4nx]-oRk4:t8={M|", + "size": 1101178, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 837481723023778, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":837481723023778,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":837481723023778,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":837481723023778,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting dark moody minimal portrait of a model emerging from the 
shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting dark moody minimal portrait of a model emerging from the shadows\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack 
(rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[837481723023778,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting dark moody minimal portrait of a model emerging from the shadows\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting dark moody minimal portrait of a model emerging from the shadows", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2d0a3ae9-295f-4d64-8d61-5afb03a4cf13/width=450/7113185.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U4AA2TNx9[,-01IU9GE200~V~BNH}@E2X.-o", + "type": "image", + "metadata": { + "hash": "U4AA2TNx9[,-01IU9GE200~V~BNH}@E2X.-o", + "size": 1345466, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 128499247576269, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":128499247576269,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the 
background\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":128499247576269,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":128499247576269,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":5.74,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 
768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",5.74,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[128499247576269,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent 
Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist paintin gof a tall graceful flamenco dancer with flowing swirling skirt in a darkened room dramatic light and flamenco band in the background", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7ab26000-3a95-43b4-87fb-5086e07100d9/width=450/7130627.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U8DS{.Dj4.%f?^M{M{WC0LS#-VRkROnj%2s+", + "type": "image", + "metadata": { + "hash": "U8DS{.Dj4.%f?^M{M{WC0LS#-VRkROnj%2s+", + "size": 1499097, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 580631300581076, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting of an adorable sugar glider in the upper branches of its forest\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"},\"56\":{\"inputs\":{\"seed\":580631300581076,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of an adorable sugar glider in the upper branches of its forest\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":580631300581076,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of an adorable sugar glider in the upper branches of its forest\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":580631300581076,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of an adorable sugar glider in the upper branches of its forest\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select 
SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting of an adorable sugar glider in the upper branches of its forest\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[580631300581076,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting of an adorable sugar glider in the upper branches of its 
forest\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting of an adorable sugar glider in the upper branches of its forest", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/73cb72d6-393c-46c1-8fb8-abc4dafe228f/width=450/7113131.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U6Iqoo8IPq.84T[CD#E200?YQ.Dj9KRj-VS5", + "type": "image", + "metadata": { + "hash": "U6Iqoo8IPq.84T[CD#E200?YQ.Dj9KRj-VS5", + "size": 1496093, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1069373281634794, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":1069373281634794,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1069373281634794,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a 
fork\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1069373281634794,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 
portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[1069373281634794,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on 
values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting of a slice of three layer cake on a white plate on a colorful tablecloth with a fork", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/295a8ab7-b037-4f31-8166-be14b4e2b0ca/width=450/7113144.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UUI4-C?ExuNc~p%0xZofXnNGM|t6o~NGNGaf", + "type": "image", + "metadata": { + "hash": "UUI4-C?ExuNc~p%0xZofXnNGM|t6o~NGNGaf", + "size": 1435553, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 565215480908649, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":565215480908649,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":565215480908649,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":565215480908649,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic 
pose\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack 
(rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[565215480908649,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting portrait of superman in flight above metropolis city late afternoon aerial view majestic pose", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/94364f50-54c0-4056-b413-fdb83158b8b7/width=450/7113132.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UDG+R2?t*J9w?^D+E3$*IVxZIBxt-.NbjFoJ", + "type": "image", + "metadata": { + "hash": "UDG+R2?t*J9w?^D+E3$*IVxZIBxt-.NbjFoJ", + "size": 1603462, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 553808191988975, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":553808191988975,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist 
style:0.45)\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":553808191988975,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":553808191988975,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 
640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack 
(rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[553808191988975,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting portrait of a hipster alpaca wearing sunglasses (pointillist style:0.45)", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a137d621-ace3-435e-bb54-581325db9243/width=450/7113140.jpeg", + "nsfw": "Soft", + "width": 832, + "height": 1216, + "hash": "U9Bq6f~o014p#O--R:D,?s$}RQI]~VxZIpNI", + "type": "image", + "metadata": { + "hash": "U9Bq6f~o014p#O--R:D,?s$}RQI]~VxZIpNI", + "size": 1494639, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 975838319218822, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":975838319218822,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the 
leaves\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":975838319218822,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":975838319218822,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 
x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name 
for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[975838319218822,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting of a beautiful biracial woman submerged in a forest pool with a tall waterfall and sun rays filtering through the leaves", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + 
"additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/95353576-222e-4e5a-98bf-738d2192902b/width=450/7113136.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UGI#iWk?o0xa4:.8t7M|yZ-nNGM|%Loeoej?", + "type": "image", + "metadata": { + "hash": "UGI#iWk?o0xa4:.8t7M|yZ-nNGM|%Loeoej?", + "size": 1433033, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 275128704899262, + "vaes": [], + "Model": "sd_xl_base_1.0_0.9vae", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":275128704899262,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war 
scene\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":856,\"height\":1280,\"batch_size\":2},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":275128704899262,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"impressionism1.5/impressionism\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":275128704899262,\"steps\":28,\"cfg\":4.5,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"EldritchImpressionismXL1.5.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.5,\"lora_03\":\"None\",\"strength_03\":0.37,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0_0.9vae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":75,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[75],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 
x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[856,1280,2],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0_0.9vae.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack 
(rgthree)\"},\"widgets_values\":[\"EldritchImpressionismXL1.5.safetensors\",1,\"None\",0.5,\"None\",0.37,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":75}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[275128704899262,\"randomize\",28,4.5,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"impressionism1.5/impressionism\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[71,58,0,62,0,\"IMAGE\"],[75,49,0,56,3,\"LATENT\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0_0.9vae.safetensors" + ], + "prompt": "impressionist painting of a sherman tank on a flat winter field with distant explosions old world war scene", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/360775" +} \ 
No newline at end of file diff --git a/EldritchImpressionismXL1.5.preview.png b/EldritchImpressionismXL1.5.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..c47d802596272595c69b0d32e2eb628746d6aa81 Binary files /dev/null and b/EldritchImpressionismXL1.5.preview.png differ diff --git a/EldritchImpressionismXL1.5.safetensors b/EldritchImpressionismXL1.5.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1c514ca8cc08f1ea9f5dc8f80a05f4574bfdd2d5 --- /dev/null +++ b/EldritchImpressionismXL1.5.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:77fa8458383f20b2c714a363152d6a6adbe03973106beb37d4288cc16aae8670 +size 69864312 diff --git a/Fantasy_Wizard__Witches_SDXL.civitai.info b/Fantasy_Wizard__Witches_SDXL.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..c9f82e3d3c843795641aca0f42374e422d2a10d3 --- /dev/null +++ b/Fantasy_Wizard__Witches_SDXL.civitai.info @@ -0,0 +1,631 @@ +{ + "id": 345857, + "modelId": 308147, + "name": "v1.0", + "createdAt": "2024-02-16T09:02:02.734Z", + "updatedAt": "2024-02-16T09:43:04.734Z", + "status": "Published", + "publishedAt": "2024-02-16T09:43:04.733Z", + "trainedWords": [ + "hkmagic" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1506, + "ratingCount": 113, + "rating": 4.96, + "thumbsUpCount": 253 + }, + "model": { + "name": "Fantasy Wizard & Witches (SDXL)", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 275143, + "sizeKB": 223098.77734375, + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-16T09:06:24.492Z", + "hashes": { + "AutoV1": "717BEACC", + "AutoV2": "FFB96671DC", + "SHA256": "FFB96671DC7713AB70BCC3045E786CAB4DC87D685E6AEA34C619FC47D9643380", + "CRC32": "F59FD16B", + "BLAKE3": "21E2E2BFBFAA7C47CA00F6E8F481727F251370D8854791035B72F62C35E0FA61", + "AutoV3": "F743017EB2CA8D69CE414E9035A24A6FFA237390370BC92EBDC8FF2B9807BC6C" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/345857" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4b96820a-500f-447f-8907-5d2555d2c3a2/width=450/6701275.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U9CZ0WxLDRM|_4?KIBV[S$-sRVMy0JM{b:jZ", + "type": "image", + "metadata": { + "hash": "U9CZ0WxLDRM|_4?KIBV[S$-sRVMy0JM{b:jZ", + "size": 1855341, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1060962925048188, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":1060962925048188,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1060962925048188,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_ty
pe\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1060962925048188,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"}},\"workflow\":{\"last_node_id\":23,\"last_link_id\":38,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background 
\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[1060962925048188,\"randomize\",30,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"\"]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":11},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":12}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.3,0.27]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[654,330],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[11],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[12],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":[342.2724220969458,46],\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.7000000000000001,0.7000000000000001]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[11,4,0,11,0,\"MODEL\"],[12,4,1,11,1,\"CLIP\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 30, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a white fantasy Lion wizard wearing magical outfit, casting purple spells, purple spells, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet", + "additionalResources": [ + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.3, + "strengthClip": 0.27 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.7000000000000001, + "strengthClip": 0.7000000000000001 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0f6879ab-ee1a-4660-a699-38375cc0c915/width=450/6701277.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U7BM@_EQWTo~_KI[MxIq9~WBM}a#0N?EIWWV", + "type": "image", + "metadata": 
{ + "hash": "U7BM@_EQWTo~_KI[MxIq9~WBM}a#0N?EIWWV", + "size": 1672632, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1063289497723975, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":1063289497723975,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1063289497723975,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_ty
pe\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1063289497723975,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"}},\"workflow\":{\"last_node_id\":23,\"last_link_id\":38,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background 
\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[1063289497723975,\"randomize\",30,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"\"]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":11},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":12}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.3,0.27]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[654,330],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[11],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[12],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":[342.2724220969458,46],\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.7000000000000001,0.7000000000000001]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[11,4,0,11,0,\"MODEL\"],[12,4,1,11,1,\"CLIP\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 30, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a beautiful fantasy witch, wearing magical outfit, holding spellbook in one hand and casting yellow spells with other hand, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet", + "additionalResources": [ + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.3, + "strengthClip": 0.27 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.7000000000000001, + "strengthClip": 0.7000000000000001 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5db67708-181b-4443-85e9-933700e88c8e/width=450/6701274.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": 
"U3AIfN=z}s]j}]^5Mx#,0zxt0zAD0x0|ELEg", + "type": "image", + "metadata": { + "hash": "U3AIfN=z}s]j}]^5Mx#,0zxt0zAD0x0|ELEg", + "size": 1607984, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 195909533277797, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":195909533277797,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":195909533277797,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_ty
pe\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":195909533277797,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.7000000000000001,\"strength_clip\":0.7000000000000001,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"}},\"workflow\":{\"last_node_id\":23,\"last_link_id\":38,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background 
\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[195909533277797,\"randomize\",30,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"\"]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":11},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":12}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.3,0.27]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[654,330],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[11],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[12],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":[342.2724220969458,46],\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.7000000000000001,0.7000000000000001]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[11,4,0,11,0,\"MODEL\"],[12,4,1,11,1,\"CLIP\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 30, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a evil fantasy demon, holding spellbook in one hand and casting red dark spells with other hand, hell in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet", + "additionalResources": [ + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.3, + "strengthClip": 0.27 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.7000000000000001, + "strengthClip": 0.7000000000000001 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/aee612d2-2958-411c-be04-230ab382f953/width=450/6701273.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": 
"UDCtE*kRNGx[?[j=IUx[9JICMytQ8|afMyWD", + "type": "image", + "metadata": { + "hash": "UDCtE*kRNGx[?[j=IUx[9JICMytQ8|afMyWD", + "size": 1661238, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 396976826972402, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":396976826972402,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":396976826972402,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\
":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, 
helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":396976826972402,\"steps\":30,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.74,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.3,\"strength_clip\":0.27,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"}},\"workflow\":{\"last_node_id\":23,\"last_link_id\":38,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name 
for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[396976826972402,\"randomize\",30,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet\",\"\"]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":11},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":12}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.3,0.27]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[654,330],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[11],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[12],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":[342.2724220969458,46],\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.74]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[11,4,0,11,0,\"MODEL\"],[12,4,1,11,1,\"CLIP\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 30, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a beautiful fantasy Fairy, holding spellbook in one hand and casting water spells with other hand, fantasy forest in background, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, helmet", + "additionalResources": [ + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.3, + "strengthClip": 0.27 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.74 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fd376214-2e77-4ee7-9d07-fe382edb1184/width=450/6701272.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UGEMEukq-OXTT{S#E2f+0MxCE2xW~Ar=,.WC", + "type": "image", + "metadata": { + "hash": "UGEMEukq-OXTT{S#E2f+0MxCE2xW~Ar=,.WC", + "size": 1948273, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 444296956535963, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":444296956535963,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":444296956535963,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"Checkpoin
tLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":444296956535963,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"}},\"workflow\":{\"last_node_id\":23,\"last_link_id\":38,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background 
\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":11},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":12}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[654,330],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[11],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[12],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 3d\",\"\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[444296956535963,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[11,4,0,11,0,\"MODEL\"],[12,4,1,11,1,\"CLIP\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a fantasy Wizard, holding spellbook in one hand and casting fire spells with other hand, fantasy forest in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, nude, nudity, (embedding:FastNegativeV2:1), (embedding:verybadimagenegative_v1.3:1) 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/020cff99-1a3c-4d1e-a016-96dd57a6661c/width=450/6701276.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U5CPJ8M~_4W.EWxt9FRi1CM_EGW.0Jxw9WV@", + "type": "image", + "metadata": { + "hash": "U5CPJ8M~_4W.EWxt9FRi1CM_EGW.0Jxw9WV@", + "size": 1626635, 
+ "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 118749359120674, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "juggernautXL_v8Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":118749359120674,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":118749359120674,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_cli
p\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":118749359120674,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000
000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"24\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"juggernautXL_v8Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":24,\"last_link_id\":42,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, 
simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[118749359120674,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":41},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":42}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-104,88],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[39],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[40],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"juggernautXL_v8Rundiffusion.safetensors\"]},{\"id\":24,\"type\":\"LoraLoader\",\"pos\":[349,69],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":39},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":40}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[41],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[42],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.45,0.45]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2166,360],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"],[39,4,0,24,0,\"MODEL\"],[40,4,1,24,1,\"CLIP\"],[41,24,0,11,0,\"MODEL\"],[42,24,1,11,1,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 1024, + "models": [ + "juggernautXL_v8Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a very sexy fantasy succubus, castin purple magic, dungeon in background, HKMagic, nsfw, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + 
"upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + }, + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.45, + "strengthClip": 0.45 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/aa0e35eb-38c5-4da9-92ba-d41f7799c34d/width=450/6701278.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAD]xX%Mi^VsUHxubcRk,Bj[s:t7I=WVRjV@", + "type": "image", + "metadata": { + "hash": "UAD]xX%Mi^VsUHxubcRk,Bj[s:t7I=WVRjV@", + "size": 1755794, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 429540175834406, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sdxlUnstableDiffusers_v11Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":429540175834406,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positiv
e\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":429540175834406,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\
"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":429540175834406,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"c
lip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\
"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"24\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":24,\"last_link_id\":42,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"output
s\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[429540175834406,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":41},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":42}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":24,\"type\":\"LoraLoader\",\"pos\":[349,69],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":39},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":40}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[41],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[42],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.45,0.45]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-104,88],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[39],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[40],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2297,522],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"],[39,4,0,24,0,\"MODEL\"],[40,4,1,24,1,\"CLIP\"],[41,24,0,11,0,\"MODEL\"],[42,24,1,11,1,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 1024, + "models": [ + "sdxlUnstableDiffusers_v11Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a ghost, casting thunder magic spells, graveyard in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": 
[], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + }, + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.45, + "strengthClip": 0.45 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/95ad12fe-b491-468b-abea-fa7991a317d5/width=450/6701279.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UIFY.VI9Z~H?lVIURPt6?GITIrxv4-oes.tR", + "type": "image", + "metadata": { + "hash": "UIFY.VI9Z~H?lVIURPt6?GITIrxv4-oes.tR", + "size": 1793779, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 679514775800988, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sdxlUnstableDiffusers_v11Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":679514775800988,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs
\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":679514775800988,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\
"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":679514775800988,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"c
lip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTe
xtEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":4},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"24\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":24,\"last_link_id\":42,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"
CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,4]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[679514775800988,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":41},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":42}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":24,\"type\":\"LoraLoader\",\"pos\":[349,69],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":39},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":40}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[41],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[42],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.45,0.45]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-104,88],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[39],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[40],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2297,521],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"],[39,4,0,24,0,\"MODEL\"],[40,4,1,24,1,\"CLIP\"],[41,24,0,11,0,\"MODEL\"],[42,24,1,11,1,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 1024, + "models": [ + "sdxlUnstableDiffusers_v11Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a fantasy lightning dragon wearing robe, casting lightning spells, fantasy city in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 
8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + }, + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.45, + "strengthClip": 0.45 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f8134079-44da-417a-a30b-83e961c79c50/width=450/6701280.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAAnStyBaef*?[bvDjx@%fRQIUbH9HVaM|bH", + "type": "image", + "metadata": { + "hash": "UAAnStyBaef*?[bvDjx@%fRQIUbH9HVaM|bH", + "size": 1726398, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 755633862675436, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sdxlUnstableDiffusers_v11Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":755633862675436,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoa
der\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":755633862675436,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\
"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":755633862675436,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"c
lip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"c
lass_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"24\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":24,\"last_link_id\":42,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\
"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[755633862675436,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":41},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":42}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":24,\"type\":\"LoraLoader\",\"pos\":[349,69],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":39},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":40}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[41],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[42],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.45,0.45]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-104,88],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[39],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[40],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2297,521],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,8]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"],[39,4,0,24,0,\"MODEL\"],[40,4,1,24,1,\"CLIP\"],[41,24,0,11,0,\"MODEL\"],[42,24,1,11,1,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 
1024, + "models": [ + "sdxlUnstableDiffusers_v11Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a fantasy leaf dragon wearing robe, casting green magic spells with one hand and holdin magic spellbook in one hand, fantasy forest in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + }, + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.45, + "strengthClip": 0.45 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3ee5c4f2-3fc8-403c-8162-bc6a8d0c88bf/width=450/6701281.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U6Bo:go+~C%L-FojH@RP5--:t7Rk0JD#9Yj?", + "type": "image", + "metadata": { + "hash": "U6Bo:go+~C%L-FojH@RP5--:t7Rk0JD#9Yj?", + "size": 1556959, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 472736857648080, + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sdxlUnstableDiffusers_v11Rundiffusion", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":472736857648080,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_
name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, 
young\"},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":472736857648080,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"10\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"},\"11\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\
"},\"12\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"20\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super 
realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"21\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 
3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"23\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":472736857648080,\"steps\":50,\"cfg\":8,\"sampler_name\":\"dpmpp_2m\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"c
lip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic\",\"text_l\":\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_typ
e\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"negative\":{\"inputs\":{\"width\":1024,\"height\":1024,\"crop_w\":0,\"crop_h\":0,\"target_width\":1024,\"target_height\":1024,\"text_g\":\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"text_l\":\"\",\"clip\":{\"inputs\":{\"lora_name\":\"Fantasy_Wizard__Witches_SDXL.safetensors\",\"strength_model\":0.75,\"strength_clip\":0.75,\"model\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"extremely_detailed.safetensors\",\"strength_model\":0.37,\"strength_clip\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncodeSDXL\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":8},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"sdxl_vae.safetensors\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"24\":{\"inputs\":{\"lora_name\":\"add-detail-xl.safetensors\",\"strength_model\":0.45,\"strength_clip\":0.45,\"model\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":24,\"last_link_id\":42,\"nodes\":[{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[1270,-186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":0,\"mode\":
0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"HD, masterpiece, best quality, hyper detailed, ultra detailed, hailoknight, 1woman wearing white armor, holding blue weapon infront of her, cape, simple glowing background \"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[1706,-195],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":1,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":null}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young\"]},{\"id\":10,\"type\":\"VAELoader\",\"pos\":[1707,129],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"sdxl_vae.safetensors\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[2125,226],\"size\":{\"0\":342.2724304199219,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[38],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":12,\"type\":\"LoraLoader\",\"pos\":[1098,63],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":13},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[15],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[29,30],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Fantasy_Wizard__Witches_SDXL.safetensors\",0.75,0.75]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[1790,258],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":15},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":32},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":33},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[472736857648080,\"randomize\",50,8,\"dpmpp_2m\",\"karras\",1]},{\"id\":11,\"type\":\"LoraLoader\",\"pos\":[766,66],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":41},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":42}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[13],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[14],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"extremely_detailed.safetensors\",0.37,0.35000000000000003]},{\"id\":24,\"type\":\"LoraLoader\",\"pos\":[349,69],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":39},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":40}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[41],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[42],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"add-detail-xl.safetensors\",0.45,0.45]},{\"id\":21,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[1286,579],\"size\":{\"0\":412.9512939453125,\"1\":400.0898132324219},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":30}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[33],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d\",\"\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-104,88],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[39],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[40],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sdxlUnstableDiffusers_v11Rundiffusion.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[1347,394],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,8]},{\"id\":23,\"type\":\"SaveImage\",\"pos\":[2297,521],\"size\":{\"0\":545.5598754882812,\"1\":513.970703125},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":38}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":20,\"type\":\"CLIPTextEncodeSDXL\",\"pos\":[782,590],\"size\":{\"0\":462.9512939453125,\"1\":393.0898132324219},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":29}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[32],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncodeSDXL\"},\"widgets_values\":[1024,1024,0,0,1024,1024,\"a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic\",\"HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[7,3,0,8,0,\"LATENT\"],[10,10,0,8,1,\"VAE\"],[13,11,0,12,0,\"MODEL\"],[14,11,1,12,1,\"CLIP\"],[15,12,0,3,0,\"MODEL\"],[29,12,1,20,0,\"CLIP\"],[30,12,1,21,0,\"CLIP\"],[32,20,0,3,1,\"CONDITIONING\"],[33,21,0,3,2,\"CONDITIONING\"],[38,8,0,23,0,\"IMAGE\"],[39,4,0,24,0,\"MODEL\"],[40,4,1,24,1,\"CLIP\"],[41,24,0,11,0,\"MODEL\"],[42,24,1,11,1,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 50, + "width": 1024, + "height": 1024, + 
"models": [ + "sdxlUnstableDiffusers_v11Rundiffusion.safetensors" + ], + "prompt": "a fullbody shot of a fantasy Cat wearing robe, casting black magic spells with one hand and holdin magic spellbook in one hand, fantasy tavern in background, HKMagic, HD, masterpiece, best quality, hyper detailed, ultra detailed, super realistic", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 8, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, low quality, worst quality, bad compostion, poor, low effort, child, childish, young, (embedding:FastNegativeV2:0.5), (embedding:verybadimagenegative_v1.3:0.6) (embedding:JuggernautNegative-neg:1), 3d", + "additionalResources": [ + { + "name": "extremely_detailed.safetensors", + "type": "lora", + "strength": 0.37, + "strengthClip": 0.35 + }, + { + "name": "Fantasy_Wizard__Witches_SDXL.safetensors", + "type": "lora", + "strength": 0.75, + "strengthClip": 0.75 + }, + { + "name": "add-detail-xl.safetensors", + "type": "lora", + "strength": 0.45, + "strengthClip": 0.45 + } + ] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/345857" +} \ No newline at end of file diff --git a/Fantasy_Wizard__Witches_SDXL.preview.png b/Fantasy_Wizard__Witches_SDXL.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..6368d68353072f176efc33cf73a8827f22ed0acd Binary files /dev/null and b/Fantasy_Wizard__Witches_SDXL.preview.png differ diff --git a/Fantasy_Wizard__Witches_SDXL.safetensors b/Fantasy_Wizard__Witches_SDXL.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3b89085f20ba9e7d7c7890b8f8369e729c7f741d --- /dev/null +++ b/Fantasy_Wizard__Witches_SDXL.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ffb96671dc7713ab70bcc3045e786cab4dc87d685e6aea34c619fc47d9643380 +size 228453148 diff --git a/Furry Enhancer V2.84.civitai.info b/Furry Enhancer V2.84.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..5bbe7b27f14b7ce28d1d0157631ca8854804ca01 --- /dev/null +++ b/Furry Enhancer V2.84.civitai.info @@ -0,0 +1,376 @@ +{ + "id": 357604, + "modelId": 310964, + "name": "v2.84", + "createdAt": "2024-02-23T20:47:42.754Z", + "updatedAt": "2024-03-03T10:34:52.255Z", + "status": "Published", + "publishedAt": "2024-02-23T21:49:16.123Z", + "trainedWords": [ + "anthro", + "furry", + "male", + "female", + "photo", + "digital art", + "closeup", + "POV", + "from behind", + "whitewolf", + "hellhound", + "wet fur", + "shower", + "pawpads" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

added around 200+ more training images (around 800 total) and did a complete retrain

more detailed

more species

better nsfw part (male), wip

\"emotions\" ( wip)

I recommend using the models Ratatoskr, Bifr\u00f6st Project, FenrisXL, or Mj\u00f6lnir

", + "stats": { + "downloadCount": 581, + "ratingCount": 26, + "rating": 5, + "thumbsUpCount": 65 + }, + "model": { + "name": "Furry Enhancer (SDXL)", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 285343, + "sizeKB": 994786.5390625, + "name": "Furry Enhancer V2.84.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-23T20:56:15.999Z", + "hashes": { + "AutoV1": "701A05E1", + "AutoV2": "39F4AFDE3E", + "SHA256": "39F4AFDE3E8FB1C4B4843179CA3056D523EB1CD843B491BBD1178620C016C3AA", + "CRC32": "111BEE32", + "BLAKE3": "F0FEE964A8D08A88AC5389E5349CBAB0405618E8789956E0BCE3E858A6C846C8", + "AutoV3": "5FFB715C57699C0B1341945F23C8CCFF5B7242104B36E516D8813DAA6389066B" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/357604" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a2c92b08-512b-4fa9-a770-fdc32a126491/width=450/7023606.jpeg", + "nsfw": "None", + "width": 2016, + "height": 2592, + "hash": "UIHB#qt8_3V@~pRkIo%M_2DiSf-;o}IUn#o#", + "type": "image", + "metadata": { + "hash": "UIHB#qt8_3V@~pRkIo%M_2DiSf-;o}IUn#o#", + "size": 4866696, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a6933250-ff36-4312-bad9-ff3fd8ab2767/width=450/7023388.jpeg", + "nsfw": "None", + "width": 2432, + "height": 2432, + "hash": "UJF$Fe_N_4%M_4%MofNGo}t6jFjF%MxuRjoe", + "type": "image", + "metadata": { + "hash": "UJF$Fe_N_4%M_4%MofNGo}t6jFjF%MxuRjoe", + "size": 8250248, + "width": 2432, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 633806970333448, + "vaes": [ + "FenrisXL.vae.pt" + ], + "Model": "RatatoskrV3.7", + "comfy": "{\"prompt\":{\"1\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"2\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"},\"3\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"4\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"5\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"6\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"7\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"8\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"},\"11\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"12\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"},\"13\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"14\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"},\"15\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"16\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":30,\"nodes\":[{\"id\":6,\"type\":\"EmptyLatentImage\",\"pos\":[100,358],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1216,1216,1]},{\"id\":7,\"type\":\"VAEDecode\",\"pos\":[4261.799987792969,130],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":14,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":22},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[8],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":10,\"type\":\"VAEDecodeTiled\",\"pos\":[2260,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":11},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":12}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[13],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEDecodeTiled\"},\"widgets_values\":[512]},{\"id\":11,\"type\":\"UpscaleModelLoader\",\"pos\":[100,782],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"UPSCALE_MODEL\",\"type\":\"UPSCALE_MODEL\",\"links\":[14],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"UpscaleModelLoader\"},\"widgets_values\":[\"4x-UltraSharp.pth\"]},{\"id\":12,\"type\":\"ImageUpscaleWithModel\",\"pos\":[2675,130],\"size\":{\"0\":241.79998779296875,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"upscale_model\",\"type\":\"UPSCALE_MODEL\",\"link\":14},{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":13}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[15],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageUpscaleWithModel\"}},{\"id\":13,\"type\":\"ImageScale\",\"pos\":[3016.7999877929688,130],\"size\":{\"0\":315,\"1\":130},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":15}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[16],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageScale\"},\"widgets_values\":[\"nearest-exact\",2432,2432,\"disabled\"]},{\"id\":14,\"type\":\"VAEEncodeTiled\",\"pos\":[3431.7999877929688,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":16},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":17}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[21],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEEncodeTiled\"},\"widgets_values\":[512]},{\"id\":8,\"type\":\"VAELoader\",\"pos\":[100,594],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10,12,17],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"FenrisXL.vae.pt\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[4571.799987792969,130],\"size\":{\"0\":315,\"1\":270},\"flags\":{},\"order\":15,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":8}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":15,\"type\":\"KSampler\",\"pos\":[3846.7999877929688,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":13,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":30},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":19},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":20},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":21}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[22],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[941474384575060,\"fixed\",60,5,\"dpmpp_3m_sde_gpu\",\"exponential\",0.2]},{\"id\":5,\"type\":\"KSampler\",\"pos\":[1845,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":29},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":5},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":7}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[11],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[633806970333448,\"fixed\",30,3,\"dpmpp_3m_sde_gpu\",\"exponential\",1]},{\"id\":2,\"type\":\"CLIPSetLastLayer\",\"pos\":[515,130],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":1}],\"outputs\":[{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[24],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPSetLastLayer\"},\"widgets_values\":[-2]},{\"id\":1,\"type\":\"CheckpointLoaderSimple\",\"pos\":[100,130],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[23],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[1],\"shape\":3},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"RatatoskrV3.7.safetensors\"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[868,117],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":23},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":24}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[29,30],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[25,28],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Furry Enhancer V2.84.safetensors\",0.9,0.9]},{\"id\":4,\"type\":\"CLIPTextEncode\",\"pos\":[1345,460],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":28}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,20],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"bad quality, watermark\"]},{\"id\":3,\"type\":\"CLIPTextEncode\",\"pos\":[1345,130],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":25}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[5,19],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes 
\"]}],\"links\":[[1,1,1,2,0,\"CLIP\"],[5,3,0,5,1,\"CONDITIONING\"],[6,4,0,5,2,\"CONDITIONING\"],[7,6,0,5,3,\"LATENT\"],[8,7,0,9,0,\"IMAGE\"],[10,8,0,7,1,\"VAE\"],[11,5,0,10,0,\"LATENT\"],[12,8,0,10,1,\"VAE\"],[13,10,0,12,1,\"IMAGE\"],[14,11,0,12,0,\"UPSCALE_MODEL\"],[15,12,0,13,0,\"IMAGE\"],[16,13,0,14,0,\"IMAGE\"],[17,8,0,14,1,\"VAE\"],[19,3,0,15,1,\"CONDITIONING\"],[20,4,0,15,2,\"CONDITIONING\"],[21,14,0,15,3,\"LATENT\"],[22,15,0,7,0,\"LATENT\"],[23,1,0,16,0,\"MODEL\"],[24,2,0,16,1,\"CLIP\"],[25,16,1,3,0,\"CLIP\"],[28,16,1,4,0,\"CLIP\"],[29,16,0,5,0,\"MODEL\"],[30,16,0,15,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"5\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"15\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 30, + "width": 1216, + "height": 1216, + "models": [ + "RatatoskrV3.7.safetensors" + ], + "prompt": "closeup, portrait photo of an anthro wolf viking male , Nordic knotwork tattoos, wearing a helmet with Nordic runes ", + "denoise": 1, + "sampler": "dpmpp_3m_sde_gpu", + "cfgScale": 3, + "modelIds": [], + "scheduler": "exponential", + "upscalers": [ + "4x-UltraSharp.pth" + ], + "versionIds": [], + "controlNets": [], + "negativePrompt": "bad quality, watermark", + "additionalResources": [ + { + "name": "Furry Enhancer V2.84.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e28f93bb-fe67-4e06-91a8-dd09c6bd77e2/width=450/7023315.jpeg", + "nsfw": "None", + "width": 2016, + "height": 2592, + "hash": "UDEME}Dh00.9-=NHIUxu_4E19E-p_3ROIUkX", + "type": "image", + "metadata": { + "hash": "UDEME}Dh00.9-=NHIUxu_4E19E-p_3ROIUkX", + "size": 4623628, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d9273644-e6f4-4a60-9242-c9ff0f9cd73b/width=450/7023377.jpeg", + "nsfw": "None", + "width": 2432, + "height": 2432, + "hash": "UNGR@*%f~V%2-oSNRibaxtNGITIoxuocM{WA", + "type": "image", + "metadata": { + "hash": "UNGR@*%f~V%2-oSNRibaxtNGITIoxuocM{WA", + "size": 7774679, + "width": 2432, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 633806970333448, + "vaes": [ + "FenrisXL.vae.pt" + ], + "Model": "RatatoskrV3.7", + "comfy": "{\"prompt\":{\"1\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"2\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"},\"3\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"4\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"5\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"6\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"7\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"8\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"},\"11\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"12\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, 
unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"},\"13\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"14\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"},\"15\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"16\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":30,\"nodes\":[{\"id\":6,\"type\":\"EmptyLatentImage\",\"pos\":[100,358],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1216,1216,1]},{\"id\":7,\"type\":\"VAEDecode\",\"pos\":[4261.799987792969,130],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":14,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":22},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[8],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":10,\"type\":\"VAEDecodeTiled\",\"pos\":[2260,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":11},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":12}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[13],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEDecodeTiled\"},\"widgets_values\":[512]},{\"id\":11,\"type\":\"UpscaleModelLoader\",\"pos\":[100,782],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"UPSCALE_MODEL\",\"type\":\"UPSCALE_MODEL\",\"links\":[14],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"UpscaleModelLoader\"},\"widgets_values\":[\"4x-UltraSharp.pth\"]},{\"id\":12,\"type\":\"ImageUpscaleWithModel\",\"pos\":[2675,130],\"size\":{\"0\":241.79998779296875,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"upscale_model\",\"type\":\"UPSCALE_MODEL\",\"link\":14},{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":13}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[15],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageUpscaleWithModel\"}},{\"id\":13,\"type\":\"ImageScale\",\"pos\":[3016.7999877929688,130],\"size\":{\"0\":315,\"1\":130},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":15}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[16],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageScale\"},\"widgets_values\":[\"nearest-exact\",2432,2432,\"disabled\"]},{\"id\":14,\"type\":\"VAEEncodeTiled\",\"pos\":[3431.7999877929688,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":16},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":17}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[21],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEEncodeTiled\"},\"widgets_values\":[512]},{\"id\":8,\"type\":\"VAELoader\",\"pos\":[100,594],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10,12,17],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"FenrisXL.vae.pt\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[4571.799987792969,130],\"size\":{\"0\":315,\"1\":270},\"flags\":{},\"order\":15,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":8}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":15,\"type\":\"KSampler\",\"pos\":[3846.7999877929688,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":13,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":30},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":19},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":20},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":21}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[22],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[941474384575060,\"fixed\",60,5,\"dpmpp_3m_sde_gpu\",\"exponential\",0.2]},{\"id\":4,\"type\":\"CLIPTextEncode\",\"pos\":[1345,460],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":28}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,20],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"anime, 3D render, painting, 
cartoon, drawing, (makeup), bad anatomy, unrealistic\"]},{\"id\":5,\"type\":\"KSampler\",\"pos\":[1845,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":29},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":5},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":7}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[11],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[633806970333448,\"fixed\",30,3,\"dpmpp_3m_sde_gpu\",\"exponential\",1]},{\"id\":2,\"type\":\"CLIPSetLastLayer\",\"pos\":[515,130],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":1}],\"outputs\":[{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[24],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPSetLastLayer\"},\"widgets_values\":[-2]},{\"id\":1,\"type\":\"CheckpointLoaderSimple\",\"pos\":[100,130],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[23],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[1],\"shape\":3},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"RatatoskrV3.7.safetensors\"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[868,117],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":23},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":24}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[29,30],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[25,28],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Furry Enhancer V2.84.safetensors\",0.9,0.9]},{\"id\":3,\"type\":\"CLIPTextEncode\",\"pos\":[1345,130],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":25}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[5,19],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light \"]}],\"links\":[[1,1,1,2,0,\"CLIP\"],[5,3,0,5,1,\"CONDITIONING\"],[6,4,0,5,2,\"CONDITIONING\"],[7,6,0,5,3,\"LATENT\"],[8,7,0,9,0,\"IMAGE\"],[10,8,0,7,1,\"VAE\"],[11,5,0,10,0,\"LATENT\"],[12,8,0,10,1,\"VAE\"],[13,10,0,12,1,\"IMAGE\"],[14,11,0,12,0,\"UPSCALE_MODEL\"],[15,12,0,13,0,\"IMAGE\"],[16,13,0,14,0,\"IMAGE\"],[17,8,0,14,1,\"VAE\"],[19,3,0,15,1,\"CONDITIONING\"],[20,4,0,15,2,\"CONDITIONING\"],[21,14,0,15,3,\"LATENT\"],[22,15,0,7,0,\"LATENT\"],[23,1,0,16,0,\"MODEL\"],[24,2,0,16,1,\"CLIP\"],[25,16,1,3,0,\"CLIP\"],[28,16,1,4,0,\"CLIP\"],[29,16,0,5,0,\"MODEL\"],[30,16,0,15,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"5\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"15\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 30, + "width": 1216, + "height": 1216, + "models": [ + "RatatoskrV3.7.safetensors" + ], + "prompt": "Closeup Photo face of an anthro Labrador on a motorcycle, rocker outfit, natural light ", + "denoise": 1, + "sampler": "dpmpp_3m_sde_gpu", + "cfgScale": 3, + "modelIds": [], + "scheduler": "exponential", + "upscalers": [ + "4x-UltraSharp.pth" 
+ ], + "versionIds": [], + "controlNets": [], + "negativePrompt": "anime, 3D render, painting, cartoon, drawing, (makeup), bad anatomy, unrealistic", + "additionalResources": [ + { + "name": "Furry Enhancer V2.84.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3b6b43ac-33a1-4b4c-923d-7fc1fd7540ea/width=450/7023630.jpeg", + "nsfw": "None", + "width": 2016, + "height": 2592, + "hash": "U6Dlcm.60esQxWE9AGE3%M4q9b%2Ezr;-:~V", + "type": "image", + "metadata": { + "hash": "U6Dlcm.60esQxWE9AGE3%M4q9b%2Ezr;-:~V", + "size": 5684254, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4bb43cd3-d58d-4d51-8d36-ab3266038b73/width=450/7023319.jpeg", + "nsfw": "None", + "width": 2016, + "height": 2592, + "hash": "UHF#wD~W%gozS}N{s.bIoJRPt7xt9aoMe.xa", + "type": "image", + "metadata": { + "hash": "UHF#wD~W%gozS}N{s.bIoJRPt7xt9aoMe.xa", + "size": 5253243, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1f251f06-82db-4579-ad73-538b08c87fe4/width=450/7023385.jpeg", + "nsfw": "None", + "width": 2432, + "height": 2432, + "hash": "UGGl6Z~padxt_3t7D%xuD%RkRkRj-pxaRjxa", + "type": "image", + "metadata": { + "hash": "UGGl6Z~padxt_3t7D%xuD%RkRkRj-pxaRjxa", + "size": 7494303, + "width": 2432, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 633806970333448, + "vaes": [ + "FenrisXL.vae.pt" + ], + "Model": "RatatoskrV3.7", + "comfy": "{\"prompt\":{\"1\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"2\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"},\"3\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"4\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"5\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"6\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"7\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"8\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry 
Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"},\"11\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"12\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"},\"13\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"14\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"},\"15\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"16\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":30,\"nodes\":[{\"id\":6,\"type\":\"EmptyLatentImage\",\"pos\":[100,358],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1216,1216,1]},{\"id\":7,\"type\":\"VAEDecode\",\"pos\":[4261.799987792969,130],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":14,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":22},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[8],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":10,\"type\":\"VAEDecodeTiled\",\"pos\":[2260,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":11},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":12}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[13],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEDecodeTiled\"},\"widgets_values\":[512]},{\"id\":11,\"type\":\"UpscaleModelLoader\",\"pos\":[100,782],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"UPSCALE_MODEL\",\"type\":\"UPSCALE_MODEL\",\"links\":[14],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"UpscaleModelLoader\"},\"widgets_values\":[\"4x-UltraSharp.pth\"]},{\"id\":12,\"type\":\"ImageUpscaleWithModel\",\"pos\":[2675,130],\"size\":{\"0\":241.79998779296875,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"upscale_model\",\"type\":\"UPSCALE_MODEL\",\"link\":14},{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":13}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[15],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageUpscaleWithModel\"}},{\"id\":13,\"type\":\"ImageScale\",\"pos\":[3016.7999877929688,130],\"size\":{\"0\":315,\"1\":130},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":15}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[16],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageScale\"},\"widgets_values\":[\"nearest-exact\",2432,2432,\"disabled\"]},{\"id\":14,\"type\":\"VAEEncodeTiled\",\"pos\":[3431.7999877929688,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":16},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":17}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[21],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEEncodeTiled\"},\"widgets_values\":[512]},{\"id\":8,\"type\":\"VAELoader\",\"pos\":[100,594],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10,12,17],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"FenrisXL.vae.pt\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[4571.799987792969,130],\"size\":{\"0\":315,\"1\":270},\"flags\":{},\"order\":15,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":8}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":15,\"type\":\"KSampler\",\"pos\":[3846.7999877929688,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":13,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":30},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":19},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":20},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":21}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[22],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[941474384575060,\"fixed\",60,5,\"dpmpp_3m_sde_gpu\",\"exponential\",0.2]},{\"id\":5,\"type\":\"KSampler\",\"pos\":[1845,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":29},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":5},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":7}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[11],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[633806970333448,\"fixed\",30,3,\"dpmpp_3m_sde_gpu\",\"exponential\",1]},{\"id\":2,\"type\":\"CLIPSetLastLayer\",\"pos\":[515,130],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":1}],\"outputs\":[{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[24],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPSetLastLayer\"},\"widgets_values\":[-2]},{\"id\":1,\"type\":\"CheckpointLoaderSimple\",\"pos\":[100,130],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[23],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[1],\"shape\":3},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"RatatoskrV3.7.safetensors\"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[868,117],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":23},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":24}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[29,30],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[25,28],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Furry Enhancer V2.84.safetensors\",0.9,0.9]},{\"id\":4,\"type\":\"CLIPTextEncode\",\"pos\":[1345,460],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":28}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,20],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"bad quality, watermark\"]},{\"id\":3,\"type\":\"CLIPTextEncode\",\"pos\":[1345,130],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":25}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[5,19],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic 
light\"]}],\"links\":[[1,1,1,2,0,\"CLIP\"],[5,3,0,5,1,\"CONDITIONING\"],[6,4,0,5,2,\"CONDITIONING\"],[7,6,0,5,3,\"LATENT\"],[8,7,0,9,0,\"IMAGE\"],[10,8,0,7,1,\"VAE\"],[11,5,0,10,0,\"LATENT\"],[12,8,0,10,1,\"VAE\"],[13,10,0,12,1,\"IMAGE\"],[14,11,0,12,0,\"UPSCALE_MODEL\"],[15,12,0,13,0,\"IMAGE\"],[16,13,0,14,0,\"IMAGE\"],[17,8,0,14,1,\"VAE\"],[19,3,0,15,1,\"CONDITIONING\"],[20,4,0,15,2,\"CONDITIONING\"],[21,14,0,15,3,\"LATENT\"],[22,15,0,7,0,\"LATENT\"],[23,1,0,16,0,\"MODEL\"],[24,2,0,16,1,\"CLIP\"],[25,16,1,3,0,\"CLIP\"],[28,16,1,4,0,\"CLIP\"],[29,16,0,5,0,\"MODEL\"],[30,16,0,15,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"5\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"15\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 30, + "width": 1216, + "height": 1216, + "models": [ + "RatatoskrV3.7.safetensors" + ], + "prompt": "Detailed closeup photo of an anthro white wolf queen wearing gothic clothes, detailed eye's, epic light", + "denoise": 1, + "sampler": "dpmpp_3m_sde_gpu", + "cfgScale": 3, + "modelIds": [], + "scheduler": "exponential", + "upscalers": [ + "4x-UltraSharp.pth" + ], + "versionIds": [], + "controlNets": [], + "negativePrompt": "bad quality, watermark", + "additionalResources": [ + { + "name": "Furry Enhancer V2.84.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5c9fe6e6-cb01-4fe2-8687-1d91bd8b2df2/width=450/7023314.jpeg", + "nsfw": "None", + "width": 2016, + "height": 2592, + "hash": "UNHnpPxu~B-o.TxuI=og%hxaE3kDo~t7t5kC", + "type": "image", + "metadata": { + "hash": "UNHnpPxu~B-o.TxuI=og%hxaE3kDo~t7t5kC", + "size": 4761166, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d5dfbb4e-f034-4098-83e7-a4a44ba23d07/width=450/7023316.jpeg", + "nsfw": "X", + "width": 2016, + "height": 2592, + "hash": "UKI=S]b_%$?FO]M|R+Rj?vV@R*WC~qkCt7Io", + "type": "image", + "metadata": { + "hash": "UKI=S]b_%$?FO]M|R+Rj?vV@R*WC~qkCt7Io", + "size": 4764321, + "width": 2016, + "height": 2592 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/27631cd8-40e5-4c46-91a9-ae79e0bb03bf/width=450/7023390.jpeg", + "nsfw": "None", + "width": 2432, + "height": 2432, + "hash": "UIF=zE_4_N%M.8t7t7WBWrxajFoKozofs.jZ", + "type": "image", + "metadata": { + "hash": "UIF=zE_4_N%M.8t7t7WBWrxajFoKozofs.jZ", + "size": 8074671, + "width": 2432, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 633806970333448, + "vaes": [ + "FenrisXL.vae.pt" + ], + "Model": "RatatoskrV3.7", + "comfy": "{\"prompt\":{\"1\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"2\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"},\"3\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"4\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"5\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"6\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"7\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"},\"8\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"},\"11\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"12\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"},\"13\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"14\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"},\"15\":{\"inputs\":{\"seed\":941474384575060,\"steps\":60,\"cfg\":5,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":0.2,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"tile_size\":512,\"pixels\":{\"inputs\":{\"upscale_method\":\"nearest-exact\",\"width\":2432,\"height\":2432,\"crop\":\"disabled\",\"image\":{\"inputs\":{\"upscale_model\":{\"inputs\":{\"model_name\":\"4x-UltraSharp.pth\"},\"class_type\":\"UpscaleModelLoader\"},\"image\":{\"inputs\":{\"tile_size\":512,\"samples\":{\"inputs\":{\"seed\":633806970333448,\"steps\":30,\"cfg\":3,\"sampler_name\":\"dpmpp_3m_sde_gpu\",\"scheduler\":\"exponential\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Furry Enhancer 
V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes \",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"bad quality, watermark\",\"clip\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1216,\"height\":1216,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEDecodeTiled\"}},\"class_type\":\"ImageUpscaleWithModel\"}},\"class_type\":\"ImageScale\"},\"vae\":{\"inputs\":{\"vae_name\":\"FenrisXL.vae.pt\"},\"class_type\":\"VAELoader\"}},\"class_type\":\"VAEEncodeTiled\"}},\"class_type\":\"KSampler\"},\"16\":{\"inputs\":{\"lora_name\":\"Furry Enhancer V2.84.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"stop_at_clip_layer\":-2,\"clip\":{\"inputs\":{\"ckpt_name\":\"RatatoskrV3.7.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"CLIPSetLastLayer\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":30,\"nodes\":[{\"id\":6,\"type\":\"EmptyLatentImage\",\"pos\":[100,358],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1216,1216,1]},{\"id\":7,\"type\":\"VAEDecode\",\"pos\":[4261.799987792969,130],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":14,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":22},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":10}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[8],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":10,\"type\":\"VAEDecodeTiled\",\"pos\":[2260,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":11},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":12}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[13],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEDecodeTiled\"},\"widgets_values\":[512]},{\"id\":11,\"type\":\"UpscaleModelLoader\",\"pos\":[100,782],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"UPSCALE_MODEL\",\"type\":\"UPSCALE_MODEL\",\"links\":[14],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"UpscaleModelLoader\"},\"widgets_values\":[\"4x-UltraSharp.pth\"]},{\"id\":12,\"type\":\"ImageUpscaleWithModel\",\"pos\":[2675,130],\"size\":{\"0\":241.79998779296875,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"upscale_model\",\"type\":\"UPSCALE_MODEL\",\"link\":14},{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":13}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[15],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageUpscaleWithModel\"}},{\"id\":13,\"type\":\"ImageScale\",\"pos\":[3016.7999877929688,130],\"size\":{\"0\":315,\"1\":130},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":15}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[16],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"ImageScale\"},\"widgets_values\":[\"nearest-exact\",2432,2432,\"disabled\"]},{\"id\":14,\"type\":\"VAEEncodeTiled\",\"pos\":[3431.7999877929688,130],\"size\":{\"0\":315,\"1\":78},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":16},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":17}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[21],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAEEncodeTiled\"},\"widgets_values\":[512]},{\"id\":8,\"type\":\"VAELoader\",\"pos\":[100,594],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[10,12,17],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"VAELoader\"},\"widgets_values\":[\"FenrisXL.vae.pt\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[4571.799987792969,130],\"size\":{\"0\":315,\"1\":270},\"flags\":{},\"order\":15,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":8}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":15,\"type\":\"KSampler\",\"pos\":[3846.7999877929688,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":13,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":30},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":19},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":20},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":21}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[22],\"shape\":3}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[941474384575060,\"fixed\",60,5,\"dpmpp_3m_sde_gpu\",\"exponential\",0.2]},{\"id\":5,\"type\":\"KSampler\",\"pos\":[1845,130],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":29},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":5},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":7}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[11],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[633806970333448,\"fixed\",30,3,\"dpmpp_3m_sde_gpu\",\"exponential\",1]},{\"id\":2,\"type\":\"CLIPSetLastLayer\",\"pos\":[515,130],\"size\":{\"0\":315,\"1\":58},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":1}],\"outputs\":[{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[24],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPSetLastLayer\"},\"widgets_values\":[-2]},{\"id\":1,\"type\":\"CheckpointLoaderSimple\",\"pos\":[100,130],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[23],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[1],\"shape\":3},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"RatatoskrV3.7.safetensors\"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[868,117],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":23},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":24}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[29,30],\"shape\":3},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[25,28],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Furry Enhancer V2.84.safetensors\",0.9,0.9]},{\"id\":4,\"type\":\"CLIPTextEncode\",\"pos\":[1345,460],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":28}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,20],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"bad quality, watermark\"]},{\"id\":3,\"type\":\"CLIPTextEncode\",\"pos\":[1345,130],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":25}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[5,19],\"shape\":3}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes 
\"]}],\"links\":[[1,1,1,2,0,\"CLIP\"],[5,3,0,5,1,\"CONDITIONING\"],[6,4,0,5,2,\"CONDITIONING\"],[7,6,0,5,3,\"LATENT\"],[8,7,0,9,0,\"IMAGE\"],[10,8,0,7,1,\"VAE\"],[11,5,0,10,0,\"LATENT\"],[12,8,0,10,1,\"VAE\"],[13,10,0,12,1,\"IMAGE\"],[14,11,0,12,0,\"UPSCALE_MODEL\"],[15,12,0,13,0,\"IMAGE\"],[16,13,0,14,0,\"IMAGE\"],[17,8,0,14,1,\"VAE\"],[19,3,0,15,1,\"CONDITIONING\"],[20,4,0,15,2,\"CONDITIONING\"],[21,14,0,15,3,\"LATENT\"],[22,15,0,7,0,\"LATENT\"],[23,1,0,16,0,\"MODEL\"],[24,2,0,16,1,\"CLIP\"],[25,16,1,3,0,\"CLIP\"],[28,16,1,4,0,\"CLIP\"],[29,16,0,5,0,\"MODEL\"],[30,16,0,15,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"5\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"15\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 30, + "width": 1216, + "height": 1216, + "models": [ + "RatatoskrV3.7.safetensors" + ], + "prompt": "closeup, portrait photo of an anthro wolf viking female , Nordic knotwork tattoos, wearing a helmet with Nordic runes ", + "denoise": 1, + "sampler": "dpmpp_3m_sde_gpu", + "cfgScale": 3, + "modelIds": [], + "scheduler": "exponential", + "upscalers": [ + "4x-UltraSharp.pth" + ], + "versionIds": [], + "controlNets": [], + "negativePrompt": "bad quality, watermark", + "additionalResources": [ + { + "name": "Furry Enhancer V2.84.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + } + ] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/357604" +} \ No newline at end of file diff --git a/Furry Enhancer V2.84.preview.png b/Furry Enhancer V2.84.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..2b41999f9e3594217360c0729cb31bf9b7d95c7a Binary files /dev/null and b/Furry Enhancer V2.84.preview.png differ diff --git a/Furry Enhancer V2.84.safetensors b/Furry Enhancer V2.84.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..547672e4e8c611dcae66cf01abdd5136b1fbf58a --- /dev/null +++ b/Furry Enhancer V2.84.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39f4afde3e8fb1c4b4843179ca3056d523eb1cd843b491bbd1178620c016c3aa +size 1018661416 diff --git a/Harrlogos_v2.0.civitai.info b/Harrlogos_v2.0.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..37980ef000c0bac685be096c8f11f6591c54ecdc --- /dev/null +++ b/Harrlogos_v2.0.civitai.info @@ -0,0 +1,440 @@ +{ + "id": 214296, + "modelId": 176555, + "name": "Harrlogos_v2.0", + "createdAt": "2023-11-06T02:29:11.367Z", + "updatedAt": "2023-11-06T02:43:25.187Z", + "status": "Published", + "publishedAt": "2023-11-06T02:43:25.185Z", + "trainedWords": [ + "text logo" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

Harrlogos v2.0 is here!
Activation words added: 3D, Neon, Egyptian, Pastel, Viking
", + "stats": { + "downloadCount": 15657, + "ratingCount": 1039, + "rating": 4.99, + "thumbsUpCount": 1900 + }, + "model": { + "name": "Harrlogos XL - Finally, custom text generation in SD!", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 164886, + "sizeKB": 223110.06640625, + "name": "Harrlogos_v2.0.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-11-06T02:45:49.350Z", + "hashes": { + "AutoV1": "ADBE02BC", + "AutoV2": "69BE44EE26", + "SHA256": "69BE44EE26EC8FB5D7BB47AB890CB54F5F2126951586D66711CD0AB77B1D0C7E", + "CRC32": "93065DC2", + "BLAKE3": "965A115A93818DFF2386ABE0FBC8E9292786D8CC30445314ACFFC7A5B5D7B5BF" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/214296" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ce0be687-910a-4a85-b987-b422d31cb306/width=450/3423003.jpeg", + "nsfw": "None", + "width": 2048, + "height": 2048, + "hash": "UFBWudjbDPoy?1ayD%j[zPayEbjuuIjbMzbY", + "type": "image", + "metadata": { + "hash": "UFBWudjbDPoy?1ayD%j[zPayEbjuuIjbMzbY", + "size": 6274200, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 645582117624532, + "vaes": [], + "Model": "dynavisionXLAllInOneStylized_beta0411Bakedvae", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":645582117624532,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":645582117624532,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"output/pre\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":645582117624532,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":10,\"last_link_id\":14,\"nodes\":[{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"double letters, repeating letters, more than 8 letters\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":13}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-461,288],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,186],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":12},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[645582117624532,\"randomize\",20,4.5,\"euler\",\"normal\",1]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-24,144],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[12],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[13,14],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1451,187],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"output/pre\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[12,10,0,3,0,\"MODEL\"],[13,10,1,6,0,\"CLIP\"],[14,10,1,7,0,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 20, + "width": 1024, + "height": 1024, + "models": [ + "dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors" + ], + "prompt": "(HarroweD text logo), neon green, purple, spikey, splattered, dripping, blood, crown, fog, moon, trees", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "double letters, repeating letters, more than 8 letters", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c1307f40-7674-41e5-9883-e68ab963177c/width=450/3423021.jpeg", + "nsfw": "None", + "width": 2048, + "height": 2048, + "hash": "UBGk%y,S4TvMDgM{E;z;E0KQFNaR0moH~LTI", + "type": "image", + "metadata": { + "hash": "UBGk%y,S4TvMDgM{E;z;E0KQFNaR0moH~LTI", + "size": 5338788, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 960224180049194, + "vaes": [], + "Model": "dynavisionXLAllInOneStylized_beta0411Bakedvae", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":960224180049194,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), rainbow, pixel art, space, clouds, stars, horns\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(HarroweD text logo), rainbow, pixel art, space, clouds, stars, horns\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":960224180049194,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), rainbow, pixel art, space, clouds, stars, horns\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"output/pre\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":960224180049194,\"steps\":20,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(HarroweD text logo), rainbow, pixel art, space, clouds, stars, 
horns\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"double letters, repeating letters, more than 8 letters\",\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":10,\"last_link_id\":14,\"nodes\":[{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":14}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"double letters, repeating letters, more than 8 letters\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":13}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(HarroweD text logo), rainbow, pixel art, space, clouds, stars, horns\"]},{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-461,288],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,186],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":12},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[960224180049194,\"randomize\",20,4.5,\"euler\",\"normal\",1]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-24,144],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[12],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[13,14],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1451,187],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"output/pre\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[12,10,0,3,0,\"MODEL\"],[13,10,1,6,0,\"CLIP\"],[14,10,1,7,0,\"CLIP\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 20, + "width": 1024, + "height": 1024, + "models": [ + "dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors" + ], + "prompt": "(HarroweD text logo), rainbow, pixel art, space, clouds, stars, horns", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 4.5, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "double letters, repeating letters, more than 8 letters", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b5a56827-def8-4716-9dd1-8023453d709d/width=450/3681923.jpeg", + "nsfw": "None", + "width": 2048, + "height": 2048, + "hash": "U9Byt6?b00Dh^-tmIoIo00no-gScEOxIxbRN", + "type": "image", + "metadata": { + "hash": "U9Byt6?b00Dh^-tmIoIo00no-gScEOxIxbRN", + "size": 5845602, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 548436435713506, + "Model": "starlightXLAnimated_v3", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":548436435713506,\"steps\":22,\"cfg\":5.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, 
space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"LL\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":548436435713506,\"steps\":22,\"cfg\":5.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"chklogo/logo\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":548436435713506,\"steps\":22,\"cfg\":5.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"16\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_typ
e\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":20,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":18}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"LL\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[411,180],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":19}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(StarlightXL) text logo, colorful, anime, space, star, stars\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-840,314],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"starlightXLAnimated_v3.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1453,185],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"chklogo/logo\"]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-353,173],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[16],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[17],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",0.9,0.9]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[24,46],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":16},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":17}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[20],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[18,19],\"slot_index\":1}],\"properties\":{\"Node name for 
S&R\":\"LoraLoader\"},\"widgets_values\":[\"xl_more_art-full_v1.safetensors\",0.5,0.5]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,184],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":20},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[548436435713506,\"randomize\",22,5.5,\"euler\",\"normal\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[16,10,0,16,0,\"MODEL\"],[17,10,1,16,1,\"CLIP\"],[18,16,1,7,0,\"CLIP\"],[19,16,1,6,0,\"CLIP\"],[20,16,0,3,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 22, + "width": 1024, + "height": 1024, + "models": [ + "starlightXLAnimated_v3.safetensors" + ], + "prompt": "(StarlightXL) text logo, colorful, anime, space, star, stars", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 5.5, + "scheduler": "normal", + "negativePrompt": "LL", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + }, + { + "name": "xl_more_art-full_v1.safetensors", + "type": "lora", + "strength": 0.5, + "strengthClip": 0.5 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/60bb582b-d195-408e-80af-d1034559068a/width=450/3681922.jpeg", + "nsfw": "None", + "width": 2048, + "height": 2048, + "hash": "U6B{l-];00E1?bEeVr$M001J~T=aDlSj%zF1", + "type": "image", + "metadata": { + "hash": "U6B{l-];00E1?bEeVr$M001J~T=aDlSj%zF1", + "size": 5507338, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 44047120043306, + "vaes": [], + "Model": "starlightXLAnimated_v3", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":44047120043306,\"steps\":32,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL, TT\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"LL, 
TT\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":44047120043306,\"steps\":32,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL, 
TT\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"chklogo/logo\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":44047120043306,\"steps\":32,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"LL, 
TT\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"16\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.5,\"strength_clip\":0.5,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":20,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":18}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"LL, 
TT\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[411,180],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":19}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(StarlightXL) text logo, colorful, anime, space, star, stars\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-840,314],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"starlightXLAnimated_v3.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1453,185],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"chklogo/logo\"]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-353,173],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[16],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[17],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",0.9,0.9]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[24,46],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":16},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":17}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[20],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[18,19],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"xl_more_art-full_v1.safetensors\",0.5,0.5]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,184],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":20},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[44047120043306,\"fixed\",32,4,\"euler\",\"normal\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[16,10,0,16,0,\"MODEL\"],[17,10,1,16,1,\"CLIP\"],[18,16,1,7,0,\"CLIP\"],[19,16,1,6,0,\"CLIP\"],[20,16,0,3,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 32, + "width": 1024, + "height": 1024, 
+ "models": [ + "starlightXLAnimated_v3.safetensors" + ], + "prompt": "(StarlightXL) text logo, colorful, anime, space, star, stars", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 4, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "LL, TT", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + }, + { + "name": "xl_more_art-full_v1.safetensors", + "type": "lora", + "strength": 0.5, + "strengthClip": 0.5 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/44f2ea7c-1c38-4aaa-90d1-1dd4bc0b2546/width=450/3681921.jpeg", + "nsfw": "None", + "width": 2048, + "height": 2048, + "hash": "U8AciUxZ00JTK7WBRMs*00S7~VwG=zxbb_Io", + "type": "image", + "metadata": { + "hash": "U8AciUxZ00JTK7WBRMs*00S7~VwG=zxbb_Io", + "size": 5088991, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 669046964540801, + "vaes": [], + "Model": "starlightXLAnimated_v3", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":669046964540801,\"steps\":22,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL:1.5) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(StarlightXL:1.5) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":669046964540801,\"steps\":22,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL:1.5) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"chklogo/logo\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":669046964540801,\"steps\":22,\"cfg\":4,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(StarlightXL:1.5) text logo, colorful, anime, space, star, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"16\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.9,\"strength_clip\":0.9,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":20,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":18}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"repeating letters, malformed letters, doubles, 
inaccuracy\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,184],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":20},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[669046964540801,\"randomize\",22,4,\"euler\",\"normal\",1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[411,180],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":19}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(StarlightXL:1.5) text logo, colorful, anime, space, star, stars\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-840,314],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"starlightXLAnimated_v3.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1453,185],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"chklogo/logo\"]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-353,173],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[16],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[17],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",0.9,0.9]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[24,46],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":16},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":17}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[20],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[18,19],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"xl_more_art-full_v1.safetensors\",0.6,0.6]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[16,10,0,16,0,\"MODEL\"],[17,10,1,16,1,\"CLIP\"],[18,16,1,7,0,\"CLIP\"],[19,16,1,6,0,\"CLIP\"],[20,16,0,3,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 22, + "width": 1024, + 
"height": 1024, + "models": [ + "starlightXLAnimated_v3.safetensors" + ], + "prompt": "(StarlightXL:1.5) text logo, colorful, anime, space, star, stars", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 4, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "repeating letters, malformed letters, doubles, inaccuracy", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 0.9, + "strengthClip": 0.9 + }, + { + "name": "xl_more_art-full_v1.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bd317906-37b7-42f2-89d4-507e462899ab/width=450/3847819.jpeg", + "nsfw": "Mature", + "width": 2048, + "height": 2048, + "hash": "UHH1;}s:.lozS2ofoLae?uWBVsoexaWBxuof", + "type": "image", + "metadata": { + "hash": "UHH1;}s:.lozS2ofoLae?uWBVsoexaWBxuof", + "size": 4963345, + "width": 2048, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 299466818225031, + "Model": "dynavisionXLAllInOneStylized_beta0411Bakedvae", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":299466818225031,\"steps\":22,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"split head, with (Zombie text logo:1.5) inside, blood, dripping\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"Double letters, repeating letters, 
inaccuracy, malformed letters\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"split head, with (Zombie text logo:1.5) inside, blood, dripping\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"Double letters, repeating letters, inaccuracy, malformed 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":299466818225031,\"steps\":22,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"split head, with (Zombie text logo:1.5) inside, blood, dripping\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"Double letters, repeating letters, inaccuracy, malformed 
letters\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":299466818225031,\"steps\":22,\"cfg\":4.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"split head, with (Zombie text logo:1.5) inside, blood, 
dripping\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"Double letters, repeating letters, inaccuracy, malformed letters\",\"clip\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"16\":{\"inputs\":{\"lora_name\":\"split2-000015.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v
2.0.safetensors\",\"strength_model\":0.8,\"strength_clip\":0.8,\"model\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":27,\"last_link_id\":20,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1453,185],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,1024,1]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":18}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"Double letters, repeating letters, inaccuracy, malformed letters\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,184],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":20},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[299466818225031,\"randomize\",22,4.5,\"euler\",\"normal\",1]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-840,314],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[404,165],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":19}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"split head, with (Zombie text logo:1.5) inside, blood, 
dripping\"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[24,46],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":16},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":17}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[20],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[18,19],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"split2-000015.safetensors\",0.8,0.8]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-392,164],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[16],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[17],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",0.8,0.8]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[16,10,0,16,0,\"MODEL\"],[17,10,1,16,1,\"CLIP\"],[18,16,1,7,0,\"CLIP\"],[19,16,1,6,0,\"CLIP\"],[20,16,0,3,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 22, + "width": 1024, + "height": 1024, + "models": [ + "dynavisionXLAllInOneStylized_beta0411Bakedvae.safetensors" + ], + "prompt": "split head, with (Zombie text logo:1.5) inside, blood, dripping", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 4.5, + "scheduler": "normal", + "negativePrompt": "Double letters, repeating letters, inaccuracy, malformed letters", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 0.8, + "strengthClip": 0.8 + }, + { + "name": "split2-000015.safetensors", + "type": "lora", + "strength": 0.8, + "strengthClip": 0.8 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/371f8bf8-0f62-48e9-b50a-cb6ba38cdf45/width=450/3983272.jpeg", + "nsfw": "None", + "width": 1920, + "height": 1080, + "hash": "U7CF@}}O4T4}lo$wMHM+00F#54v^MHFh-swW", + "type": "video", + "metadata": { + "hash": "U7CF@}}O4T4}lo$wMHM+00F#54v^MHFh-swW", + "size": 5973931, + "audio": false, + "width": 1920, + "height": 1080, + "duration": 4.8 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/99ff12ec-e828-4e0b-84f9-236bd462eb42/width=450/4093627.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "UFIqDi~p0fxG]jb]9utQMeM{x]NGxt-on4-:", + "type": "image", + "metadata": { + "hash": "UFIqDi~p0fxG]jb]9utQMeM{x]NGxt-on4-:", + "size": 4446895, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 164447758674123, + "vaes": [], + "Model": "starlightXLAnimated_v3", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":164447758674123,\"steps\":22,\"cfg\":6.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, \",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, \",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":164447758674123,\"steps\":22,\"cfg\":6.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, \",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"ComfyUI\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":164447758674123,\"steps\":22,\"cfg\":6.5,\"sampler_name\":\"euler\",\"scheduler\":\"normal\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"},\"positive\":{\"inputs\":{\"text\":\"(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, \",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"repeating letters, malformed letters, doubles, 
inaccuracy\",\"clip\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"10\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"16\":{\"inputs\":{\"lora_name\":\"xl_more_art-full_v1.safetensors\",\"strength_model\":0.6,\"strength_clip\":0.6,\"model\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"},\"clip\":{\"inputs\":{\"lora_name\":\"Harrlogos_v2.0.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"starlightXLAnimated_v3.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"LoraLoader\"}},\"class_type\":\"LoraLoader\"}},\"workflow\":{\"last_node_id\":16,\"last_link_id\":20,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[1209,188],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1453,185],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"ComfyUI\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":18}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"repeating letters, malformed letters, doubles, inaccuracy\"]},{\"id\":10,\"type\":\"LoraLoader\",\"pos\":[-353,173],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":10},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":11}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[16],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[17],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"Harrlogos_v2.0.safetensors\",1,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[411,180],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":19}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[4],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, \"]},{\"id\":16,\"type\":\"LoraLoader\",\"pos\":[24,46],\"size\":{\"0\":315,\"1\":126},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":16},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":17}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[20],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[18,19],\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"LoraLoader\"},\"widgets_values\":[\"xl_more_art-full_v1.safetensors\",0.6,0.6]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[-840,314],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[10],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[11],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"starlightXLAnimated_v3.safetensors\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[863,184],\"size\":{\"0\":315,\"1\":262},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":20},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":4},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[164447758674123,\"randomize\",22,6.5,\"euler\",\"normal\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[4,6,0,3,1,\"CONDITIONING\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[10,4,0,10,0,\"MODEL\"],[11,4,1,10,1,\"CLIP\"],[16,10,0,16,0,\"MODEL\"],[17,10,1,16,1,\"CLIP\"],[18,16,1,7,0,\"CLIP\"],[19,16,1,6,0,\"CLIP\"],[20,16,0,3,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"seed_widgets\":{\"3\":0}}}", + "steps": 22, + "width": 1024, + "height": 768, + "models": [ + "starlightXLAnimated_v3.safetensors" + ], + "prompt": "(Sushi:1.3) text logo, trifle, japanese, fresh fish, dragon rolls, ", + "denoise": 1, + "sampler": "Euler", + "cfgScale": 6.5, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "repeating letters, malformed letters, doubles, inaccuracy", + "additionalResources": [ + { + "name": "Harrlogos_v2.0.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "xl_more_art-full_v1.safetensors", + "type": "lora", + "strength": 0.6, + "strengthClip": 0.6 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cdb9ac02-193f-4274-bb8a-d96a4346173c/width=450/5185020.jpeg", + "nsfw": "None", + "width": 1024, + "height": 768, + "hash": "U6EeZCrz009a0#Or0KKl9H-d9sMpq]#9~WtL", + "type": "video", + "metadata": { + "hash": "U6EeZCrz009a0#Or0KKl9H-d9sMpq]#9~WtL", + "size": 1088740, + "audio": false, + "width": 1024, + "height": 768, + "duration": 4 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + } + ], + "downloadUrl": "https://civitai.com/api/download/models/214296" +} \ No newline at end of file diff --git a/Harrlogos_v2.0.preview.png b/Harrlogos_v2.0.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..802281e1e45905e01131ed114a9a8cc6eecda8bd Binary files /dev/null and b/Harrlogos_v2.0.preview.png differ diff --git a/Harrlogos_v2.0.safetensors b/Harrlogos_v2.0.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..78805da34da4656d5225174ed2021faea31fea88 --- /dev/null +++ b/Harrlogos_v2.0.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69be44ee26ec8fb5d7bb47ab890cb54f5f2126951586d66711cd0ab77b1d0c7e +size 228464708 diff --git a/IPXL_v2.civitai.info b/IPXL_v2.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..62719924d4d7efed0855dbbd4d0792259704caa4 --- /dev/null +++ b/IPXL_v2.civitai.info @@ -0,0 +1,506 @@ +{ + "id": 153253, + "modelId": 135366, + "name": "v2.0", + "createdAt": "2023-09-01T18:17:04.973Z", + "updatedAt": "2023-10-27T21:38:36.883Z", + "status": "Published", + "publishedAt": "2023-09-01T18:55:10.239Z", + "trainedWords": [ + "inkpunk" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

Very minor update, but labeled as 2.0 because the primary change is the training resolution: v1 was trained at 512x512, while this version was trained at 1024x1024.

", + "stats": { + "downloadCount": 6468, + "ratingCount": 452, + "rating": 5, + "thumbsUpCount": 774 + }, + "model": { + "name": "InkPunk XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 114311, + "sizeKB": 445788.85546875, + "name": "IPXL_v2.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-09-01T18:20:52.577Z", + "hashes": { + "AutoV1": "A0F63149", + "AutoV2": "464DEFEC14", + "SHA256": "464DEFEC1434AD1902C364747B4E82CDCF0E9ED3A14E5DAB04A774DAE377DE4A", + "CRC32": "7F359434", + "BLAKE3": "6ECD8AFB25B97EBFF36BEE0186826FCE18FD7C0CD4E7237D88B482FDC3AA4F8C" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/153253" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3a8b0e3d-9855-43ee-a357-736c692c6921/width=450/2310691.jpeg", + "nsfw": "Soft", + "width": 768, + "height": 1024, + "hash": "UHHn=]WG0K%|uP-oD*Ny}[tQE2RQ~UfnR$NF", + "type": "image", + "metadata": { + "hash": "UHHn=]WG0K%|uP-oD*Ny}[tQE2RQ~UfnR$NF", + "size": 582146, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 1974669477, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration of a beautiful woman with (blond:red:0.8) short hair wearing club gear Walking on a forest path ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.5 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "negativePrompt": "tiling poorly drawn out of frame stubby mutation mutated extra limbs extra legs extra arms disfigured deformed odd weird off putting out of frame bad anatomy double clones twins brothers same face repeated person long neck hat poorly drawn cropped text watermark signature logo split image copyright desaturated artifacts noise" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/63d6707d-9e27-47da-a28c-da81c432e767/width=450/2310692.jpeg", + "nsfw": "None", + "width": 1024, + "height": 768, + "hash": "UQH1Y_9^0y%L}[JAxuW;56R*={NaE2s.$gIo", + "type": "image", + "metadata": { + "hash": "UQH1Y_9^0y%L}[JAxuW;56R*={NaE2s.$gIo", + "size": 524733, + "width": 1024, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 3693283304, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration concept art of a landscape, late evening, cloudy skies ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.4 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration concept art of a landscape, __time__" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1967e50f-5a26-4a4b-a906-03b120553ff8/width=450/2310697.jpeg", + "nsfw": "None", + 
"width": 1024, + "height": 768, + "hash": "U7ABIi?b0000_N.8M{4n8wMw%~%#00Di_N^+", + "type": "image", + "metadata": { + "hash": "U7ABIi?b0000_N.8M{4n8wMw%~%#00Di_N^+", + "size": 571628, + "width": 1024, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 3693283301, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration concept art of a landscape, midnight, clear skies ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration concept art of a landscape, __time__" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/66025630-594b-4953-82e2-0d1cf7a761db/width=450/2310694.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UNB;F3H?Di.mpINaa{t6M{aeaKae$kr?M{WC", + "type": "image", + "metadata": { + "hash": "UNB;F3H?Di.mpINaa{t6M{aeaKae$kr?M{WC", + "size": 520222, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 2050114263, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration concept art of a busy cyberpunk street, dark alley,rain, reflections, city, mist, fog ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.45 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration concept art of a busy cyberpunk street, dark alley,rain, reflections, city, mist" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f251368f-c02b-4873-9afd-d2d2db86bccb/width=450/2310693.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UFEC,@*08whgM,s*kqx^D$InJPTHXfNaWANF", + "type": "image", + "metadata": { + "hash": "UFEC,@*08whgM,s*kqx^D$InJPTHXfNaWANF", + "size": 792605, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 779764899, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration concept art of a busy cyberpunk street, dark alley,rain, reflections, city, mist, fog ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.75 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration concept art of a busy cyberpunk street, dark alley,rain, reflections, city, mist" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e7fadd16-12dc-4f17-91d0-d83ba9c3a0cc/width=450/2310695.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UBF~gd_3tR_3~pWB00IU%MRj9Ft7-;t7WCWB", + "type": "image", + "metadata": { + "hash": 
"UBF~gd_3tR_3~pWB00IU%MRj9Ft7-;t7WCWB", + "size": 828092, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3893866607, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration of a beautiful woman wearing black leather and chains, black long hair, gothic, alluring, standing in a cathedral, ", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.5 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f720ec00-aec4-4b12-a19c-7a33b295f1e2/width=450/2312345.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "UMIX{mt7.8of~qxut7ofx]M{oeWB?bj@IUay", + "type": "image", + "metadata": { + "hash": "UMIX{mt7.8of~qxut7ofx]M{oeWB?bj@IUay", + "size": 704309, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 541811232, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration of a beautiful gothgirl wearing black leather and silver chain clothing, black long curled hair, ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.7 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration of a beautiful gothgirl wearing black leather and silver chain clothing, black long curled hair" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2a02b22c-bd4d-40c0-b6ec-80e192e548d0/width=450/2312346.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U5DJFv~q0000?b%M00Rj01of-pIU019F_3_2", + "type": "image", + "metadata": { + "hash": "U5DJFv~q0000?b%M00Rj01of-pIU019F_3_2", + "size": 708918, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 541811229, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style illustration of a beautiful gothgirl wearing black leather and silver chain clothing, black long curled hair, ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.45 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904", + "Wildcard prompt": "\"inkpunk style illustration of a beautiful gothgirl wearing black leather and silver chain clothing, black long curled hair" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f89cd60f-413e-43cc-9e61-bfbe6b863677/width=450/2312343.jpeg", + "nsfw": "None", + "width": 1024, + "height": 768, + "hash": "U8E2:;Ot068wr.W90oxa_Os;0h.QcZ%f#RM_", + "type": "image", + "metadata": { + "hash": "U8E2:;Ot068wr.W90oxa_Os;0h.QcZ%f#RM_", + "size": 751851, + "width": 1024, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, 
+ "meta": { + "Size": "1024x768", + "seed": 1630814359, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style IPXColor illustration of a large interstellar spaceport ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.55 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c82111a1-53db-4071-bace-c32582ce7c63/width=450/2312344.jpeg", + "nsfw": "None", + "width": 1024, + "height": 768, + "hash": "UNHfI-?vK%ic?at6wcpI0}Nav}Rj~WbvKOR5", + "type": "image", + "metadata": { + "hash": "UNHfI-?vK%ic?at6wcpI0}Nav}Rj~WbvKOR5", + "size": 673017, + "width": 1024, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 3229481856, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "8ccb65e904" + }, + "prompt": "inkpunk style IPXColor illustration of a speed boat race ", + "IPXL_v2": "__weightMid__>\"", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "\"IPXL_v2": "3b5fa4938f68\"", + "cfgScale": 7, + "resources": [ + { + "name": "IPXL_v2", + "type": "lora", + "weight": 0.45 + }, + { + "hash": "8ccb65e904", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "8ccb65e904" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/153253" +} \ No newline at end of file diff --git a/IPXL_v2.preview.png b/IPXL_v2.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..282d682658ee02a4f54e10c356d7158b2559fd23 Binary files /dev/null and b/IPXL_v2.preview.png differ diff --git a/IPXL_v2.safetensors b/IPXL_v2.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..90a2b65dcaf362565e04769de3409b6a5d21e4d3 --- /dev/null +++ b/IPXL_v2.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:464defec1434ad1902c364747b4e82cdcf0e9ed3a14e5dab04a774dae377de4a +size 456487788 diff --git a/LineAniRedmondV2-Lineart-LineAniAF.civitai.info b/LineAniRedmondV2-Lineart-LineAniAF.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..f2da4495be1c41416edba9239925b25562e5facd --- /dev/null +++ b/LineAniRedmondV2-Lineart-LineAniAF.civitai.info @@ -0,0 +1,412 @@ +{ + "id": 177544, + "modelId": 127018, + "name": "v2.0", + "createdAt": "2023-10-07T04:10:06.583Z", + "updatedAt": "2023-10-07T04:11:26.342Z", + "status": "Published", + "publishedAt": "2023-10-07T04:11:26.340Z", + "trainedWords": [ + "lineart", + "LineAniAF" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 5620, + "ratingCount": 399, + "rating": 4.99, + "thumbsUpCount": 709 + }, + "model": { + "name": "LineAniRedmond- Linear Manga Style for SD XL - Anime Style.", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 135727, + "sizeKB": 166542.99609375, + "name": "LineAniRedmondV2-Lineart-LineAniAF.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": 
null, + "scannedAt": "2023-10-07T04:15:39.011Z", + "hashes": { + "AutoV1": "14552A05", + "AutoV2": "1692A2C7D2", + "SHA256": "1692A2C7D285F6C94B7910BC8F8EC2758163BA2710214BD35BB20CC94D2488C8", + "CRC32": "53C401D2", + "BLAKE3": "5F9DD2932895C9F91B24F61097A10204C5180581AA3B5F72DF40E658217B1000" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/177544" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a831adfc-ca61-4004-83e5-ae607f791412/width=450/2829305.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U8MtaN00009F?b%M%M9F4n~q00-;4nD%RjE1", + "type": "image", + "metadata": { + "hash": "U8MtaN00009F?b%M%M9F4n~q00-;4nD%RjE1", + "size": 1472466, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2331126365, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime, wearing a jacket, big breast, glasses, focus on face, room, manga, (lineart), (monochrome), black and white, (colorless)\n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6cec0f2d-4861-4ae3-bb02-0e3a68491524/width=450/2829304.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UOK_B@Rjt6%M~qxuM{Rj9FD%%MRj9Ft7D%ay", + "type": "image", + "metadata": { + "hash": "UOK_B@Rjt6%M~qxuM{Rj9FD%%MRj9Ft7D%ay", + "size": 1312301, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 4168700321, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime, wearing a jacket, big breast, glasses, focus on face, room, manga, (lineart), (monochrome), black and white, (colorless)\n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3088b2f5-4bd8-4952-bbd6-403a824fdc14/width=450/2829311.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCN,[5t8j@9F-;_3~q%M-;IU?b-;-;%MWBD%", + "type": "image", + "metadata": { + "hash": "UCN,[5t8j@9F-;_3~q%M-;IU?b-;-;%MWBD%", + "size": 1221474, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2882483537, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "1girl, solo, teen (white sailor-fuku, short sleeve, black collar, deep gray skirt, dark gray neckerchief:1.5) (cute,sweet,bare face,big eyes,black hair:1.4) (smile:1.2) straight hear, medium hair, short tall, cowboy shot (in classroom, no other students, hands behind:1.5) (dynamic 
pose,dynamic angle:1.2) BREAK japanese, japanese idol, black eyes (manga, lineart, monochrome, black and white, colorless, white skin:1.4) \n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/53aca9ab-f18c-4cfb-9290-d66b46702727/width=450/2829303.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UFJb25D%00~qM{D%00%M%MM{M{?bj[M{WBay", + "type": "image", + "metadata": { + "hash": "UFJb25D%00~qM{D%00%M%MM{M{?bj[M{WBay", + "size": 1616043, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1537594353, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime, lady, backpack, outdoors, (lineart), manga, (monochrome), (colorless) \n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dcf4a334-022d-468f-bc3b-692749e7b3b8/width=450/2829306.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UKJkl#Rj00_3~qRjM{%MxuofM{xuM{RjM{Rj", + "type": "image", + "metadata": { + "hash": "UKJkl#Rj00_3~qRjM{%MxuofM{xuM{RjM{Rj", + "size": 1541685, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 492839943, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "(masterpiece:1.3), (intricate:1), best quality, official art, 8k wallpaper, highly detailed, illustration, lineart, monochrome, cinematic light, monastarpact, 1 girl, (full body view:1.2), sitting down,\n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d40a1950-2492-4ff4-b308-97685e503dd4/width=450/2829307.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U4NdO8D%00~q~q~q0000xu9F_3%MRjt79Fxu", + "type": "image", + "metadata": { + "hash": "U4NdO8D%00~q~q~q0000xu9F_3%MRjt79Fxu", + "size": 1036990, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1631279845, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "masterpiece, best quality,1girl, bangs, two hair bun, messy hair, school, uniform,\n,Lineart, LineAniAF\n,", + 
"Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5b9a571e-792c-4a98-885b-32ccf6b8e6ac/width=450/2829308.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UKMtaN~q00-;?b?bof?bxut7%MM{9Ft7RjfP", + "type": "image", + "metadata": { + "hash": "UKMtaN~q00-;?b?bof?bxut7%MM{9Ft7RjfP", + "size": 1279432, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 493801159, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime girl, bob cut hair, sleeveless solid T-shirt, plaster patch with print on a nose, dark lips, closed mouth, manga, (lineart), (monochrome), (colorless)\n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c752e2ce-9b29-409b-b701-0745166e3954/width=450/2829309.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UANdL00000_3~q9FIUayWB009FM{004nt7_3", + "type": "image", + "metadata": { + "hash": "UANdL00000_3~q9FIUayWB009FM{004nt7_3", + "size": 1582080, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2595752463, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime, lady, backpack, outdoors, (lineart), manga, (monochrome), (colorless)\n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c5e5742d-fe4b-4292-b855-ed7355e26a12/width=450/2829310.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UTN,_Dt700M{Rjxu%Mof-;Rjt7ay~qof%Mt7", + "type": "image", + "metadata": { + "hash": "UTN,_Dt700M{Rjxu%Mof-;Rjt7ay~qof%Mt7", + "size": 1169788, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1504032563, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "best quality, anime, lady in unzipped Adidas tracksuit, polo shirt, outdoors, touching head, (lineart), manga, (monochrome), black and white, (colorless) \n,Lineart, LineAniAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, 
+ "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated, colors, colorful, blurry, bokeh , full body", + "\"LineAniRedmondV2-Lineart-LineAniAF": "40b6d27be3a3\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/177544" +} \ No newline at end of file diff --git a/LineAniRedmondV2-Lineart-LineAniAF.preview.png b/LineAniRedmondV2-Lineart-LineAniAF.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..2fd437ee2a61bbe7bb9c008fb46f8618f7523c2b Binary files /dev/null and b/LineAniRedmondV2-Lineart-LineAniAF.preview.png differ diff --git a/LineAniRedmondV2-Lineart-LineAniAF.safetensors b/LineAniRedmondV2-Lineart-LineAniAF.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2f4764d6d2c1d1091f8435766c3c0eec9ef025ba --- /dev/null +++ b/LineAniRedmondV2-Lineart-LineAniAF.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1692a2c7d285f6c94b7910bc8f8ec2758163ba2710214bd35bb20cc94d2488c8 +size 170540028 diff --git a/LogoRedmondV2-Logo-LogoRedmAF.civitai.info b/LogoRedmondV2-Logo-LogoRedmAF.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..c6d57ad3f83fb97a8402cf5b715024b630f893cf --- /dev/null +++ b/LogoRedmondV2-Logo-LogoRedmAF.civitai.info @@ -0,0 +1,412 @@ +{ + "id": 177492, + "modelId": 124609, + "name": "v2.0", + "createdAt": "2023-10-07T02:34:13.425Z", + "updatedAt": "2023-10-07T02:35:59.369Z", + "status": "Published", + "publishedAt": "2023-10-07T02:35:59.367Z", + "trainedWords": [ + "logo", + "logoredmaf" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 20086, + "ratingCount": 660, + "rating": 5, + "thumbsUpCount": 1370 + }, + "model": { + "name": "Logo.Redmond - Logo Lora for SD XL 1.0", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 135681, + "sizeKB": 166542.99609375, + "name": "LogoRedmondV2-Logo-LogoRedmAF.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-07T02:35:53.536Z", + "hashes": { + "AutoV1": "07D8DCA8", + "AutoV2": "55F7F40883", + "SHA256": "55F7F408831C34F84857B1A728D1DD70B76083D80E7279E22097CA5AC4430E13", + "CRC32": "D265A6E4", + "BLAKE3": "EEB64B0FA265FC7FB8F4868D52281BDB3A493C268A39979A091E34C7D5D69ED6" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/177492" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5b4fa0fe-93b9-41b7-9eca-311134aaaf62/width=450/2828404.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UYPj7Dt7~pt7WBj[t7ay_2a{9Gj[-:ayM{kC", + "type": "image", + "metadata": { + "hash": "UYPj7Dt7~pt7WBj[t7ay_2a{9Gj[-:ayM{kC", + "size": 478461, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 237225586, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA logo for a space travel company, launching rocket, space-themed colors (black, purple, dark blue)\n),LogoRedAF\n,", 
+ "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9194a8cf-6a4b-4fec-89a2-ae918085c7a6/width=450/2828405.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U5P#Q3=_0v=_]hj@Xmj[0cbH^SjZ?]j[iIjZ", + "type": "image", + "metadata": { + "hash": "U5P#Q3=_0v=_]hj@Xmj[0cbH^SjZ?]j[iIjZ", + "size": 338195, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 739507036, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA logo for a bakery, freshly baked bread, warm colors (orange, yellow), no text, minimalist,\n,LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/019ed587-6f8f-4a9f-a3ed-4cdab7e41095/width=450/2828406.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U8D.E@o}8ux]tRofX8Rj8uj[@uWVpcRjr?x[", + "type": "image", + "metadata": { + "hash": "U8D.E@o}8ux]tRofX8Rj8uj[@uWVpcRjr?x[", + "size": 394036, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2110787180, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA colorful logo, of a unicorn, minimalist color,\n,LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bf8c1303-eebd-4157-a04d-f93a91af6440/width=450/2828407.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U88g?Oxa00IqX9ayn$of00NH~W$%rrofS$Rk", + "type": "image", + "metadata": { + "hash": "U88g?Oxa00IqX9ayn$of00NH~W$%rrofS$Rk", + "size": 438434, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 366868260, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA logo for a surf school, surfer riding a wave, beachy and ocean-inspired colors,\n,LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/be13be44-b666-4ab5-a17f-b061f1b61dfe/width=450/2828408.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAR2}Q%gqF-V-:kCbbe.p_jZUwX8%gaenOoz", + "type": "image", + "metadata": { + "hash": "UAR2}Q%gqF-V-:kCbbe.p_jZUwX8%gaenOoz", + "size": 312060, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 207260981, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA logo for a fitness app, dynamic running figure, energetic colors (red, orange)\n),LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ac86ca03-2825-4f10-82e3-bb7a51b9090b/width=450/2828409.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UEOWZ$xu~V%M-pj[NGa{~BkC4:V@Ntay%2oL", + "type": "image", + "metadata": { + "hash": "UEOWZ$xu~V%M-pj[NGa{~BkC4:V@Ntay%2oL", + "size": 385433, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 603851934, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nLogo of mountain, hike, modern, colorful,\n),LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/39f8aac0-2774-4cab-8d10-8321e8d89ab4/width=450/2828410.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U3Oy|a_29r~V~Vj[WUj[9rj[=#a{_2j[s:fj", + "type": "image", + "metadata": { + "hash": "U3Oy|a_29r~V~Vj[WUj[9rj[=#a{_2j[s:fj", + "size": 148111, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2632847862, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\na logo for a burguer shop, burguer, food,\n),LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1aef035d-7213-4e47-9bfb-c228af40644b/width=450/2828412.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U5C*astk2[tkK5a|%1j[7dbH:+jZ~Vj[57WB", + "type": "image", + "metadata": { + "hash": "U5C*astk2[tkK5a|%1j[7dbH:+jZ~Vj[57WB", + "size": 328356, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2843774373, + "Model": "sd_xl_base_1.0_0.9vae", 
+ "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\nA logo for a yoga studio, lotus position, soft and tranquil colors\n),LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/570f0408-1390-4f4e-a8f3-0f145715a037/width=450/2828413.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U4Nv[0?a+:?G~pj@ITj[vcWBKnofMcj[_4ay", + "type": "image", + "metadata": { + "hash": "U4Nv[0?a+:?G~pj@ITj[vcWBKnofMcj[_4ay", + "size": 374874, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 4013720250, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "logo,\na logo for a coffe shop, coffe\n,LogoRedAF\n,", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated", + "\"LogoRedmondV2-Logo-LogoRedmAF": "73ea578ffb4f\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/177492" +} \ No newline at end of file diff --git a/LogoRedmondV2-Logo-LogoRedmAF.preview.png b/LogoRedmondV2-Logo-LogoRedmAF.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..79be96098cf39f5d6abbaba5fc0783830c78b727 Binary files /dev/null and b/LogoRedmondV2-Logo-LogoRedmAF.preview.png differ diff --git a/LogoRedmondV2-Logo-LogoRedmAF.safetensors b/LogoRedmondV2-Logo-LogoRedmAF.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a33aa18184f754e4ad2658bf3fe801075bd09e9b --- /dev/null +++ b/LogoRedmondV2-Logo-LogoRedmAF.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55f7f408831c34f84857b1a728d1dd70b76083d80e7279e22097ca5ac4430e13 +size 170540028 diff --git a/M_niji_XL.civitai.info b/M_niji_XL.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..83a6fcb15d9f9ece110406b483bb054166d39f86 --- /dev/null +++ b/M_niji_XL.civitai.info @@ -0,0 +1,446 @@ +{ + "id": 142343, + "modelId": 129816, + "name": "v1.0", + "createdAt": "2023-08-17T06:16:07.899Z", + "updatedAt": "2023-08-17T06:22:28.914Z", + "status": "Published", + "publishedAt": "2023-08-17T06:22:28.911Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1241, + "ratingCount": 194, + "rating": 4.99, + "thumbsUpCount": 292 + }, + "model": { + "name": "M_niji_XL \u5c3c\u57fa\u6bd4XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 104749, + "sizeKB": 223104.02734375, + "name": "M_niji_XL.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-17T06:28:47.056Z", + 
"hashes": { + "AutoV1": "9884AE0C", + "AutoV2": "8E00176159", + "SHA256": "8E00176159390744E3849199D314E827BB45ECBB6B5794ABE31E8B3C2951D1CC", + "CRC32": "BD3D7079", + "BLAKE3": "137A2576188E666BC529F397A63F348511BC39D8232D6FD21034019ECE48C1DE" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/142343" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ef5993e5-d9a3-4a70-ab12-036828954e2e/width=450/2068518.jpeg", + "nsfw": "None", + "width": 960, + "height": 1280, + "hash": "U9EVdYF|00-A$GeS0fO?1YrrVZo}~8kqD,+]", + "type": "image", + "metadata": { + "hash": "U9EVdYF|00-A$GeS0fO?1YrrVZo}~8kqD,+]", + "size": 1584369, + "width": 960, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "ENSD": "31337", + "Size": "960x1280", + "seed": 2122884612, + "\"niji": "6efe5a8f4634\"", + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 20, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": ",1girl,smile,niji,", + "Version": "v1.5.1", + "sampler": "Euler a", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "name": "niji", + "type": "lora", + "weight": 0.7 + }, + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f0d8444f-b220-4ae0-91f7-a749bd4e3b82/width=450/2068529.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UOHm}m-n}@9aEOShOX$%00E3E1-o-Pr=VsIq", + "type": "image", + "metadata": { + "hash": "UOHm}m-n}@9aEOShOX$%00E3E1-o-Pr=VsIq", + "size": 1079517, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 3186754108, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,female,,izakaya,glow,back light,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ac2b3ee6-315e-4e5b-812f-0f8cfa96e96a/width=450/2068523.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UNJjb[OEyWcZpIF30$W-Ej$LrD$eYkrqEjkX", + "type": "image", + "metadata": { + "hash": "UNJjb[OEyWcZpIF30$W-Ej$LrD$eYkrqEjkX", + "size": 1055170, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 2598148802, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "1girl,niji,looking_at_viewer,high saturation,,cowboy_shot,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c7fddd09-f34e-405b-96bc-32e7cd0590d1/width=450/2068521.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UED[uILM53$L}YoZEIRj00+ERh5[3rs=R4OH", + "type": "image", + "metadata": { + "hash": "UED[uILM53$L}YoZEIRj00+ERh5[3rs=R4OH", + "size": 1034965, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": 
"768x1024", + "seed": 1671898641, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,looking_at_viewer,high saturation,female,,stage,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/29c03087-155b-4f12-9c04-73407a66d3e6/width=450/2068525.jpeg", + "nsfw": "None", + "width": 784, + "height": 1048, + "hash": "UBD+3l02M}tQ~B0LwvxuD,%e?ENfNZazMytk", + "type": "image", + "metadata": { + "hash": "UBD+3l02M}tQ~B0LwvxuD,%e?ENfNZazMytk", + "size": 1094804, + "width": 784, + "height": 1048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "784x1048", + "seed": 2772276575, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,female,,(glow:0.8),(back light:0.8),coffee house,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0fae0f0a-fd2a-415a-9854-8c3ee9d79821/width=450/2068532.jpeg", + "nsfw": "None", + "width": 784, + "height": 1048, + "hash": "UJFh^#^+EKD*~B?GD%NGn$%2-;t7$*t7xuRj", + "type": "image", + "metadata": { + "hash": "UJFh^#^+EKD*~B?GD%NGn$%2-;t7$*t7xuRj", + "size": 1034343, + "width": 784, + "height": 1048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "ENSD": "31337", + "Size": "784x1048", + "seed": 2883200428, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 37, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,,female,hakama_skirt,", + "Version": "v1.5.1", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2b9cd009-2b24-41de-821a-73c42333efc7/width=450/2068527.jpeg", + "nsfw": "None", + "width": 784, + "height": 1048, + "hash": "UPKusS-=?^PA~Wpc%gNbKjY5XRTK#8RjD%Sh", + "type": "image", + "metadata": { + "hash": "UPKusS-=?^PA~Wpc%gNbKjY5XRTK#8RjD%Sh", + "size": 1127367, + "width": 784, + "height": 1048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "784x1048", + "seed": 1637937426, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,,teenager,female,raincoat,windbreaker,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/23a93155-a5d9-4ac1-93d3-30b974171b33/width=450/2068517.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UDEU=@:=003M-txSDzRqBfX-IUzzPra6DsO:", + "type": "image", + "metadata": { + "hash": "UDEU=@:=003M-txSDzRqBfX-IUzzPra6DsO:", + "size": 1003548, + "width": 768, + "height": 1024 + }, + "availability": 
"Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 2567939841, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,looking_at_viewer,high saturation,,wide_shot,solo,adorable_girl,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ee0bdd4b-67fe-49ee-b5d5-6b19eca99aad/width=450/2068519.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UTFZNMOay?xt.mozP9k8V|smbus9IAVsZ$WB", + "type": "image", + "metadata": { + "hash": "UTFZNMOay?xt.mozP9k8V|smbus9IAVsZ$WB", + "size": 1141279, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 3744380702, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,looking_at_viewer,high saturation,female,,soldier,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/761a197f-e512-4d7e-a945-68378b4f8c58/width=450/2068520.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UJF;{n~V]}gP-:-WM_xV004o9aS0ImE2$RI.", + "type": "image", + "metadata": { + "hash": "UJF;{n~V]}gP-:-WM_xV004o9aS0ImE2$RI.", + "size": 1043076, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 636493348, + "Model": "sd_xl_base_1.0_fixvae_V2_fp16", + "steps": 50, + "hashes": { + "model": "1fa5725f4f" + }, + "prompt": "niji,looking_at_viewer,high saturation,female,,otaku room,", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "1fa5725f4f", + "name": "sd_xl_base_1.0_fixvae_V2_fp16", + "type": "model" + } + ], + "Model hash": "1fa5725f4f", + "\"niji-000008": "7fc6366dfd6d\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/142343" +} \ No newline at end of file diff --git a/M_niji_XL.preview.png b/M_niji_XL.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..dbc21f5d53d413508d620daace216af47578262a Binary files /dev/null and b/M_niji_XL.preview.png differ diff --git a/M_niji_XL.safetensors b/M_niji_XL.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..11b0ef0bc5aa0bd703999e99a32d1ba65b55433a --- /dev/null +++ b/M_niji_XL.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8e00176159390744e3849199d314e827bb45ecbb6b5794abe31e8b3c2951d1cc +size 228458524 diff --git a/NicolaSamori.civitai.info b/NicolaSamori.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..919ddafcaf6e49dde93ea62cc232ecda7f9de32f --- /dev/null +++ b/NicolaSamori.civitai.info @@ -0,0 +1,758 @@ +{ + "id": 181422, + "modelId": 161205, + "name": "DarkArts", + "createdAt": "2023-10-13T04:10:22.756Z", + "updatedAt": "2023-10-13T19:05:26.916Z", + "status": "Published", + "publishedAt": "2023-10-13T19:05:26.910Z", + "trainedWords": [], + "trainingStatus": 
"Approved", + "trainingDetails": { + "type": "Style", + "params": { + "unetLR": 0.0005, + "clipSkip": 1, + "loraType": "lora", + "keepTokens": 0, + "networkDim": 32, + "numRepeats": 6, + "resolution": 1024, + "lrScheduler": "cosine_with_restarts", + "minSnrGamma": 5, + "targetSteps": 525, + "enableBucket": true, + "networkAlpha": 16, + "optimizerArgs": "weight_decay=0.1", + "optimizerType": "AdamW8Bit", + "textEncoderLR": 5e-05, + "maxTrainEpochs": 10, + "shuffleCaption": false, + "trainBatchSize": 4, + "flipAugmentation": false, + "lrSchedulerNumCycles": 3 + }, + "baseModel": "sdxl" + }, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 5758, + "ratingCount": 524, + "rating": 4.98, + "thumbsUpCount": 997 + }, + "model": { + "name": "Dark Art Style", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 139134, + "sizeKB": 5608.1162109375, + "name": "181422_training_data.zip", + "type": "Training Data", + "metadata": { + "format": "Other", + "numImages": 35, + "ownRights": false, + "numCaptions": 0, + "shareDataset": false, + "trainingResults": { + "epochs": [ + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000001.safetensors", + "epoch_number": 1, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082340_e000001_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082400_e000001_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082420_e000001_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000002.safetensors", + "epoch_number": 2, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082736_e000002_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082756_e000002_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013082816_e000002_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000003.safetensors", + "epoch_number": 3, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083130_e000003_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083150_e000003_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083210_e000003_02.png" + } + ] + }, + { 
+ "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000004.safetensors", + "epoch_number": 4, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083524_e000004_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083544_e000004_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083604_e000004_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000005.safetensors", + "epoch_number": 5, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083920_e000005_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013083940_e000005_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084000_e000005_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000006.safetensors", + "epoch_number": 6, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084315_e000006_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084335_e000006_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084355_e000006_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000007.safetensors", + "epoch_number": 7, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084709_e000007_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084729_e000007_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013084749_e000007_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000008.safetensors", + "epoch_number": 8, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085105_e000008_00.png" + }, + { + "prompt": 
"NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085125_e000008_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085144_e000008_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori-000009.safetensors", + "epoch_number": 9, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085459_e000009_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085519_e000009_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085539_e000009_02.png" + } + ] + }, + { + "model_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori.safetensors", + "epoch_number": 10, + "sample_images": [ + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085850_e000010_00.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085910_e000010_01.png" + }, + { + "prompt": "NicolaSamori", + "image_url": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori_20231013085930_e000010_02.png" + } + ] + } + ], + "history": [ + { + "time": "2023-10-13T04:10:54.440Z", + "jobId": "f2bc1914-cfce-44e7-b6a5-3819f42b5a4d", + "status": "Submitted", + "jobToken": "eyJRdWV1ZU5hbWUiOiJjaXZpdGFpOjEwMTA1NUAxMjgwNzgiLCJKb2JJZCI6ImYyYmMxOTE0LWNmY2UtNDRlNy1iNmE1LTM4MTlmNDJiNWE0ZCJ9" + }, + { + "time": "2023-10-13T04:13:22.641Z", + "jobId": "f2bc1914-cfce-44e7-b6a5-3819f42b5a4d", + "status": "Processing", + "message": "", + "jobToken": "eyJRdWV1ZU5hbWUiOiJjaXZpdGFpOjEwMTA1NUAxMjgwNzgiLCJKb2JJZCI6ImYyYmMxOTE0LWNmY2UtNDRlNy1iNmE1LTM4MTlmNDJiNWE0ZCJ9" + }, + { + "time": "2023-10-13T09:00:49.372Z", + "jobId": "f2bc1914-cfce-44e7-b6a5-3819f42b5a4d", + "status": "InReview", + "message": "Job complete", + "jobToken": "eyJRdWV1ZU5hbWUiOiJjaXZpdGFpOjEwMTA1NUAxMjgwNzgiLCJKb2JJZCI6ImYyYmMxOTE0LWNmY2UtNDRlNy1iNmE1LTM4MTlmNDJiNWE0ZCJ9" + } + ], + "attempts": 0, + "end_time": "2023-10-13T09:00:43.750Z", + "start_time": "2023-10-13T04:13:22.313Z" + } + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-13T04:15:52.138Z", + "hashes": { + "AutoV1": "1EA6A380", + "AutoV2": "22D992E4D2", + "SHA256": "22D992E4D26567986C1F56810D4C565E8D30721EE9BAF70134620AAB331FA23C", + "CRC32": "13C23B88", + "BLAKE3": "006DE27946CB6BB0F6AC1D6FC5D95A97C01F5FBA441B09DAA38DD6DFB59432E5" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/181422" + }, + { + "id": 139561, + "sizeKB": 223099.64453125, + "name": "NicolaSamori.safetensors", + "type": "Model", + "metadata": { + "format": "SafeTensor", 
+ "selectedEpochUrl": "https://image-generation.civitai.com/v1/consumer/jobs/f2bc1914-cfce-44e7-b6a5-3819f42b5a4d/assets/NicolaSamori.safetensors" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-13T19:00:48.012Z", + "hashes": { + "AutoV1": "247CF4A0", + "AutoV2": "FB1212A747", + "SHA256": "FB1212A74745BBA38E6B9AC4FCC832461041360E963BF74123461396A8534F26", + "CRC32": "254A52C8", + "BLAKE3": "EB9EB39C0AA6A0F2A28A153DE28E30FBEEEEE3A16A3E76DCAFB5471E51332B4C" + }, + "primary": false, + "downloadUrl": "https://civitai.com/api/download/models/181422?type=Model&format=SafeTensor" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/134fa377-2d14-4b9f-bc61-ef5f2b852630/width=450/2932123.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UJDJ3Et7~Vt7xtWBt7oe?Hj[Rjj[~Vofxuof", + "type": "image", + "metadata": { + "hash": "UJDJ3Et7~Vt7xtWBt7oe?Hj[Rjj[~Vofxuof", + "size": 1122008, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 2069017666, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a ominous prismatic diamond shaped contraption with bundles of wires attached to extracting praying monks , machine claws, descending to below, dark black paint vibe, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6d4ed993-8266-4f61-affd-797a96f8c24d/width=450/2932126.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U796%X$*009t}sjZ5kX801R*?vxa9FjZ.7W;", + "type": "image", + "metadata": { + "hash": "U796%X$*009t}sjZ5kX801R*?vxa9FjZ.7W;", + "size": 1078519, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 1259136073, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a giant skull with a woman's fleshy mouth bright red bloody lips, dripping lipstick plump lips , (surreal deep black background:1.2) of parallax space between distorted waves and line drawings , separation of colors, in the style of nicola samori", + "Version": "v1.6.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5, + "Mask blur": "4", + "resources": [ + { + "name": "NicolaSamori", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "\"NicolaSamori": "002696f2abc2\"", + "negativePrompt": "(teeth:1.3)", + "Denoising strength": "0.71", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c0ece326-dc10-45f6-9947-0b6a946b07c8/width=450/2932116.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U15r0%.800004TV@byNG9aR*~q-=00M{-oxa", + "type": "image", + "metadata": { + "hash": "U15r0%.800004TV@byNG9aR*~q-=00M{-oxa", + "size": 865474, + "width": 
896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 2269602640, + "Model": "sdvn7Nijistylexl_v1", + "steps": 36, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a very skinny woman wearing long sleek dress with leather thigh high strapped boots , , long hair cut, posing, lipstick plump lips , surreal dark smeared background, black and gold mutations tears in fabric , separation of colors, in the style of nicola samori , glow effects, godrays, Hand drawn, render, 8k, octane render, cinema 4d, blender, dark, atmospheric 4k ultra detailed, cinematic sensual, Sharp focus, humorous illustration, big depth of field, Masterpiece, colors, 3d octane render, 4k, concept art, trending on artstation, hyperrealistic, Vivid colors, extremely detailed CG unity 8k wallpaper, trending on ArtStation, trending on CGSociety, Intricate, High Detail, dramatic, absurdes, a realistic baroque bedroom, white canopy over bed, white drapes, damask patterns, intricate details, in the style of Vittorio Matteo Corcos", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 13, + "resources": [ + { + "name": "punk1", + "type": "lora", + "weight": 1 + }, + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4859e435-c9a0-4282-aafd-66c79e904ab1/width=450/2932127.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U25ONe%M01IU~qofD%RjD*Rj%Lxt9FRj_2%M", + "type": "image", + "metadata": { + "hash": "U25ONe%M01IU~qofD%RjD*Rj%Lxt9FRj_2%M", + "size": 1372508, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 2488654177, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a other world space aliens, tentacles and slimy appearance, disgusting , vicious face, skin dripping off into space , in a decrepit throne room, wave distortions, glitched , split screen , scraping canvas, deformed face, , impressionist, dark vibe, in the style of nicola samori", + "Version": "v1.6.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 10, + "resources": [ + { + "name": "NicolaSamori", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "\"NicolaSamori": "002696f2abc2\"", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/526b7fe6-53b6-483e-8af3-82ae8456c830/width=450/2932121.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U24_zz009E~qD%%MM{R%00?v-;4n%MIBx]xu", + "type": "image", + "metadata": { + "hash": "U24_zz009E~qD%%MM{R%00?v-;4n%MIBx]xu", + "size": 1043715, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 1710418566, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a agonized madame bovary after consuming poison , body laid out on ground dramatically , victorian era clothing, 
gothic terror, smeared dream like black background, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "negativePrompt": "( person:1.3)", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4857ccd5-665c-445f-9c4f-27e862d3f90e/width=450/2932115.jpeg", + "nsfw": "X", + "width": 896, + "height": 1152, + "hash": "U25hV;IU00_300xt~pD%%Lxat7IU%MIoD%?H", + "type": "image", + "metadata": { + "hash": "U25hV;IU00_300xt~pD%%Lxat7IU%MIoD%?H", + "size": 1303970, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 3220599769, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a nude woman praying in a decrepit church, chained to the ground by large steel chains tightly tangling around nude body , smeared and mutated composition , impressionist, deep black background,, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6b9c2c4f-14be-4792-8795-304da6e04be3/width=450/2932125.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U05q^h_N0000004o^*_200Mx~p~qjXbI-:oL", + "type": "image", + "metadata": { + "hash": "U05q^h_N0000004o^*_200Mx~p~qjXbI-:oL", + "size": 1499628, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 2803610843, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a lab with chemical devices, grignard contraption experiment with octopus tentacles escaping the round flask, smeared dream like black background, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "negativePrompt": "( person:1.3)", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b1b0a1a2-a832-4394-bddb-21d34c5e5a25/width=450/2932124.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U471p2%N00bb_4RkIV?bD%Rj?H-pD%t7-;M{", + "type": "image", + "metadata": { + "hash": "U471p2%N00bb_4RkIV?bD%Rj?H-pD%t7-;M{", + "size": 1339489, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 264345261, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a queen in agony, pale white skin dripping off into space , in a decrepit throne room, wave distortions, glitched , split screen , scraping canvas, deformed face, , impressionist, dark vibe, in the style of 
nicola samori", + "Version": "v1.6.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 10, + "resources": [ + { + "name": "NicolaSamori", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "\"NicolaSamori": "002696f2abc2\"", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e3c7b545-624a-4f8f-b4f1-eb37ad3bf281/width=450/2932117.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U44_^Nt78wRjRNayNHWB?wofIUayMwaybcay", + "type": "image", + "metadata": { + "hash": "U44_^Nt78wRjRNayNHWB?wofIUayMwaybcay", + "size": 1399070, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 3133065552, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a glitched out castle, not rendering properly , destroyed hologram flickering , dark tear in space dripping to ground,, background of an upside down interior room gothic, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "negativePrompt": "( person:1.3)", + "Token merging ratio": "0.2" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4fb453e0-af99-41e8-aca3-76927bec4db9/width=450/2932128.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U57KSYtl00H?~qo}8_Q-s:jZNGa|9ZaKxuo}", + "type": "image", + "metadata": { + "hash": "U57KSYtl00H?~qo}8_Q-s:jZNGa|9ZaKxuo}", + "size": 1517724, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae(1).safetensors", + "NGMS": "0.33", + "Size": "896x1152", + "seed": 2779860172, + "Model": "sdvn7Nijistylexl_v1", + "steps": 25, + "hashes": { + "model": "6e0ba74ac1" + }, + "prompt": "painting of a giant skull with a woman's fleshy mouth bright red bloody lips, dripping lipstick (plump lips:1.2) , surreal dark black background of parallax space between distorted waves and line drawings , separation of colors, in the style of nicola samori ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "235745af8d", + "cfgScale": 5.5, + "resources": [ + { + "hash": "6e0ba74ac1", + "name": "sdvn7Nijistylexl_v1", + "type": "model" + } + ], + "Model hash": "6e0ba74ac1", + "negativePrompt": "(teeth:1.5)", + "Token merging ratio": "0.2" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/181422" +} \ No newline at end of file diff --git a/NicolaSamori.preview.png b/NicolaSamori.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..04cd0b66186c8a5228b0f6599d8ff0c9a3707c7b Binary files /dev/null and b/NicolaSamori.preview.png differ diff --git a/NicolaSamori.safetensors b/NicolaSamori.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0ef11773590a0ce76f10e237c98ee365e63eea82 --- /dev/null +++ b/NicolaSamori.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb1212a74745bba38e6b9ac4fcc832461041360e963bf74123461396a8534f26 +size 228454036 diff --git a/ParchartXL-2.0.civitai.info 
b/ParchartXL-2.0.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..92bcc2a3e72822f6c065637657d27792470881c6 --- /dev/null +++ b/ParchartXL-2.0.civitai.info @@ -0,0 +1,421 @@ +{ + "id": 318677, + "modelId": 141471, + "name": "v2.0", + "createdAt": "2024-01-29T05:49:31.130Z", + "updatedAt": "2024-01-29T14:26:43.662Z", + "status": "Published", + "publishedAt": "2024-01-29T06:02:05.868Z", + "trainedWords": [ + "on parchment" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "
approaching a final version
", + "stats": { + "downloadCount": 5213, + "ratingCount": 388, + "rating": 4.98, + "thumbsUpCount": 809 + }, + "model": { + "name": "ParchartXL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 252115, + "sizeKB": 104232.078125, + "name": "ParchartXL-2.0.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-01-29T05:55:47.587Z", + "hashes": { + "AutoV1": "F2DBEE0C", + "AutoV2": "3149502826", + "SHA256": "3149502826BB1CC7787BAEE766F603FA78D570801D5D7C5DF70CCBA887EFC1E5", + "CRC32": "EEA06CBE", + "BLAKE3": "18FF41B0D92E6EFD550399C5088627E16A984F62BC311795FEBB30B499D1A839", + "AutoV3": "CF5F050163F3D7A9EB5FBC5EA24247BA53F3106CBD9D1EDDDE8559C47ACA8081" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/318677" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/88ec3d19-9818-4c37-b738-2af4ee7d9ea3/width=450/6009719.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U4L47y-nHX-oElD*00aK|.RP0f%L00%M.8E3", + "type": "image", + "metadata": { + "hash": "U4L47y-nHX-oElD*00aK|.RP0f%L00%M.8E3", + "size": 1901490, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1012358797181047, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"},\"56\":{\"inputs\":{\"seed\":1012358797181047,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1012358797181047,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":1012358797181047,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":63,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select 
SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[1012358797181047,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[726,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock 
Exchange\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "ink and watercolor on parchment baby dragon playfully causing chaos on the floor of the New York Stock Exchange", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/54f12da7-25d9-438e-85de-1a246c182d6e/width=450/6009677.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UDIqfID*~U%L4;Ip-oWBaKofD*WBWBR*%2xa", + "type": "image", + "metadata": { + "hash": "UDIqfID*~U%L4;Ip-oWBaKofD*WBWBR*%2xa", + "size": 1502875, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 470150025377076, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"black ink on parchment dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":470150025377076,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink on parchment dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":470150025377076,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink on parchment dark moody minimal portrait of a model emerging from the 
shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":470150025377076,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink on parchment dark moody minimal portrait of a model emerging from the shadows\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":false},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":false},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":false},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"black ink on parchment dark moody minimal portrait of a model emerging from the shadows\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":false},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[470150025377076,\"fixed\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":false},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":false},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"black ink on parchment dark moody minimal portrait of a model emerging from the shadows\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[741,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "black ink on parchment dark moody minimal portrait of a model emerging from the shadows", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/87ef79d7-907e-4f35-ac01-8b99322e49d3/width=450/6009684.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UOIg_l^*M{IV~UaefPRkRkM|Iot6jYM{V[%2", + "type": "image", + "metadata": { + "hash": "UOIg_l^*M{IV~UaefPRkRkM|Iot6jYM{V[%2", + "size": 1786985, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 874623202900644, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and flowers\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":874623202900644,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and flowers\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":874623202900644,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light 
sharp details crown of iron bands and flowers\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":874623202900644,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and flowers\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 
portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":false},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":false},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":false},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and 
flowers\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":false},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":false},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":false},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[874623202900644,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[741,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for 
S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and flowers\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment closeup portrait of the luminous queen of faerie with her armored guardsmen marble and onyx throne kintsugi detailing magical realm fantasy scene dramatic light sharp details crown of iron bands and flowers", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9abf792b-080e-40c9-ba89-b168ef9c3de1/width=450/6009683.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UBO{mh_M$y={^+kBxaRj~A%LE2IVs8IokWt6", + "type": "image", + "metadata": { + "hash": "UBO{mh_M$y={^+kBxaRj~A%LE2IVs8IokWt6", + "size": 1431664, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 341863197077753, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":341863197077753,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":341863197077753,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":341863197077753,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative 
art\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":false},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":false},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":false},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on 
values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":false},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack (rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":false},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack 
(rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":false},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[341863197077753,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[741,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment black in lines and wash illustration of a chickadee on a cherry blossom tree branch minimal meditative art", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/80afe4d7-ce91-4288-863e-9e76a569e505/width=450/6009682.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UWI;U~%1~TWC%2WCWBoe$%WCM|j@oeoea|WC", + "type": "image", + "metadata": { + "hash": "UWI;U~%1~TWC%2WCWBoe$%WCM|j@oeoea|WC", + "size": 1483179, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 536921439756175, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": 
"{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":536921439756175,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack 
(rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":536921439756175,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":536921439756175,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 
1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[536921439756175,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[726,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait \"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment Kenku wizard design Dungeons and Dragons inspired fantasy theme full body portrait ", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d7f3828a-66d2-4c78-a9f1-cf2099a3cc51/width=450/6009685.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UQLDY%t6~Ut6-:RkR+fk-oxaIoWBRkayoeoe", + "type": "image", + "metadata": { + "hash": "UQLDY%t6~Ut6-:RkR+fk-oxaIoWBRkayoeoe", + "size": 1500116, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 978598381680413, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":978598381680413,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme 
standing\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":978598381680413,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":978598381680413,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 
640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[978598381680413,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[726,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment female dragonborn character design Dungeons and Dragons inspired fantasy theme standing", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b8e7aa64-5b1f-4b0b-b96c-07bc838584f3/width=450/6009681.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "USHK]{-oDjIp~nkCICWBeUf7I@afDkRkb]t6", + "type": "image", + "metadata": { + "hash": "USHK]{-oDjIp~nkCICWBeUf7I@afDkRkb]t6", + "size": 1639128, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 524944525613421, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":524944525613421,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical 
forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":524944525613421,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":524944525613421,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":false},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 
1472\\n9:21 640 x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":false},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":false},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":false},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":false},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":false},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[524944525613421,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[741,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment bottle filled with glowing luminous magical potion on a rustic bench background magical forest", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/64a63910-ed1e-477b-9c77-75d30ed87c72/width=450/6009686.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "U7HKUlxZR+~ANHxZ^%t6-.9b~U-o^*?F%19u", + "type": "image", + "metadata": { + "hash": "U7HKUlxZR+~ANHxZ^%t6-.9b~U-o^*?F%19u", + "size": 1924681, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 180602326097116, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":180602326097116,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk 
\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":180602326097116,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":180602326097116,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk \",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 
1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk \"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[180602326097116,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[726,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk \"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment isometric concept drawing of the kitchen interior of a scary village house at dusk ", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f9c2610c-3142-4e31-af4b-ef80724536bd/width=450/6009715.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UJLgX%?Z$zof~Uj]M|t6~V%LWCM|D*R*%Maf", + "type": "image", + "metadata": { + "hash": "UJLgX%?Z$zof~Uj]M|t6~V%LWCM|D*R*%Maf", + "size": 1593876, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 697418438647040, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":697418438647040,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi 
fish\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":697418438647040,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":697418438647040,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.55,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":true},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 
x 1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":true},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":true},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":true},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{\"pinned\":true},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":true},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":true},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.55,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":true},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[697418438647040,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[749,431],\"size\":{\"0\":769,\"1\":577},\"flags\":{\"pinned\":true},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":true},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "black ink outline and watercolor on parchment rowboat on a still pond with lily pads and koi fish", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0762408d-df58-4855-8270-8bb1fd9b1862/width=450/6009711.jpeg", + "nsfw": "None", + "width": 832, + "height": 1216, + "hash": "UFMZN%=_9GR-~V%LWBV@-.ENxtxs9GIURloz", + "type": "image", + "metadata": { + "hash": "UFMZN%=_9GR-~V%LWBV@-.ENxtxs9GIURloz", + "size": 1579330, + "width": 832, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 269410711265306, + "vaes": [], + "Model": "sd_xl_base_1.0", + "comfy": "{\"prompt\":{\"4\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"6\":{\"inputs\":{\"text\":\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"49\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"},\"55\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"56\":{\"inputs\":{\"seed\":269410711265306,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned 
plate\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"57\":{\"inputs\":{\"width\":1280,\"height\":1280,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"58\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":269410711265306,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select 
SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"62\":{\"inputs\":{\"filename_prefix\":\"SDXL-Lora-NoRefiner\",\"file_type\":\"PNG\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":269410711265306,\"steps\":28,\"cfg\":4.7,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"},\"positive\":{\"inputs\":{\"text\":\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"\",\"clip\":{\"inputs\":{\"lora_01\":\"ParchartXL-2.0.safetensors\",\"strength_01\":1,\"lora_02\":\"None\",\"strength_02\":0.35,\"lora_03\":\"None\",\"strength_03\":1,\"lora_04\":\"None\",\"strength_04\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Lora Loader Stack (rgthree)\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"ratio_selected\":\"2:3 [832x1216 portrait]\",\"batch_size\":2},\"class_type\":\"Empty Latent Ratio Select SDXL\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"sd_xl_base_1.0.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImageExtended\"}},\"workflow\":{\"last_node_id\":62,\"last_link_id\":71,\"nodes\":[{\"id\":49,\"type\":\"Empty Latent Ratio Select SDXL\",\"pos\":[21.980016937255833,759.3601147460939],\"size\":{\"0\":319.20001220703125,\"1\":82},\"flags\":{\"collapsed\":false,\"pinned\":false},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[70],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Empty Latent Ratio Select SDXL\"},\"widgets_values\":[\"2:3 [832x1216 portrait]\",2],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":42,\"type\":\"Note\",\"pos\":[29.980016937255847,1098.360114746094],\"size\":{\"0\":260,\"1\":210},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":1,\"mode\":0,\"title\":\"Note - Empty Latent Image\",\"properties\":{\"text\":\"\"},\"widgets_values\":[\"LANDSCAPE BASE RESOLUTION\\n3:2 1216 x 832 1:1 1024x1024\\n4:3 1152 x 896\\n8:5 1216 x 768\\n16:9 1344 x 768\\n19:9 1472 x 704\\n21:9 1536 x 640\\n\\nPORTRAIT\\n2:3 832 x 1216\\n3:4 896 x 1152\\n5:8 768 x 1216\\n9:16 768 x 1344\\n9:19 704 x 1472\\n9:21 640 x 
1536\\n\\nnot precise math here - but these dimensions reflect image sizes SAI used to train SDXL\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":57,\"type\":\"EmptyLatentImage\",\"pos\":[20.980016937255836,941.3601147460939],\"size\":{\"0\":315,\"1\":106},\"flags\":{\"pinned\":false},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1280,1280,1],\"color\":\"#291529\",\"bgcolor\":\"#3d293d\",\"shape\":1},{\"id\":14,\"type\":\"PrimitiveNode\",\"pos\":[27.400015068054174,498.6200128173828],\"size\":{\"0\":300,\"1\":160},\"flags\":{\"pinned\":false},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[18],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Negative Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"\"],\"color\":\"#322\",\"bgcolor\":\"#533\",\"shape\":1},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[437,428],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":59},{\"name\":\"text\",\"type\":\"STRING\",\"link\":16,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[63],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[438,468],\"size\":{\"0\":210,\"1\":54},\"flags\":{\"collapsed\":true,\"pinned\":false},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":60},{\"name\":\"text\",\"type\":\"STRING\",\"link\":18,\"widget\":{\"name\":\"text\"},\"slot_index\":1}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[62],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"\"],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":58,\"type\":\"VAEDecode\",\"pos\":[432,514],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":66},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":67}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[71],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#233\",\"bgcolor\":\"#355\",\"shape\":1},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[18.610643461792005,88.68996839242544],\"size\":{\"0\":350,\"1\":100},\"flags\":{\"pinned\":false},\"order\":4,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[56],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[57],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[67],\"slot_index\":2}],\"title\":\"Load Checkpoint - BASE\",\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"sd_xl_base_1.0.safetensors\"],\"color\":\"#323\",\"bgcolor\":\"#535\",\"shape\":1},{\"id\":55,\"type\":\"Lora Loader Stack 
(rgthree)\",\"pos\":[452,53],\"size\":{\"0\":420.0951232910156,\"1\":246},\"flags\":{\"pinned\":false},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":56},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":57}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[64],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[59,60],\"shape\":3,\"slot_index\":1}],\"properties\":{\"Node name for S&R\":\"Lora Loader Stack (rgthree)\"},\"widgets_values\":[\"ParchartXL-2.0.safetensors\",1,\"None\",0.35,\"None\",1,\"None\",1],\"color\":\"#2a363b\",\"bgcolor\":\"#3f5159\",\"shape\":1},{\"id\":56,\"type\":\"KSampler\",\"pos\":[380,707],\"size\":{\"0\":315,\"1\":262},\"flags\":{\"pinned\":false},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":64},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":63},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":62},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":70}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[66],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[269410711265306,\"randomize\",28,4.7,\"dpmpp_sde\",\"karras\",1],\"color\":\"#223\",\"bgcolor\":\"#335\",\"shape\":1},{\"id\":62,\"type\":\"SaveImageExtended\",\"pos\":[741,429],\"size\":{\"0\":769,\"1\":577},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":71}],\"properties\":{\"Node name for S&R\":\"SaveImageExtended\"},\"widgets_values\":[\"SDXL-Lora-NoRefiner\",\"PNG\"],\"color\":\"#232020\",\"bgcolor\":\"#373434\",\"shape\":1},{\"id\":13,\"type\":\"PrimitiveNode\",\"pos\":[27.20003185272222,296.6200585937502],\"size\":{\"0\":297.8631286621094,\"1\":160.9127655029297},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[16],\"widget\":{\"name\":\"text\"},\"slot_index\":0}],\"title\":\"Positive Prompt (Text)\",\"properties\":{\"Run widget replace on values\":false},\"widgets_values\":[\"on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate\"],\"color\":\"#232\",\"bgcolor\":\"#353\",\"shape\":1}],\"links\":[[16,13,0,6,1,\"STRING\"],[18,14,0,7,1,\"STRING\"],[56,4,0,55,0,\"MODEL\"],[57,4,1,55,1,\"CLIP\"],[59,55,1,6,0,\"CLIP\"],[60,55,1,7,0,\"CLIP\"],[62,7,0,56,2,\"CONDITIONING\"],[63,6,0,56,1,\"CONDITIONING\"],[64,55,0,56,0,\"MODEL\"],[66,56,0,58,0,\"LATENT\"],[67,4,2,58,1,\"VAE\"],[70,49,0,56,3,\"LATENT\"],[71,58,0,62,0,\"IMAGE\"]],\"groups\":[{\"title\":\"Text Prompts\",\"bounding\":[7,214,343,454],\"color\":\"#3f789e\",\"font_size\":24},{\"title\":\"Load in BASE SDXL Model\",\"bounding\":[9,8,373,202],\"color\":\"#a1309b\",\"font_size\":24},{\"title\":\"Empty Latent Image\",\"bounding\":[8,674,342,440],\"color\":\"#a1309b\",\"font_size\":24}],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"56\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 28, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "on parchment happy rat eating freshly baked bread loaf and one slice on a patterned plate", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 4.7, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "additionalResources": [] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/318677" +} \ No newline at end of file diff --git 
a/ParchartXL-2.0.preview.png b/ParchartXL-2.0.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..2fb5117699c0448d4813a2d98df759ba9b423efa Binary files /dev/null and b/ParchartXL-2.0.preview.png differ diff --git a/ParchartXL-2.0.safetensors b/ParchartXL-2.0.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2994ca721238bdc0097bb2e006e211ccf3185863 --- /dev/null +++ b/ParchartXL-2.0.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3149502826bb1cc7787baee766f603fa78d570801d5d7c5df70ccba887efc1e5 +size 106733648 diff --git a/Perfect Hands v2.civitai.info b/Perfect Hands v2.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..bd7250b0e95c557be56486912292c48011d5cf48 --- /dev/null +++ b/Perfect Hands v2.civitai.info @@ -0,0 +1,480 @@ +{ + "id": 254267, + "modelId": 200255, + "name": "Hands v2.1", + "createdAt": "2023-12-07T20:38:59.492Z", + "updatedAt": "2024-02-28T04:16:49.191Z", + "status": "Published", + "publishedAt": "2023-12-07T20:45:19.062Z", + "trainedWords": [ + "Perfect Hands" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

An updated version.

Try playing with the LoRA weight to get the best possible results.

Also, keep in mind that the base model plays a very important part.

", + "stats": { + "downloadCount": 14310, + "ratingCount": 606, + "rating": 4.95, + "thumbsUpCount": 1302 + }, + "model": { + "name": "Hands XL + SD 1.5", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 197141, + "sizeKB": 445788.21484375, + "name": "Perfect Hands v2.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-12-07T20:45:55.113Z", + "hashes": { + "AutoV1": "CAFC1FFB", + "AutoV2": "6BF36964D8", + "SHA256": "6BF36964D8952B4F4A773B3FA65D32003085E7B05D095BAE81825DFA0F10F4A9", + "CRC32": "9CEF125C", + "BLAKE3": "18532245B1B58D2D6122A7DEC7E0EBD1E7B748AC9BDA3025EA81CA739EEA0A47", + "AutoV3": "8448755682F7315FB11756E771DC39D1DCFE8874EC10283CFFDED96E69D23D82" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/254267" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ebbf44fe-5b12-413b-8209-4a871020bbcd/width=450/4294058.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHHULUIWkV~CS~9u^*-o009ZNGNG~BM{D*WC", + "type": "image", + "metadata": { + "hash": "UHHULUIWkV~CS~9u^*-o009ZNGNG~BM{D*WC", + "size": 1041475, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2797862302, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na person holding a butterfly in their hand Perfect Hands, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5df7be56-cc1e-4b6c-9460-d7829d3b04cd/width=450/4294063.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7BON]4TG^5=B=K6H?vy0N=_zA%ME2NH%gX9", + "type": "image", + "metadata": { + "hash": "U7BON]4TG^5=B=K6H?vy0N=_zA%ME2NH%gX9", + "size": 906574, + "width": 1024, + "height": 1024 + }, + "availability": 
"Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3401011063, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na five fingers hand with a fist gesture on a blue background Perfect Hands, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a6127696-9485-478e-b5da-eef95148ba51/width=450/4294056.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAA1R}8_M{a#.Tw]M{MyDObvnhRjIAj@tRt7", + "type": "image", + "metadata": { + "hash": "UAA1R}8_M{a#.Tw]M{MyDObvnhRjIAj@tRt7", + "size": 1160698, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3252938027, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na futuristic five_fingers hand with a metal glove on Perfect Hands, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, 
deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d7a30786-f9dc-42ef-8c89-57d833829805/width=450/4294064.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UD9tS]_M%f%LtQ%M%L-pS#-:-:jsaKWBxaoy", + "type": "image", + "metadata": { + "hash": "UD9tS]_M%f%LtQ%M%L-pS#-:-:jsaKWBxaoy", + "size": 980033, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3035163542, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na vampire creature's idle detailed Perfect Hand with long nails , shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e1d0844d-1fc3-4463-a099-aa80d7cb991f/width=450/4294047.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCBW-~Ipp{?b${%1ENNeIns:n,R*S6NyxWwG", + "type": "image", + "metadata": { + "hash": "UCBW-~Ipp{?b${%1ENNeIns:n,R*S6NyxWwG", + "size": 1074378, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3569304184, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "Hyperrealistic art of \na woman in a blue dress pointing at the camera Perfect Hands, Extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": 
"simplified, abstract, unrealistic, impressionistic, low resolution, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Hyperrealism", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/37f1197b-76f7-47d2-8748-2b6b7c6e6e67/width=450/4294044.jpeg", + "nsfw": "X", + "width": 1024, + "height": 1024, + "hash": "UDGI4M~pGbD%PB9Zw[%1%MV@IAX9?at8IU$%", + "type": "image", + "metadata": { + "hash": "UDGI4M~pGbD%PB9Zw[%1%MV@IAX9?at8IU$%", + "size": 1138747, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1855762480, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "Hyperrealistic art of \na woman with Perfect Hands and a baseball cap is posing naked in front of camera, Extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "simplified, abstract, unrealistic, impressionistic, low resolution, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Hyperrealism", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a1792eaf-f950-4329-96a7-b74f94bc581f/width=450/4294051.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U4Dl4w[l02py00KQ570MMw%M}?]#5rIn~B?H", + "type": "image", + "metadata": { + "hash": "U4Dl4w[l02py00KQ570MMw%M}?]#5rIn~B?H", + "size": 1138896, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", 
+ "seed": 1045363989, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "Hyperrealistic art of \na cartoon picture of a woman with a microphone Perfect Hands, Extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 5.5, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "simplified, abstract, unrealistic, impressionistic, low resolution, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Hyperrealism", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/94c18b32-b1f1-4146-a639-a626a6925d15/width=450/4294041.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "U3C?1E000LIA0g?H=^9G00o}]#_2~V9aDi_3", + "type": "image", + "metadata": { + "hash": "U3C?1E000LIA0g?H=^9G00o}]#_2~V9aDi_3", + "size": 1054400, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 45990749, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na Perfect Hands woman with a big breast posing for a picture , shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed 
nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/08b94d44-75cf-4cf0-a928-c655b8f502ff/width=450/4294046.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "UWD+Se~Bw]%1xtxuf+NGM{IoIoNGjZV@V@oK", + "type": "image", + "metadata": { + "hash": "UWD+Se~Bw]%1xtxuf+NGM{IoIoNGjZV@V@oK", + "size": 1456789, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 83357724, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "Hyperrealistic art of \na woman holding a gun in front of a giant planet Perfect Hands, Extremely high-resolution details, photographic, realism pushed to extreme, fine texture, incredibly lifelike", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "simplified, abstract, unrealistic, impressionistic, low resolution, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Hyperrealism", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5fae9373-f74a-40b3-ac98-0ae7634e2a6c/width=450/4294059.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UDD0Dj?b}Rx].TM|8^IUx]%gr=kB58%2IVxu", + "type": "image", + "metadata": { + "hash": "UDD0Dj?b}Rx].TM|8^IUx]%gr=kB58%2IVxu", + "size": 1328956, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3096183861, + "Model": "realisticStockPhoto_v10", + "steps": 35, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \na batman is in the rain with a bat Perfect Hands, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.1", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"Perfect Hands": "8448755682f7\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal 
quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/254267" +} \ No newline at end of file diff --git a/Perfect Hands v2.preview.png b/Perfect Hands v2.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..0e38b2b90db91a4cd248da009b370d591e63ecc6 Binary files /dev/null and b/Perfect Hands v2.preview.png differ diff --git a/Perfect Hands v2.safetensors b/Perfect Hands v2.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..80983bd5aae362e81fa18b5d87d7fb2bef4ea0e0 --- /dev/null +++ b/Perfect Hands v2.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6bf36964d8952b4f4a773b3fa65d32003085e7b05d095bae81825dfa0f10f4a9 +size 456487132 diff --git a/PerfectEyesXL.civitai.info b/PerfectEyesXL.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..1ba8b532f4fa301c416a774795a72bea306976aa --- /dev/null +++ b/PerfectEyesXL.civitai.info @@ -0,0 +1,514 @@ +{ + "id": 128461, + "modelId": 118427, + "name": "v1.0", + "createdAt": "2023-07-29T09:44:46.924Z", + "updatedAt": "2023-09-22T13:36:59.194Z", + "status": "Published", + "publishedAt": "2023-07-29T09:55:32.553Z", + "trainedWords": [ + "green eyes", + "blue eyes", + "brown eyes", + "perfecteyes" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 15108, + "ratingCount": 658, + "rating": 4.97, + "thumbsUpCount": 1225 + }, + "model": { + "name": "Perfect Eyes XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 92996, + "sizeKB": 223099.60546875, + "name": "PerfectEyesXL.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-07-29T09:50:41.716Z", + "hashes": { + "AutoV1": "77EDFF4F", + "AutoV2": "F248364CF0", + "SHA256": "F248364CF0ACC7A00BC4B5145B44C0DFBF10E71E9B0EFB6B77F0956AEAF2411C", + "CRC32": "129F3961", + "BLAKE3": "AFF04F96DD855033C6B60E511005B5D050C88B01A74F784F54B16F8DC032EDE2" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/128461" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cb6d5e7f-ab3b-4e63-9e49-cacabd99dcb9/width=450/1772388.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UOLfN@~BM{=xoyX7-ot79axGkWj]?GozIpV@", + "type": "image", + "metadata": { + "hash": "UOLfN@~BM{=xoyX7-ot79axGkWj]?GozIpV@", + "width": 768, + "height": 1152 + }, + 
"availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1152", + "seed": 2652962572, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "hazel eye, close up, perfecteyes, woman", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e6414ebf-fe5b-4eaa-9dcd-39459bfc22f2/width=450/1772443.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UGJj9x]iKQ=tIn?Hi^%Mt7aKOZIo}?%2E1oz", + "type": "image", + "metadata": { + "hash": "UGJj9x]iKQ=tIn?Hi^%Mt7aKOZIo}?%2E1oz", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1152", + "seed": 2570918667, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "green eye, close up, perfecteyes, man", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/95089c2a-cf2c-499e-80c6-ce39bee6fc8b/width=450/1772394.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UQJjh@_NY5?H^jSgWA%2ELa#%2t7tRozxuso", + "type": "image", + "metadata": { + "hash": "UQJjh@_NY5?H^jSgWA%2ELa#%2t7tRozxuso", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3484494680, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "hazel eye, close up, perfecteyes, man", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/62f45669-bd0e-403d-a868-2fbb1987f06e/width=450/1772406.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CR5tRb^Se%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CR5tRb^Se%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1092613371, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, blue eyes, portrait perfecteyes", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" 
+ } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/67b1fc07-7944-42c1-a38a-88633ce1dad0/width=450/1772407.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CMxtRb^Se%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CMxtRb^Se%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3206352365, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, grey eyes, portrait perfecteyes", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e69ba502-eb33-4e22-bba4-57acc3b09e45/width=450/1772409.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CR5tSb^Se%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CR5tSb^Se%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3193574599, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, brown eyes, portrait perfecteyes", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3f498628-f571-4b65-8ac4-e3b3a3422b73/width=450/1772415.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CR5o}XmSe%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CR5o}XmSe%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2606139458, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, green brown hazel eyes, portrait perfecteyes", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d1415874-f3d0-4b44-8e6b-796a050d4951/width=450/1772417.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CR5tRXmSe%LxutSxu%MR+kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CR5tRXmSe%LxutSxu%MR+kD", + "width": 768, + "height": 1152 + }, + "availability": 
"Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3761462867, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "blue eye, brown eye, heterochromia, perfecteyes, woman", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dc440e5e-f960-432e-815b-544554e45c4f/width=450/1772423.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CMxtRb^Se%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CMxtRb^Se%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 934808835, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, blue yellow eyes, perfecteyes, woman", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 0.7 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.63" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bf6111c7-5f3f-437a-9d4a-9d68c3d5e504/width=450/1772427.jpeg", + "nsfw": "None", + "width": 768, + "height": 1152, + "hash": "UHF~5F^%OEIB~CR5tRb^Se%LxutSxu%MR*kD", + "type": "image", + "metadata": { + "hash": "UHF~5F^%OEIB~CR5tRb^Se%LxutSxu%MR*kD", + "width": 768, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3994257378, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "hashes": { + "model": "0f1b80cfe8" + }, + "prompt": "photo of 1girl, redhead, green blue eyes, perfecteyes, woman", + "Version": "v1.5.1", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "name": "PerfectEyesXL", + "type": "lora", + "weight": 1 + }, + { + "hash": "0f1b80cfe8", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "0f1b80cfe8", + "\"PerfectEyesXL": "8167a8057946\"", + "Noise multiplier": "1.05", + "Denoising strength": "0.55" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/128461" +} \ No newline at end of file diff --git a/PerfectEyesXL.preview.png b/PerfectEyesXL.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..1ac5e21ada7d7a4289f3c43b1a354623284fb38e Binary files /dev/null and b/PerfectEyesXL.preview.png differ diff --git a/PerfectEyesXL.safetensors b/PerfectEyesXL.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b63969c1b3dedac320a9d90d59e860bacf14795b --- /dev/null +++ b/PerfectEyesXL.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f248364cf0acc7a00bc4b5145b44c0dfbf10e71e9b0efb6b77f0956aeaf2411c +size 228453996 diff --git 
a/SDXLFaeTastic2400.civitai.info b/SDXLFaeTastic2400.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..ad55a1cfe6091c5eec84bc7c913f5405eaaa9b47 --- /dev/null +++ b/SDXLFaeTastic2400.civitai.info @@ -0,0 +1,400 @@ +{ + "id": 293991, + "modelId": 134338, + "name": "v24", + "createdAt": "2024-01-10T21:00:43.998Z", + "updatedAt": "2024-01-17T03:28:38.413Z", + "status": "Published", + "publishedAt": "2024-01-10T21:40:26.140Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

testest

", + "stats": { + "downloadCount": 5871, + "ratingCount": 259, + "rating": 4.98, + "thumbsUpCount": 554 + }, + "model": { + "name": "SDXL FaeTastic Details", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 231373, + "sizeKB": 445797.55859375, + "name": "SDXLFaeTastic2400.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-01-10T21:05:52.037Z", + "hashes": { + "AutoV1": "75FF1B13", + "AutoV2": "1CF798ACA8", + "SHA256": "1CF798ACA8E6193A26A08DD5B29290E76E440A01951FD03B8A697D5FE5369610", + "CRC32": "A249C612", + "BLAKE3": "3212CA105A526EAD6D4C2BD44A5DB9AA75E6EE0E9E7EA67AAFA4E85361770DC7", + "AutoV3": "E7DA1E0C0933E86F379590E48498CAC03130753874A389045B546F4CA88DE937" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/293991" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ee60fd2f-88ea-47bd-ab7d-66223ac29079/width=450/5361646.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "UNGa^V-:9sIo}]$*IoxGD$I;$MadK5X7M_ni", + "type": "image", + "metadata": { + "hash": "UNGa^V-:9sIo}]$*IoxGD$I;$MadK5X7M_ni", + "size": 3822107, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "ENSD": "31337", + "Size": "832x1216", + "seed": 1504062174, + "Model": "SDXLFaetastic_v24", + "steps": 40, + "hashes": { + "model": "07b985d12f", + "lora:SDXLFaeTastic2400": "6565d403e7" + }, + "prompt": "Highly detailed Dynamic shot of texture fur adorable pink and blue baby tiger, extremely detailed fur High quality texture, intricate details, detailed texture, High quality shadow, Cinematic Light, Depth of field, light source contrast, cosmic jungle background filled with clouds, whimsical fantasy", + "Version": "v1.7.0", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "07b985d12f", + "name": "SDXLFaetastic_v24", + "type": "model" + } + ], + "Model hash": "07b985d12f", + "negativePrompt": "3d, cartoon, low quality, blurry, collar", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"", + "Denoising strength": "0.44", + "SD upscale overlap": "96", + "SD upscale upscaler": "SwinIR_4x" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/39d3550e-403b-4f18-9f66-676c435007e6/width=450/5361653.jpeg", + "nsfw": "None", + "width": 1160, + "height": 1696, + "hash": "UEDId^JD9YR4}aBUITemGFOAv#rWK$OGrrNI", + "type": "image", + "metadata": { + "hash": "UEDId^JD9YR4}aBUITemGFOAv#rWK$OGrrNI", + "size": 3142748, + "width": 1160, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 2616895373, + "Model": "SDXLFaetastic_v24", + "steps": 40, + "hashes": { + "model": "07b985d12f", + "lora:SDXLFaeTastic2400": "6565d403e7" + }, + "prompt": "macro photo, sparkling magical fantasy glass flower dewdrop, very detailed, amazing quality, intricate, cinematic light, highly detail, beautiful, surreal, dramatic, galaxy fantasy colors, \n ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 1 + }, + { + "hash": "07b985d12f", + "name": "SDXLFaetastic_v24", + "type": "model" + } + ], + "Model hash": "07b985d12f", + 
"negativePrompt": "clutter, bad quality, low quality, blurry", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"", + "Denoising strength": "0.3", + "SD upscale overlap": "64", + "SD upscale upscaler": "4x_foolhardy_Remacri" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/96dace17-833f-432e-a1f7-dde2ee924bdf/width=450/5361659.jpeg", + "nsfw": "None", + "width": 1664, + "height": 1398, + "hash": "UeKTAHEyNG%3~qadjZX7u4r=jGn$yDN_oJS#", + "type": "image", + "metadata": { + "hash": "UeKTAHEyNG%3~qadjZX7u4r=jGn$yDN_oJS#", + "size": 2709185, + "width": 1664, + "height": 1398 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL-VAE_0.9.safetensors", + "Size": "832x1216", + "seed": 686086729, + "Model": "FaeTasticSDXL24", + "steps": 45, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "vae": "63aeecb90f", + "model": "603b6d615a", + "lora:SDXLFaeTastic2400": "1cf798aca8" + }, + "prompt": "a melting candle made of gumdrops in the shape of a heart , fantasy, extremely detailed, beautiful, sparkly gummy melting hearts, high quality, extremely elegant", + "Version": "v1.7.0", + "sampler": "DPM++ 2M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7.5, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "603b6d615a", + "name": "FaeTasticSDXL24", + "type": "model" + } + ], + "Model hash": "603b6d615a", + "SDXLFaeTastic2400": "0.8>, none\"", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/028df0f3-09ed-4656-a7d3-e26df14752e7/width=450/5361660.jpeg", + "nsfw": "None", + "width": 1664, + "height": 1398, + "hash": "UdF=:eNHozR:~qWobHa}_3jYoIax?cs.e.sm", + "type": "image", + "metadata": { + "hash": "UdF=:eNHozR:~qWobHa}_3jYoIax?cs.e.sm", + "size": 2643415, + "width": 1664, + "height": 1398 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL-VAE_0.9.safetensors", + "Size": "832x1216", + "seed": 3492623220, + "Model": "FaeTasticSDXL24", + "steps": 45, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "vae": "63aeecb90f", + "model": "603b6d615a", + "lora:SDXLFaeTastic2400": "1cf798aca8" + }, + "prompt": "Delicate details, Splash art, The movie, (A cell bottle in a bottle ), cordialidad intricately detailed, fantastical, complementary colours, fantasy, concept art, 8k resolution blur background Vivid colors, Broken Glass effect, no background, stunning, something that even doesn't exist, mythical being, energy, molecular, textures, iridescent and luminescent scales, breathtaking beauty, pure perfection, divine presence, unforgettable, impressive, breathtaking beauty, Volumetric light, auras, rays, vivid colors reflects", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 5.5, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "603b6d615a", + "name": "FaeTasticSDXL24", + "type": "model" + } + ], + "Model hash": "603b6d615a", + "SDXLFaeTastic2400": "0.8>, none\"", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/14bcbd63-2fb3-49a0-9c55-b938699992c1/width=450/5361661.jpeg", + "nsfw": "None", + "width": 1664, + "height": 1398, + "hash": "UgFYcSadX7j^~XWUWCbb.8ogjFWU?un%f7j]", + "type": "image", + "metadata": { + "hash": "UgFYcSadX7j^~XWUWCbb.8ogjFWU?un%f7j]", + "size": 3199180, + "width": 1664, + "height": 1398 + }, + 
"availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 2616895373, + "Model": "SDXLFaetastic_v24", + "steps": 40, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "07b985d12f", + "lora:SDXLFaeTastic2400": "6565d403e7" + }, + "prompt": "macro photo, sparkling magical fantasy glass flower dewdrop, very detailed, amazing quality, intricate, cinematic light, highly detail, beautiful, surreal, dramatic, galaxy fantasy colors, \n ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7.5, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 1 + }, + { + "hash": "07b985d12f", + "name": "SDXLFaetastic_v24", + "type": "model" + } + ], + "Model hash": "07b985d12f", + "negativePrompt": "clutter, bad quality, low quality, blurry", + "SDXLFaeTastic2400": "1>, none\"", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d2a894ba-00d8-4a47-a51e-fb16281fa896/width=450/5361686.jpeg", + "nsfw": "None", + "width": 1160, + "height": 1696, + "hash": "UHH2EB~WH=4UoObw0zMxU[$$OsAG}l-CWAJB", + "type": "image", + "metadata": { + "hash": "UHH2EB~WH=4UoObw0zMxU[$$OsAG}l-CWAJB", + "size": 2593053, + "width": 1160, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 2075307309, + "Model": "SDXLFaetastic_v24", + "steps": 40, + "hashes": { + "model": "07b985d12f", + "lora:SDXLFaeTastic2400": "6565d403e7" + }, + "prompt": "sparkling kawaii little baby unicorn sitting on a cloud of nebula dust, light shines through, magical artifact, very detailed, amazing quality, intricate, cinematic light, highly detail, beautiful, surreal, dramatic, galaxy ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "07b985d12f", + "name": "SDXLFaetastic_v24", + "type": "model" + } + ], + "Model hash": "07b985d12f", + "negativePrompt": "bad quality, blurry, low quality, deformity, bad anatomy, ugly, jewelry", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"", + "Denoising strength": "0.3", + "SD upscale overlap": "64", + "SD upscale upscaler": "SwinIR_4x" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a3bca214-4f87-4882-a9ec-3618c7bda4ef/width=450/5362397.jpeg", + "nsfw": "None", + "width": 1248, + "height": 1824, + "hash": "U99@@kW?9DQ,E4x[vzR6r1$+kXI;~Tm+K7Ki", + "type": "image", + "metadata": { + "hash": "U99@@kW?9DQ,E4x[vzR6r1$+kXI;~Tm+K7Ki", + "size": 2946444, + "width": 1248, + "height": 1824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 3797470999, + "Model": "SDXLFaetastic_v24", + "steps": 40, + "hashes": { + "model": "07b985d12f", + "lora:SDXLFaeTastic2400": "6565d403e7" + }, + "prompt": "macro photo, a beautiful translucent glass dragon frog that glows within, glowing lights, beautiful waterfall , made out of multicolored transparent delicate glass, magical sparkles,vibrant whimsical colors", + "Version": "v1.7.0", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "name": "SDXLFaeTastic2400", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "07b985d12f", + "name": "SDXLFaetastic_v24", + "type": "model" + } + ], + "Model hash": "07b985d12f", + "negativePrompt": "eyes", + "\"SDXLFaeTastic2400": "e7da1e0c0933\"", + "Denoising strength": "0.23", + "SD upscale overlap": "64", + "SD upscale 
upscaler": "SwinIR_4x" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/293991" +} \ No newline at end of file diff --git a/SDXLFaeTastic2400.preview.png b/SDXLFaeTastic2400.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..ba3b489740f9b8636ec2884d0cab176c21d208f0 Binary files /dev/null and b/SDXLFaeTastic2400.preview.png differ diff --git a/SDXLFaeTastic2400.safetensors b/SDXLFaeTastic2400.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..892b096edff7cc6fb0cb139d9088ed18bb77f9ab --- /dev/null +++ b/SDXLFaeTastic2400.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cf798aca8e6193a26a08dd5b29290e76e440a01951fd03b8a697d5fe5369610 +size 456496700 diff --git a/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.civitai.info b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..6a0b26d8f50e218b4278a88b8283b15a1c2f6164 --- /dev/null +++ b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.civitai.info @@ -0,0 +1,317 @@ +{ + "id": 180569, + "modelId": 158945, + "name": "beta v0.4", + "createdAt": "2023-10-11T21:09:38.793Z", + "updatedAt": "2023-10-13T03:38:41.577Z", + "status": "Published", + "publishedAt": "2023-10-13T03:38:41.574Z", + "trainedWords": [ + "film photography style", + "light grain", + "medium grain", + "heavy grain" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

Better response to prompts, more coherent lines, and less jpeg artifacting than Alpha v0.2. Trained on a photorealistic model, not SDXL 1.0 base.

Use the following prompting template:

\"film photography style\" [your prompt], [modifiers], [shot type], [shot angle], [grain level]

  • [modifiers] - still a WIP, optional

    • nothing, bokeh, bloom, god rays, moody, etc.

  • [shot type] - only use for portraits or pictures with people

    • nothing, Extreme Close Up, Close Up, Medium Close Up, Medium Shot, Medium Full Shot, Full Shot

  • [shot angle] - describes the camera angle

    • nothing, Eye Level, Low Angle, etc.

  • [grain level] - important to reduce Jpeg artifacts

    • nothing, light grain, medium grain, heavy grain

** Note: nothing means \"\", so DO NOT type \"nothing\" **

", + "stats": { + "downloadCount": 7242, + "ratingCount": 317, + "rating": 4.98, + "thumbsUpCount": 621 + }, + "model": { + "name": "SDXL Film Photography Style", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 139125, + "sizeKB": 227497.6171875, + "name": "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-13T03:40:52.568Z", + "hashes": { + "AutoV1": "D8FBA61F", + "AutoV2": "BC6DB9D8F1", + "SHA256": "BC6DB9D8F167ADF51C2AD9280CCCAFF108FC8A6D6E8CD654E3AFCFDBF13E1048", + "CRC32": "AFD0EA85", + "BLAKE3": "50EDFFC2D82BDF7F302475488091A562E38989B813AD1F1EB6458C799F334909" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/180569" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8068f6f4-7031-4c7f-a918-fa87dd2f51c0/width=450/2923439.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UtI;R}%1fit6~Wxaj?ofOaj]WCWCM|a#R+R+", + "type": "image", + "metadata": { + "hash": "UtI;R}%1fit6~Wxaj?ofOaj]WCWCM|a#R+R+", + "size": 199891, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 1386049944, + "Model": "juggernautXL_version5", + "steps": 25, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a golden field with a small barn at sunset, a few clouds, light grain ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/21ba785b-3110-4551-a9a0-a9b65eb87e38/width=450/2923442.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAHe8$^*uN005T4n%0Di0ME2~C~Wxt%2M^Ri", + "type": "image", + "metadata": { + "hash": "UAHe8$^*uN005T4n%0Di0ME2~C~Wxt%2M^Ri", + "size": 103635, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 630410583, + "Model": "juggernautXL_version5", + "steps": 40, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a woman with hazel eyes and light hair wearing a baseball cap, Close Up, Eye Level, light grain ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Mask blur": "4", + "resources": [ + { + "hash": "70229e1d56", + 
"name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "Denoising strength": "0.4", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c44ecbef-cfc3-40cc-afcf-d60bd303f532/width=450/2923451.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCHenG[n00-;K-H=Uuo#LNt7={M{00Sht,oy", + "type": "image", + "metadata": { + "hash": "UCHenG[n00-;K-H=Uuo#LNt7={M{00Sht,oy", + "size": 260191, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 4206273713, + "Model": "juggernautXL_version5", + "steps": 40, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a red house framed by a snowy mountain in the background and fall trees on either side, Low Angle, light noise ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6acd7432-88e4-442f-8d50-b3540dc734c7/width=450/2923443.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UbIN$gWAx-s:~7j?xpazNZt6ogRkS1j[WFoJ", + "type": "image", + "metadata": { + "hash": "UbIN$gWAx-s:~7j?xpazNZt6ogRkS1j[WFoJ", + "size": 101278, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x768", + "seed": 3218527887, + "Model": "juggernautXL_version5", + "steps": 35, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a red turtleneck, Close Up Shot, light grain ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Mask blur": "4", + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low 
quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "Denoising strength": "0.6", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f877bf66-b9ca-404b-8ada-3b97bb469fa0/width=450/2923444.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UBA1R@yZIVDiyXtQE2WB4ot3aeoe-SNLs,Rj", + "type": "image", + "metadata": { + "hash": "UBA1R@yZIVDiyXtQE2WB4ot3aeoe-SNLs,Rj", + "size": 135421, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 1499591468, + "Model": "juggernautXL_version5", + "steps": 40, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a moody picture of an 80's cyberpunk woman leaning out of a car window below a city skyline at night engulfed in dense fog at night, Low Angle, light grain ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Mask blur": "4", + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "Denoising strength": "0.45", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/19168b59-490a-49e3-b722-c4f86a92afb8/width=450/2923440.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U5B|vguO*x0000W?8w~V028_%3?Z-U?acZE1", + "type": "image", + "metadata": { + "hash": "U5B|vguO*x0000W?8w~V028_%3?Z-U?acZE1", + "size": 196632, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x1024", + "seed": 2035865504, + "Model": "juggernautXL_version5", + "steps": 25, + "hashes": { + "model": "70229e1d56" + }, + "prompt": "film photography style a man with brown hair during a snowstorm at night, Low Angle, Medium Shot ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Mask blur": "4", + "resources": [ + { + "hash": "70229e1d56", + "name": "juggernautXL_version5", + "type": "model" + } + ], + "Model hash": "70229e1d56", + "negativePrompt": "(worst quality, low 
quality, grayscale, bw, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry, grainy), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, (airbrushed, cartoon, anime, semi-realistic, cgi, render, blender, digital art, manga, amateur:1.3), (3D ,3D Game, 3D Game Scene, 3D Character:1.1), (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3)", + "Denoising strength": "0.4", + "\"FILM_PHOTOGRAPHY_STYLE-000049": "e1474929e3e2\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/180569" +} \ No newline at end of file diff --git a/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.preview.png b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..241d0fbc421dcbc9e7995e35fa205c4309aef72f Binary files /dev/null and b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.preview.png differ diff --git a/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4f3ff00f6f23ea05cc4b2f7814fb401a92dc02c0 --- /dev/null +++ b/SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bc6db9d8f167adf51c2ad9280cccaff108fc8a6d6e8cd654e3afcfdbf13e1048 +size 232957560 diff --git a/Soviet-poster.civitai.info b/Soviet-poster.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..097da4e7a38b15b179066634006902bf14303b2a --- /dev/null +++ b/Soviet-poster.civitai.info @@ -0,0 +1,760 @@ +{ + "id": 206390, + "modelId": 183892, + "name": "Soviet-poster", + "createdAt": "2023-10-31T06:07:34.028Z", + "updatedAt": "2023-10-31T08:04:32.852Z", + "status": "Published", + "publishedAt": "2023-10-31T08:04:32.851Z", + "trainedWords": [ + "soviet poster" + ], + "trainingStatus": "Approved", + "trainingDetails": { + "type": "Style", + "params": { + "unetLR": 0.0005, + "clipSkip": 1, + "loraType": "lora", + "keepTokens": 0, + "networkDim": 32, + "numRepeats": 6, + "resolution": 1024, + "lrScheduler": "cosine_with_restarts", + "minSnrGamma": 5, + "targetSteps": 648, + "enableBucket": true, + "networkAlpha": 16, + "optimizerArgs": "weight_decay=0.1", + "optimizerType": "AdamW8Bit", + "textEncoderLR": 5e-05, + "maxTrainEpochs": 12, + "shuffleCaption": false, + "trainBatchSize": 4, + "flipAugmentation": false, + "lrSchedulerNumCycles": 3 + }, + "baseModel": "sdxl" + }, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 3519, + "ratingCount": 343, + "rating": 5, + "thumbsUpCount": 628 + }, + "model": { + "name": "Soviet poster XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 158326, + "sizeKB": 6476.880859375, + "name": "206390_training_data.zip", + "type": "Training Data", + "metadata": { + "format": "Other", + "numImages": 36, + "ownRights": true, + "numCaptions": 36, + "shareDataset": true, + "trainingResults": { + "jobId": "67d6645b-bb88-4a69-a897-26ba0472f156", + "epochs": [ + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000001.safetensors", + "epoch_number": 1, + "sample_images": [ + { + "prompt": 
"worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063033_e000001_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063045_e000001_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063057_e000001_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000002.safetensors", + "epoch_number": 2, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063336_e000002_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063348_e000002_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063400_e000002_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000003.safetensors", + "epoch_number": 3, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063638_e000003_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063650_e000003_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063702_e000003_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000004.safetensors", + "epoch_number": 4, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063940_e000004_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031063953_e000004_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064005_e000004_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000005.safetensors", + "epoch_number": 5, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064243_e000005_00.png" + }, + { + "prompt": "worker", + "image_url": 
"https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064255_e000005_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064307_e000005_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000006.safetensors", + "epoch_number": 6, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064545_e000006_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064557_e000006_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064609_e000006_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000007.safetensors", + "epoch_number": 7, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064847_e000007_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064859_e000007_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031064911_e000007_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000008.safetensors", + "epoch_number": 8, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065149_e000008_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065201_e000008_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065213_e000008_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000009.safetensors", + "epoch_number": 9, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065452_e000009_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065504_e000009_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": 
"https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065516_e000009_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000010.safetensors", + "epoch_number": 10, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065754_e000010_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065806_e000010_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031065818_e000010_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster-000011.safetensors", + "epoch_number": 11, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070056_e000011_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070108_e000011_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070120_e000011_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster.safetensors", + "epoch_number": 12, + "sample_images": [ + { + "prompt": "worker in winter hat with earflaps", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070357_e000012_00.png" + }, + { + "prompt": "worker", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070409_e000012_01.png" + }, + { + "prompt": "grandpa and granddaughter shopping", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster_20231031070421_e000012_02.png" + } + ] + } + ], + "history": [ + { + "time": "2023-10-31T06:19:26.506Z", + "status": "Submitted" + }, + { + "time": "2023-10-31T06:25:25.427Z", + "status": "Processing", + "message": "" + }, + { + "time": "2023-10-31T07:06:59.437Z", + "status": "InReview", + "message": "Job complete" + } + ], + "attempts": 0, + "end_time": "2023-10-31T07:06:54.454Z", + "start_time": "2023-10-31T06:25:24.662Z", + "transactionId": "c2e25c8b-914e-4e8a-9605-df41529d1424" + } + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-31T06:20:37.633Z", + "hashes": { + "AutoV1": "C5D26EDD", + "AutoV2": "53B527D41E", + "SHA256": "53B527D41E09FE52680263DAB9A97A84FC082E4ADD726B54E99D3DA5854593F2", + "CRC32": "5A428DEB", + "BLAKE3": "042ABE9DD3D7B6EF5067FF2DF7EE9C07DE950E0774B36BB252B38C682881ACCB" + }, + "primary": true, + "downloadUrl": 
"https://civitai.com/api/download/models/206390" + }, + { + "id": 158377, + "sizeKB": 223101.23046875, + "name": "Soviet-poster.safetensors", + "type": "Model", + "metadata": { + "format": "SafeTensor", + "selectedEpochUrl": "https://orchestration.civitai.com/v1/consumer/jobs/67d6645b-bb88-4a69-a897-26ba0472f156/assets/Soviet-poster.safetensors" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-31T07:40:37.824Z", + "hashes": { + "AutoV1": "B834385A", + "AutoV2": "BB1CEDFCEC", + "SHA256": "BB1CEDFCEC667E192AC3006C43F5137B87EE6D0AA8006AF50612526502D3EACE", + "CRC32": "7103D2BA", + "BLAKE3": "D37F1BBBF5054DF903E324D0B35D808A53E081DAEF2AFA031BDE99BA3C980B23" + }, + "primary": false, + "downloadUrl": "https://civitai.com/api/download/models/206390?type=Model&format=SafeTensor" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/da3efb58-935e-4348-a1f2-cf344abeb91d/width=450/3287095.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "URE{h3IW~UW;Ir$fWBxtRjw^t7e:xZSixZWA", + "type": "image", + "metadata": { + "hash": "URE{h3IW~UW;Ir$fWBxtRjw^t7e:xZSixZWA", + "size": 1167419, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 2199396963, + "Model": "a_xl_formulaxl_v10", + "steps": 35, + "hashes": { + "model": "500b9f3c49" + }, + "prompt": " soviet poster, Jedi Master Lenin rises lightsaber in space", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "500b9f3c49", + "name": "a_xl_formulaxl_v10", + "type": "model" + } + ], + "Model hash": "500b9f3c49", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "[deformed | disfigured], poorly drawn, [bad, wrong] anatomy, [extra|missing|floating|disconnected] limb, (mutated hands and fingers), blurry, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "1" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1792b868-8be3-4f06-a5fa-d0057548a0a3/width=450/3285166.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UhIg4WZ$R*tR}@eTI;OFVtr=t6W=bIRjoet7", + "type": "image", + "metadata": { + "hash": "UhIg4WZ$R*tR}@eTI;OFVtr=t6W=bIRjoet7", + "size": 1150488, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 1245012955, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": "soviet poster, Trump holds a girl in his hands at the republican party congress, ", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "stars,, (deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, 
ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.28", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "4" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d68b602d-454a-4648-aaaa-38ba0003c4a6/width=450/3285364.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UNGIWFS6o}_3?d%Nb_I[XVWFNzt8IWNGxuad", + "type": "image", + "metadata": { + "hash": "UNGIWFS6o}_3?d%Nb_I[XVWFNzt8IWNGxuad", + "size": 1117419, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 2165462470, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": ", Black widow collective farmer milkmaid on the farm", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "1" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b888823f-f0ad-4025-98cc-7ce3f86c0727/width=450/3286536.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "U6F$8~_1009H=Xxtn2em~VRkwt?F049b}-xt", + "type": "image", + "metadata": { + "hash": "U6F$8~_1009H=Xxtn2em~VRkwt?F049b}-xt", + "size": 1210919, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 2691000853, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": " soviet poster, Buryat scientists summon angelic creature at uranium deposit area in Siberia", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "[deformed | disfigured], poorly drawn, [bad, wrong] anatomy, [extra|missing|floating|disconnected] limb, (mutated hands and fingers), blurry, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "3" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1ac65b15-4460-4427-9d90-70dfcbb8339e/width=450/3285303.jpeg", + "nsfw": "None", + "width": 1024, + "height": 768, + "hash": "URGa%%$LnOxb}t-o9aaLrraeEhM{=dbctQoM", + "type": "image", + "metadata": { + "hash": "URGa%%$LnOxb}t-o9aaLrraeEhM{=dbctQoM", + "size": 1299756, + "width": 1024, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "1024x768", + "seed": 4032266071, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": "abstract expressionist painting soviet poster, Darth Vader raises his lightsaber at the party congress . energetic brushwork, bold colors, abstract forms, expressive, emotional", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption, \"realistic", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.28", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "6" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f7764ba1-e2da-456c-bb26-a031eaf9736e/width=450/3285306.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "URFPA]RO=^sl~qVrspn$-qxFMzs8N3-ojEs.", + "type": "image", + "metadata": { + "hash": "URFPA]RO=^sl~qVrspn$-qxFMzs8N3-ojEs.", + "size": 1257183, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 321925958, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": " soviet poster, Hulk collective farmer", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "1" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8551c0bb-7d4f-4dd8-b306-e9945df938de/width=450/3285382.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UTG8T2D,%JyC~WVtI@x]%NxBI[t8EmRjR.Nf", + "type": "image", + "metadata": { + "hash": "UTG8T2D,%JyC~WVtI@x]%NxBI[t8EmRjR.Nf", + 
"size": 1183996, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 265190126, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": " soviet poster, Black widow collective farmer at wheat harvesting", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "1" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3d69aa36-7788-44ea-8934-3d460ae9052d/width=450/3285308.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "U8Eev6={?K%104J:55t6n1E3Mu9r%O^GWAIn", + "type": "image", + "metadata": { + "hash": "U8Eev6={?K%104J:55t6n1E3Mu9r%O^GWAIn", + "size": 1175092, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 2430102153, + "Model": "a_xl_MOHAWK_v16", + "steps": 35, + "hashes": { + "model": "fa80372c9f" + }, + "prompt": " soviet poster, thanos electrician on a pole", + "Version": "1.6.0", + "sampler": "Restart", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "fa80372c9f", + "name": "a_xl_MOHAWK_v16", + "type": "model" + } + ], + "Model hash": "fa80372c9f", + "\"Soviet-poster": "f697c639098f\"", + "negativePrompt": "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, logo, text, watermark, sign, caption", + "ADetailer model": "face_yolov8s.pt", + "ADetailer version": "23.9.3", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "SGM noise multiplier": "True", + "ADetailer dilate/erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.43", + "ADetailer inpaint only masked": "True", + "ADetailer mask only top k largest": "1" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/206390" +} \ No newline at end of file diff --git a/Soviet-poster.preview.png b/Soviet-poster.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..b7f01b880849238c96ced3b506e5003f42fc2736 Binary files /dev/null and b/Soviet-poster.preview.png differ diff --git a/Soviet-poster.safetensors b/Soviet-poster.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1bec1a6e9a26176523e2b159af14a09efb7e311b --- /dev/null +++ b/Soviet-poster.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb1cedfcec667e192ac3006c43f5137b87ee6d0aa8006af50612526502d3eace +size 228455660 diff --git 
a/StickersRedmond.civitai.info b/StickersRedmond.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..d0366bd05e852038f6993b9d3aa36f7d3e305fa1 --- /dev/null +++ b/StickersRedmond.civitai.info @@ -0,0 +1,460 @@ +{ + "id": 160130, + "modelId": 144142, + "name": "v1.0", + "createdAt": "2023-09-12T06:18:51.601Z", + "updatedAt": "2023-09-12T14:00:43.773Z", + "status": "Published", + "publishedAt": "2023-09-12T14:00:43.767Z", + "trainedWords": [ + "Stickers", + "Sticker" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 5235, + "ratingCount": 389, + "rating": 4.99, + "thumbsUpCount": 664 + }, + "model": { + "name": "Stickers.Redmond - Stickers Lora for SD XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 120440, + "sizeKB": 166543.00390625, + "name": "StickersRedmond.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-09-12T06:20:37.934Z", + "hashes": { + "AutoV1": "D4E0FE27", + "AutoV2": "927A840722", + "SHA256": "927A840722D1AC9103223C09DA55CC3AE750300E1C0E22A15FFCF460AC6F8A56", + "CRC32": "A14141A7", + "BLAKE3": "FF25CF7E012AA92697509C7C7BCEF54DD58675031E6223B29AF8EAE53E781B07" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/160130" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/431c9ce7-bfbe-425b-9105-d0cb7539d8b0/width=450/2464224.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCN^YrniNG?b~qt7VsRjRjkCofWBR*R*tSxa", + "type": "image", + "metadata": { + "hash": "UCN^YrniNG?b~qt7VsRjRjkCofWBR*R*tSxa", + "size": 896475, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3383490575, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "A cute cat sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/eae61d34-2cd7-49f6-848e-15d060f7893f/width=450/2464226.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UQQl|7i_.T%g-pozNIV@xvj[WAa|t8V@n$oz", + "type": "image", + "metadata": { + "hash": "UQQl|7i_.T%g-pozNIV@xvj[WAa|t8V@n$oz", + "size": 740784, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3953195269, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "A cute cat sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + 
"type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/714db2db-9c4f-4964-8f4d-bb10de9527ff/width=450/2464230.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAJ*I?-o18oheTt7x]sm0kkD+sNF_NM{MxX9", + "type": "image", + "metadata": { + "hash": "UAJ*I?-o18oheTt7x]sm0kkD+sNF_NM{MxX9", + "size": 1058029, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2564487756, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "A pirate boy sticker,, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/28fc78e8-9cf7-45f9-8b93-af286c5081d4/width=450/2464233.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UKNm.*-;~qt7xuxuoyM{~qM{RjofaxM{Rjt7", + "type": "image", + "metadata": { + "hash": "UKNm.*-;~qt7xuxuoyM{~qM{RjofaxM{Rjt7", + "size": 693344, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 4221125831, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "A ferrari car, black and white, (((sticker, stickers, )))", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/73da1701-11c5-44c5-a1b5-08455b03edf1/width=450/2464237.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCNm.*%M?bxu~qt7fPWB_3WBM{j]%MM|Rjt7", + "type": "image", + "metadata": { + "hash": "UCNm.*%M?bxu~qt7fPWB_3WBM{j]%MM|Rjt7", + "size": 643069, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 4221125832, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "A ferrari car, black and white, (((sticker, stickers, )))", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cc916704-6990-40be-95fc-a1cdca40775f/width=450/2464239.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UII=0{?aT2M|~otR9ZoM?vj^ROjFN3bE%2od", + "type": "image", + "metadata": { + "hash": "UII=0{?aT2M|~otR9ZoM?vj^ROjFN3bE%2od", + "size": 1447961, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2644104772, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "1girl, holdinga sword, anime style, sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/31e1e3cc-d060-4d3a-a689-303ca01d9739/width=450/2464243.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UPQ9m1xb?]o}xuofozV[%|bai}njxuWBM|oz", + "type": "image", + "metadata": { + "hash": "UPQ9m1xb?]o}xuofozV[%|bai}njxuWBM|oz", + "size": 805785, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1479704386, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Kawaii unicorn, sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5f91033c-1728-4b7f-ba96-7865dc12361b/width=450/2464249.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UVN^F|WU.Axux]ofs:fi%OfkROoen$bHR-af", + "type": "image", + "metadata": { + "hash": "UVN^F|WU.Axux]ofs:fi%OfkROoen$bHR-af", + "size": 1046761, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 686138225, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Cute owl, sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/90b38e12-162d-4b8f-a388-184fd6470083/width=450/2464262.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UdNdBrt7_4t7xufQoej[.8ayV@fQM{j[afa|", + "type": "image", + "metadata": { + "hash": "UdNdBrt7_4t7xufQoej[.8ayV@fQM{j[afa|", + "size": 1134694, + "width": 1024, + "height": 1024 + }, + 
"availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2425469216, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 33, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Angry owl, sticker, stickers, ", + "Version": "v1.6.0-125-g59544321", + "sampler": "DPM++ 2M SDE Exponential", + "cfgScale": 7, + "resources": [ + { + "name": "StickersRedmond", + "type": "lora", + "weight": 1 + }, + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "ugly, disfigured, duplicate, mutated, bad art, blur, blurry, dof", + "\"StickersRedmond": "84b001a38538\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/160130" +} \ No newline at end of file diff --git a/StickersRedmond.preview.png b/StickersRedmond.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..130583e5b65c602da91af193453da47f4512436f Binary files /dev/null and b/StickersRedmond.preview.png differ diff --git a/StickersRedmond.safetensors b/StickersRedmond.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6f73e06af0118ae78070674fdf33239855fbd387 --- /dev/null +++ b/StickersRedmond.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:927a840722d1ac9103223c09da55cc3ae750300e1c0e22a15ffcf460ac6f8a56 +size 170540036 diff --git a/Stop-Motion Animation.civitai.info b/Stop-Motion Animation.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..20cedf1758d0edcc777589c74eda23349db04f9d --- /dev/null +++ b/Stop-Motion Animation.civitai.info @@ -0,0 +1,460 @@ +{ + "id": 154225, + "modelId": 78526, + "name": "SD XL", + "createdAt": "2023-09-03T04:20:01.636Z", + "updatedAt": "2023-09-03T05:02:36.191Z", + "status": "Published", + "publishedAt": "2023-09-11T02:00:00.000Z", + "trainedWords": [ + "Stop-Motion Animation" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 2201, + "ratingCount": 218, + "rating": 5, + "thumbsUpCount": 381 + }, + "model": { + "name": "Stop-Motion Animation", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 115263, + "sizeKB": 223124.37890625, + "name": "Stop-Motion Animation.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-09-03T04:36:20.655Z", + "hashes": { + "AutoV1": "F8449DA9", + "AutoV2": "1E1FCAE07B", + "SHA256": "1E1FCAE07B17B450C5F1B119D90A1D81F32462E1E9CECD3FAEB89BBB8FA7CD84", + "CRC32": "81063A51", + "BLAKE3": "D70744BD72E23860BC74502F28D0AB74CD21C066D94A47E276F84EEEE10621C3" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/154225" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a9dabc5d-563f-479a-b9b0-d6ed6de5a3a8/width=450/2331937.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U4E2qG-ntPIu00S6tSw[1IXm?uIo}GD*9YkE", + "type": "image", + "metadata": { + "hash": "U4E2qG-ntPIu00S6tSw[1IXm?uIo}GD*9YkE", + "size": 1466728, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 2421258089, 
+ "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - In this claymation or plasticine style artwork we find ourselves in a university lecture hall during a crucial final exam. The scene is characterized by an atmosphere of exhaustion and desperation. The focal point is a student who, burdened by the weight of the academic challenge, displays visible signs of weariness and distress. Visual Elements: Medium: Claymation - The artwork takes on the distinct aesthetic of claymation or plasticine style, but light and full of color, lending a tactile and textured quality to the scene. Setting: Lecture Hall - The backdrop consists of a traditional university lecture hall, complete with rows of desks and chairs. Lighting: The overall lighting in the scene is bright and colorful. Student Character: Desperation and Exhaustion - The student at the center of the artwork is visibly drained and disheveled. Their posture is slouched, with sagging shoulders and tired eyes that betray their mental and physical exhaustion. The character's face is etched with anxiety, highlighting the intensity of the final exam. Symbolic Props: textbooks and crumpled notes and pen and paper on the desks - Surrounding the student's desk are scattered remnants of study materials. Surrounding Students: Anxious camaraderie - The surrounding students in the lecture hall also bear signs of weariness and anxiety. Artistic References: Elements reminiscent of the stop-motion techniques employed by Aardman Animations, known for their iconic characters like Wallace and Gromit and Shaun the Sheep.", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bc19fbb0-9c5a-475b-ab0c-37ae513c7e87/width=450/2331941.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UJHBxSMwVtROPW9FsTD%~qM{NHRjRjjERjjG", + "type": "image", + "metadata": { + "hash": "UJHBxSMwVtROPW9FsTD%~qM{NHRjRjjERjjG", + "size": 1534611, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 2676655752, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - Craft a stop-motion animation that fuses the inventive charm of Laika Studios with the comedic office environment of The Office, featuring a withering, animated seedling personified amidst an upbeat office setting. Bathe the scene in soft, natural light from office windows, subtly emphasizing the seedling's plight. Use a color palette marked by dull greens of the seedling set against bright, lively office colors to underline the seedling's melancholic state. 
The composition should be a medium shot of the seedling character, with the office antics unfolding in the background.", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/004ef1be-26e3-4101-a907-8e4edd5bbad6/width=450/2331934.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UGHw}gMyIV-o01kBt6Io~og2%LWWxHM{RQs:", + "type": "image", + "metadata": { + "hash": "UGHw}gMyIV-o01kBt6Io~og2%LWWxHM{RQs:", + "size": 1862329, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 3005084372, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - the Epic Battle of Ink and Pages, anthropomorphic books and pens clash in a literary showdown. The books, ancient, unleash their stories as weapons, pens scribble, battlefield, ink 2 in the navy and crimson style, superb garment detail, diverse curatorial style, brimming with hidden details", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bd369c3d-c80f-4599-b0fd-e67316ade243/width=450/2331944.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U9F}iq-n8{D*D*xs#8NG~9v~J.Ki*eS~AaEO", + "type": "image", + "metadata": { + "hash": "U9F}iq-n8{D*D*xs#8NG~9v~J.Ki*eS~AaEO", + "size": 1644098, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 867543018, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - surreal retro 3d diorama, in the style of Florence Thomas,Adobe Photoshop, ultra HD, strong perspective, depth of field view finder lens, detailed scenes, SMC Takumar 35mm f/ 2. 8 c 50v 5", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1bd6bb2e-7598-4cc7-8b9a-fab84c7c2baf/width=450/2331940.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UBIytb0e^+R50PZ#?bD%~WD4?HR4xa9G-:Mx", + "type": "image", + "metadata": { + "hash": "UBIytb0e^+R50PZ#?bD%~WD4?HR4xa9G-:Mx", + "size": 1347402, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 2111566206, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - Photo of a Teacher doll made of clay. Bright background in one color. space to the left. Bright & simple image that could be used in textbooks. 
3dcg. Refreshing image.", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d77a55cb-952f-4907-a575-dc07c3092c30/width=450/2331953.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "U6B{[cpx_Nt,0$%L%zx],.V[j[NG0N%MVsRj", + "type": "image", + "metadata": { + "hash": "U6B{[cpx_Nt,0$%L%zx],.V[j[NG0N%MVsRj", + "size": 1428626, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 3240235085, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - A medium film shot, of Harold, 40yr old man, glasses, and tech engineer, good looking but thin, staring mouth agape at a strange creature standing on hus desk", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/73583e71-0465-4b8f-a0a6-3833247f3601/width=450/2331955.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UAF#p[MxD$-:1RIVNbtR~BRki^bI~Vs-V?R*", + "type": "image", + "metadata": { + "hash": "UAF#p[MxD$-:1RIVNbtR~BRki^bI~Vs-V?R*", + "size": 1492696, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 3663810292, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - character with aluminium foil kid style walking for stop motion, add a hand in frame or little sticks linking to character hands", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e3dc0191-ed27-4b37-b1bd-ef7aa3fd7f14/width=450/2331956.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UJJQ_KMNIYwd22rrviiy~ln5rrwKO?Vav~Rn", + "type": "image", + "metadata": { + "hash": "UJJQ_KMNIYwd22rrviiy~ln5rrwKO?Vav~Rn", + "size": 1434781, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 800806598, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - Cinematic colourful lomographic minimalist rotoscope claymation. 
A Confident program manager from Meta working at Stripe", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/535689a5-351b-4f89-bfd4-07226d6ac5ba/width=450/2331960.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UHD+;:^j:*-BK%%1rDsT_MbFixoftQWBi_kW", + "type": "image", + "metadata": { + "hash": "UHD+;:^j:*-BK%%1rDsT_MbFixoftQWBi_kW", + "size": 1298317, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 1920086786, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - plasticine, a sad man walks down the street to work with a suitcase in his hands, full body character CLAYMATION", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/aaf7e86d-0376-424d-b1f5-bb1566ef922a/width=450/2331962.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1536, + "hash": "UEFXYV^P}?NG9u$j-oNGtQxax[xZj=xa%1s:", + "type": "image", + "metadata": { + "hash": "UEFXYV^P}?NG9u$j-oNGtQxax[xZj=xa%1s:", + "size": 1468940, + "width": 1024, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "SDXL VAE.safetensors", + "Size": "1024x1536", + "seed": 1585521864, + "Model": "SDXL Base 1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Stop-Motion Animation - stop motion film of toys that have come to life, cute, happy, charaters with a cinema-camera filming a scene", + "Version": "v1.6.0-RC-29-g897312de", + "sampler": "DPM++ 3M SDE Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "SDXL Base 1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Stop-Motion Animation": "18d5c16ca9ef\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/154225" +} \ No newline at end of file diff --git a/Stop-Motion Animation.preview.png b/Stop-Motion Animation.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..d79c43e5faf48e40b48d23e6ef1b7f9a1de6b4d3 Binary files /dev/null and b/Stop-Motion Animation.preview.png differ diff --git a/Stop-Motion Animation.safetensors b/Stop-Motion Animation.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5e0b587ced345be83fdd383da43bf7d4248c680c --- /dev/null +++ b/Stop-Motion Animation.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e1fcae07b17b450c5f1b119d90a1d81f32462e1e9cecd3faeb89bbb8fa7cd84 +size 228479364 diff --git a/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.civitai.info b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..21e28a26da24d31be4a127ca3bfc737011de21ce --- /dev/null +++ b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.civitai.info 
@@ -0,0 +1,451 @@ +{ + "id": 178022, + "modelId": 133031, + "name": "v2.0", + "createdAt": "2023-10-07T21:47:37.842Z", + "updatedAt": "2023-10-07T21:50:53.335Z", + "status": "Published", + "publishedAt": "2023-10-07T21:50:53.333Z", + "trainedWords": [ + "T shirt design", + "TshirtDesignAF" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 4701, + "ratingCount": 305, + "rating": 5, + "thumbsUpCount": 577 + }, + "model": { + "name": "TShirtDesignRedmond - T-Shirt Design Lora for SD XL 1.0", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 136145, + "sizeKB": 166543.00390625, + "name": "TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-07T21:50:45.543Z", + "hashes": { + "AutoV1": "74B84C9C", + "AutoV2": "14AF8A3134", + "SHA256": "14AF8A3134C352943F377CD8E4D63D60DC2BB91B714A41EA9DC3BB7C1ACB806B", + "CRC32": "5C12D6FE", + "BLAKE3": "C323056E0F2E009A16B833F7B437416CCA5C9CC351BFA3B628941B5FB651F453" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/178022" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fb290da5-300f-49de-8d14-c19e215b8d7d/width=450/2840916.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UNM$}EnO-n^j#mtQkpr=}uXSyDM|O[WBtQS5", + "type": "image", + "metadata": { + "hash": "UNM$}EnO-n^j#mtQkpr=}uXSyDM|O[WBtQS5", + "size": 1213032, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1665035885, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Illustrated T-shirt, Whimsical Cat Character, Sunset Beach Landscape, Tropical Color Palette, Fabric Texture, Bold Lineart, Dynamic Perspective ,\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/16cc59cf-dabf-4b79-a802-7fa91aba2071/width=450/2840928.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UUP};r%1-:o0~9oKI=fk.7WCozofD+RktQof", + "type": "image", + "metadata": { + "hash": "UUP};r%1-:o0~9oKI=fk.7WCozofD+RktQof", + "size": 1100452, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 999030895, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Vintage T-shirt, Classic Car Object, Desert Highway Landscape, Retro Color Palette, Distressed Texture, Sketch Style, Horizon Perspective ,\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" 
+ } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/38645d9d-abdd-4c4e-a978-b447ee021350/width=450/2840917.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UIKTPP^%584p}SM|F1ozMhIpcE-:NgtQS$nO", + "type": "image", + "metadata": { + "hash": "UIKTPP^%584p}SM|F1ozMhIpcE-:NgtQS$nO", + "size": 1155046, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1665035884, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Illustrated T-shirt, Whimsical Cat Character, Sunset Beach Landscape, Tropical Color Palette, Fabric Texture, Bold Lineart, Dynamic Perspective ,\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/276b050b-7006-496e-97b9-11000bcc8ce5/width=450/2840920.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UBAwC%%M9GIU~qxtWBM|D*WBWBaz0KRjt7t7", + "type": "image", + "metadata": { + "hash": "UBAwC%%M9GIU~qxtWBM|D*WBWBaz0KRjt7t7", + "size": 1241170, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 773093816, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Gothic T-shirt, Skull Object, Haunted Castle Landscape, Dark Monochrome Color Palette, Rough Stone Texture, Etching Style, Dramatic Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a9ce8dad-46e5-404f-89cc-2de72a95fff0/width=450/2840919.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UNMsvCxuVYi_~pofIUWA$yaeW=oz4.V@oft7", + "type": "image", + "metadata": { + "hash": "UNMsvCxuVYi_~pofIUWA$yaeW=oz4.V@oft7", + "size": 705559, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3159856981, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Fitness T-shirt, Yoga Pose Character, Zen Garden Landscape, Calming Neutral Color Palette, Smooth Fabric Texture, Silhouette Style, Balanced Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + 
"\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/af4edef1-9b12-461c-b683-179ca473e59a/width=450/2840921.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U8HUUh={01E2~9%1DjRj02oz9FNHK,s:KOIp", + "type": "image", + "metadata": { + "hash": "U8HUUh={01E2~9%1DjRj02oz9FNHK,s:KOIp", + "size": 1263927, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2320972245, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Illustrated T-shirt, Whimsical Cat Character, Sunset Beach Landscape, Tropical Color Palette, Fabric Texture, Bold Lineart, Dynamic Perspective ,\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fa0938f5-16ed-4561-b6ab-c3879944508f/width=450/2840927.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U8B:,4of4nof_3j[D%j[4Uay?aof00ay~pay", + "type": "image", + "metadata": { + "hash": "U8B:,4of4nof_3j[D%j[4Uay?aof00ay~pay", + "size": 620481, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2898698325, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Graphic T-shirt, Iconic Superhero Character, City Skyline Landscape, Monochromatic Color Palette, Smooth Texture, Stencil Style, Aerial Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a45a6ece-d266-4e76-aca9-7642a7072f3d/width=450/2840923.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U6B58s?[0000S=b.r|%gI=%L%LIUo?X3X9nU", + "type": "image", + "metadata": { + "hash": "U6B58s?[0000S=b.r|%gI=%L%LIUo?X3X9nU", + "size": 1808821, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3910634944, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Adventure T-shirt, Explorer Character, Jungle Waterfall Landscape, Lush Green Color Palette, Leafy Texture, Pen and Ink Style, Diagonal Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0b8ac13c-95ef-434e-943c-b7f8d5bef069/width=450/2840924.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UJENtjNHCitk_MWXJ7t5G]oeWZoLRkodd?M~", + "type": "image", + "metadata": { + "hash": "UJENtjNHCitk_MWXJ7t5G]oeWZoLRkodd?M~", + "size": 1115113, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1862648302, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Nostalgic T-shirt, Retro Game Character, Pixelated Landscape, 8-bit Color Palette, Pixel Texture, Pixel Art Style, Isometric Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8c1b3a29-e8dd-4e79-a7d5-4b812a1e48b4/width=450/2840925.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UFJavCtS56ROu4%2M{E2~CXSS~$%E1jFr=t7", + "type": "image", + "metadata": { + "hash": "UFJavCtS56ROu4%2M{E2~CXSS~$%E1jFr=t7", + "size": 1270859, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3498444041, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Athletic T-shirt, Basketball Player Character, Stadium Landscape, Team Color Palette, Jersey Texture, Dynamic Lineart, Action Perspective\n,T shirt design,TshirtDesignAF,\n", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "negativePrompt": "bad art, ugly, deformed, watermark, duplicated,", + "\"TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF": "95f26a473071\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/178022" +} \ No newline at end of file diff --git a/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.preview.png b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..ac9a6f1b5b2a45069281e57ccb64cdc9f8eb7d93 Binary files /dev/null and b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.preview.png differ diff --git a/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.safetensors b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b49e621f844656cb50f861d2df55aea18e8e009b --- /dev/null +++ b/TShirtDesignRedmondV2-Tshirtdesign-TshirtDesignAF.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14af8a3134c352943f377cd8e4d63d60dc2bb91b714a41ea9dc3bb7c1acb806b +size 170540036 diff --git a/The_Simplest.civitai.info b/The_Simplest.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..aba91eb52ab81852bbc1029a509f82ed34ea50cd --- /dev/null +++ b/The_Simplest.civitai.info @@ -0,0 +1,804 @@ +{ + "id": 376536, + "modelId": 336225, + "name": "The Simplest", + "createdAt": "2024-03-05T21:26:38.731Z", + 
"updatedAt": "2024-03-06T22:40:40.935Z", + "status": "Published", + "publishedAt": "2024-03-06T22:40:40.934Z", + "trainedWords": [], + "trainingStatus": "Approved", + "trainingDetails": { + "type": "Style", + "params": { + "unetLR": 0.0005, + "clipSkip": 1, + "loraType": "lora", + "keepTokens": 0, + "networkDim": 64, + "numRepeats": 7, + "resolution": 1024, + "lrScheduler": "cosine_with_restarts", + "minSnrGamma": 5, + "noiseOffset": 0.1, + "targetSteps": 1243, + "enableBucket": true, + "networkAlpha": 32, + "optimizerType": "Adafactor", + "textEncoderLR": 5e-05, + "maxTrainEpochs": 10, + "shuffleCaption": false, + "trainBatchSize": 4, + "flipAugmentation": false, + "lrSchedulerNumCycles": 3 + }, + "baseModel": "sdxl" + }, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 473, + "ratingCount": 7, + "rating": 5, + "thumbsUpCount": 139 + }, + "model": { + "name": "The Simplest", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 303242, + "sizeKB": 445804.83984375, + "name": "The_Simplest.safetensors", + "type": "Model", + "metadata": { + "format": "SafeTensor", + "selectedEpochUrl": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest.safetensors" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-03-06T22:41:06.276Z", + "hashes": { + "AutoV1": "5DD180B6", + "AutoV2": "EC43D26E93", + "SHA256": "EC43D26E93173195BF6873BE6AD829EC3C4DE3F2A1F5D7F3A31E83B5BDD6C6EA", + "CRC32": "75CD773F", + "BLAKE3": "B18238DD59D72530221A7E814B5A0F8FD22DD999A5E3C6D158CFB948C641F118", + "AutoV3": "A482D04BF14465B90140E05E3568D84D8805243EB7BFD914809E38946EB46508" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/376536" + }, + { + "id": 301701, + "sizeKB": 76066.7138671875, + "name": "376536_training_data.zip", + "type": "Training Data", + "metadata": { + "format": "Other", + "numImages": 71, + "ownRights": true, + "numCaptions": 71, + "shareDataset": false, + "trainingResults": { + "jobId": "554d27a3-f026-4f84-818d-bb43d292c0fc", + "epochs": [ + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000001.safetensors", + "epoch_number": 1, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305233714_e000001_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305233740_e000001_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. 
Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305233806_e000001_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000002.safetensors", + "epoch_number": 2, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305235055_e000002_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305235121_e000002_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240305235147_e000002_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000003.safetensors", + "epoch_number": 3, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306000437_e000003_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306000503_e000003_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306000529_e000003_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000004.safetensors", + "epoch_number": 4, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306001846_e000004_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306001911_e000004_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. 
Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306001935_e000004_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000005.safetensors", + "epoch_number": 5, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306003222_e000005_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306003248_e000005_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306003314_e000005_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000006.safetensors", + "epoch_number": 6, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306004623_e000006_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306004649_e000006_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306004715_e000006_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000007.safetensors", + "epoch_number": 7, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306010018_e000007_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306010044_e000007_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. 
Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306010110_e000007_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000008.safetensors", + "epoch_number": 8, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306011414_e000008_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306011440_e000008_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306011505_e000008_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest-000009.safetensors", + "epoch_number": 9, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306012753_e000009_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306012819_e000009_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306012845_e000009_02.png" + } + ] + }, + { + "model_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest.safetensors", + "epoch_number": 10, + "sample_images": [ + { + "prompt": "a cabin in woods surrounded by nature and mountains., Girl reads book", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306014122_e000010_00.png" + }, + { + "prompt": "seated at desk, a woman with shoulder-length hair", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306014147_e000010_01.png" + }, + { + "prompt": "Iron Man stands outside suburban house., wearing a sweater with long sleeves. 
Her arms are crossed as she gazes off to the side with an expression that seems contemplative or slightly concerned", + "image_url": "https://orchestration.civitai.com/v1/consumer/jobs/554d27a3-f026-4f84-818d-bb43d292c0fc/assets/The_Simplest_20240306014213_e000010_02.png" + } + ] + } + ], + "history": [ + { + "time": "2024-03-05T21:27:30.905Z", + "status": "Submitted" + }, + { + "time": "2024-03-05T23:22:47.053Z", + "status": "Processing", + "message": "" + }, + { + "time": "2024-03-06T01:43:17.305Z", + "status": "InReview", + "message": "Job complete" + } + ], + "attempts": 0, + "end_time": "2024-03-06T01:43:13.077Z", + "start_time": "2024-03-05T23:22:46.263Z", + "submittedAt": "2024-03-05T21:27:30.905Z", + "transactionId": "58185d8a-aaf1-40ec-b90d-e9be145e583b" + } + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-03-05T21:30:52.171Z", + "hashes": { + "AutoV1": "32393819", + "AutoV2": "7396C028AD", + "SHA256": "7396C028AD36AE79A0D11ECFC55401B2D7759939B89A1D040B2CC4E7EC27EEFC", + "CRC32": "7C8F05D5", + "BLAKE3": "1EFB4815D109D2B56B5BE3551C5E69065E95CE69A8AACB697232F3C5260A8C5E" + }, + "primary": false, + "downloadUrl": "https://civitai.com/api/download/models/376536?type=Training%20Data" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/72ded1f0-23f8-4e75-acaf-8a6c956eb48e/width=450/7547500.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UeN^e:9Fof-;~qRjfQt7IUfQWBayxuofoft7", + "type": "image", + "metadata": { + "hash": "UeN^e:9Fof-;~qRjfQt7IUfQWBayxuofoft7", + "size": 1059399, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 457683540, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of A witch making a potion , , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/63b5e48c-ea86-498a-ac91-c3a35d4ba074/width=450/7547568.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UMOp*|D%9F-;~qj[WBRj~qM{M{M{IUt7ofxu", + "type": "image", + "metadata": { + "hash": "UMOp*|D%9F-;~qj[WBRj~qM{M{M{IUt7ofxu", + "size": 1488936, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 22359364, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful color paint of a cat in a gardenn , highly detailed, 
8k, sharp, professional, clear, high contrast, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "noisy, grainy, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 4", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/56ab35e9-550f-46e8-b27d-1ee384955e09/width=450/7547567.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UQQmCrt7~qt7-;j[t7fQt7xuayofxuM{Rjof", + "type": "image", + "metadata": { + "hash": "UQQmCrt7~qt7-;j[t7fQt7xuayofxuM{Rjof", + "size": 941283, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 3042807076, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful color paint of cute Joker pointing a gun to the camera , highly detailed, 8k, sharp, professional, clear, high contrast, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "noisy, grainy, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 4", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/80a8f8c1-7de3-4c43-8c8e-d2349cdd8cd5/width=450/7547566.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UUQmCr-;-;M{ofxuxuofxuRjWBay~qM{M{t7", + "type": "image", + "metadata": { + "hash": "UUQmCr-;-;M{ofxuxuofxuRjWBay~qM{M{t7", + "size": 818377, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 2456701470, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "a man near a tree in a park ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f5df3894-7cd6-4a02-a1f4-b7869958e4bb/width=450/7547571.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U8NdO8_44n0000IU00%M00~qWBofRj~q%MM{", + "type": "image", + "metadata": { + "hash": "U8NdO8_44n0000IU00%M00~qWBofRj~q%MM{", + "size": 1445559, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 1261675815, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { 
+ "model": "31e35c80fc" + }, + "prompt": "impactful paint of a man near a tree in a park , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/71488705-bd35-4abc-9c67-b65dc221c395/width=450/7547569.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UcNKFyIUayt7-;M{t7of~qt7Int7-;t7xuWB", + "type": "image", + "metadata": { + "hash": "UcNKFyIUayt7-;M{t7of~qt7Int7-;t7xuWB", + "size": 1601704, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 1745579655, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a mountain cottage , , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cc29bb5d-7b3c-448e-87f4-cbd7775881ae/width=450/7547570.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHOWvn00?bWB_3xu-;%M~q%MD%IUt7WBIUD%", + "type": "image", + "metadata": { + "hash": "UHOWvn00?bWB_3xu-;%M~q%MD%IUt7WBIUD%", + "size": 1366590, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 2484588997, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "A Monster playing poker in a casino ,", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "Style Selector Style": "base", + "Style Selector Enabled": 
"True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5dd458e3-fa81-4251-8ead-6963c00b906d/width=450/7547576.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UWP%O.IUt7%M~qxu-;xuM{ayRjof%MayofRj", + "type": "image", + "metadata": { + "hash": "UWP%O.IUt7%M~qxu-;xuM{ayRjof%MayofRj", + "size": 951160, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 317175202, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of Superman doing homeworks , , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fe291245-2f1f-40b9-a512-310b326bddeb/width=450/7547573.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UUQcn{of~qxu_3j[RjofxuWUM{j[j[ayt7j[", + "type": "image", + "metadata": { + "hash": "UUQcn{of~qxu_3j[RjofxuWUM{j[j[ayt7j[", + "size": 872340, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 1368559420, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of Spiderman reading a book , , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/250bfdb9-da11-47c4-ba4d-7d55a6b57af2/width=450/7547572.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UBH.QaM{M{00IU%M%MfPIURjxu-;4n%M~qD%", + "type": "image", + "metadata": { + "hash": "UBH.QaM{M{00IU%M%MfPIURjxu-;4n%M~qD%", + "size": 1208809, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + 
"meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1024x1024", + "seed": 3075436991, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of A chonk in a fridge , , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "The_Simplest", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"The_Simplest": "a482d04bf144\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/376536" +} \ No newline at end of file diff --git a/The_Simplest.preview.png b/The_Simplest.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..5041d24c4ade74752b10f62cb610d6079e2d3afe Binary files /dev/null and b/The_Simplest.preview.png differ diff --git a/The_Simplest.safetensors b/The_Simplest.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..82a38c32c149bfa97f944c7aaf82951a701ff735 --- /dev/null +++ b/The_Simplest.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec43d26e93173195bf6873be6ad829ec3c4de3f2a1f5d7f3a31e83b5bdd6c6ea +size 456504156 diff --git a/Vintage_Anime.civitai.info b/Vintage_Anime.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..1c8b7a83f297d535fa03e2086710fb7600545848 --- /dev/null +++ b/Vintage_Anime.civitai.info @@ -0,0 +1,573 @@ +{ + "id": 351556, + "modelId": 313318, + "name": "Vintage Anime", + "createdAt": "2024-02-19T20:43:20.267Z", + "updatedAt": "2024-02-19T23:11:10.376Z", + "status": "Published", + "publishedAt": "2024-02-19T23:11:10.375Z", + "trainedWords": [], + "trainingStatus": "Approved", + "trainingDetails": { + "type": "Style", + "params": { + "unetLR": 0.0005, + "clipSkip": 1, + "loraType": "lora", + "keepTokens": 0, + "networkDim": 64, + "numRepeats": 15, + "resolution": 1024, + "lrScheduler": "cosine_with_restarts", + "minSnrGamma": 5, + "targetSteps": 1238, + "enableBucket": true, + "networkAlpha": 32, + "optimizerArgs": "weight_decay=0.1", + "optimizerType": "AdamW8Bit", + "textEncoderLR": 5e-05, + "maxTrainEpochs": 10, + "shuffleCaption": false, + "trainBatchSize": 4, + "flipAugmentation": false, + "lrSchedulerNumCycles": 3 + }, + "baseModel": "sdxl" + }, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1056, + "ratingCount": 154, + "rating": 4.99, + "thumbsUpCount": 205 + }, + "model": { + "name": "Vintage Anime", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 280158, + "sizeKB": 445790.98046875, + "name": "Vintage_Anime.safetensors", + "type": "Model", + "metadata": { + "format": "SafeTensor", + "selectedEpochUrl": "https://orchestration.civitai.com/v1/consumer/jobs/2140218e-e527-460d-905d-0f2445c9c59b/assets/Vintage_Anime.safetensors" + }, + 
"pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-19T22:51:28.859Z", + "hashes": { + "AutoV1": "D7668BB4", + "AutoV2": "87C39CA765", + "SHA256": "87C39CA765E0EC9A1F1A50161F396F5E2BE640A4A5FDA590874BDA13B735BB4A", + "CRC32": "28A2BD2C", + "BLAKE3": "0789F5422E1A60B39E56FBAB635881BC29756EF5A75F58056605B69C7A5A39BF", + "AutoV3": "3AD5A0BA4211545B0F9D2108320AC8433AB5FC6F71AD4AFCF07E6E42EB318CA3" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/351556" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f13c08be-2ce7-4518-9961-1f324b7ef27d/width=450/6851782.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "UBCGS,.T0MxtO@oMt8?G9G9FVXOsS$M|9a$%", + "type": "image", + "metadata": { + "hash": "UBCGS,.T0MxtO@oMt8?G9G9FVXOsS$M|9a$%", + "size": 1688050, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 203441625, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a woman in spacesuit in a spaceship , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5b0a84d2-f56c-4000-8878-0dfc74be3f6b/width=450/6851829.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "U6EC8.-A00H?yB*00hMxo~%h+uMx21R4?HR%", + "type": "image", + "metadata": { + "hash": "U6EC8.-A00H?yB*00hMxo~%h+uMx21R4?HR%", + "size": 1702150, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 3000550669, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of Margot Robbie in spacesuit in a spaceship , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low 
contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/25879b94-1676-4c79-b2cb-c2e0e0e68588/width=450/6851828.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "UADI|8~q9F$+?]%LrsM|4nV[%2IUoexuRkM|", + "type": "image", + "metadata": { + "hash": "UADI|8~q9F$+?]%LrsM|4nV[%2IUoexuRkM|", + "size": 1620436, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 4268221779, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of The Joker in a Bank , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c0c9dd20-a43b-44e4-b74c-c0261de23e0e/width=450/6851830.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "U8B:sb*I01*{0.D,Mcnz*J~B#Q0#t,E3NG-6", + "type": "image", + "metadata": { + "hash": "U8B:sb*I01*{0.D,Mcnz*J~B#Q0#t,E3NG-6", + "size": 1712206, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 2511090774, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a man in a park , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/05354698-00fd-40e1-a1d3-6599814fc28c/width=450/6851832.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "UECPha~pI@NGkqxu%Lt74oD*wvxZNGM{RjNH", + "type": "image", + "metadata": { + "hash": "UECPha~pI@NGkqxu%Lt74oD*wvxZNGM{RjNH", + 
"size": 1545090, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 1994566731, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a woman in a pub , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/52e1d38a-820c-4362-aa5b-cef8e19ddc67/width=450/6851831.jpeg", + "nsfw": "Soft", + "width": 2688, + "height": 1536, + "hash": "UFAT[+QRNHTL.TDOxutR-;E1ofj]xuM|t6s:", + "type": "image", + "metadata": { + "hash": "UFAT[+QRNHTL.TDOxutR-;E1ofj]xuM|t6s:", + "size": 5216926, + "width": 2688, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "2688x1536", + "seed": 1766198057, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Lara Croft climbing a rock ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "Denoising strength": "0.44", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/76242022-705b-4095-bf3b-29279353f7f2/width=450/6851834.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "UD8;#{?wNHofRjRmM{MxRPbctSxvWBRPW=xu", + "type": "image", + "metadata": { + "hash": "UD8;#{?wNHofRjRmM{MxRPbctSxvWBRPW=xu", + "size": 1653517, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 2102279210, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of Gundam in space , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, 
extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/45775d7a-45d6-4f8e-adde-a556d824c5e4/width=450/6851833.jpeg", + "nsfw": "None", + "width": 1344, + "height": 768, + "hash": "UFAdr?MdS5D*.TVrV@WC%zMxIBt7NfM{jEs;", + "type": "image", + "metadata": { + "hash": "UFAdr?MdS5D*.TVrV@WC%zMxIBt7NfM{jEs;", + "size": 1794933, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 3780743653, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a Monster in a graveyard , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/30c32805-6635-4ac8-9c69-5bfd63e6edc8/width=450/6851835.jpeg", + "nsfw": "Soft", + "width": 1344, + "height": 768, + "hash": "U5BDfzJC00^P*0n44TTJkppH+tVEt-tlMxIB", + "type": "image", + "metadata": { + "hash": "U5BDfzJC00^P*0n44TTJkppH+tVEt-tlMxIB", + "size": 1747168, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 2048429992, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of princess Xena in a forest with a sword , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c261b24b-09b9-4e82-a9fe-b820685523e8/width=450/6851837.jpeg", + "nsfw": 
"None", + "width": 1344, + "height": 768, + "hash": "UKCOwO-:70Ip}t%LK4NHO=xawynjOroy$iae", + "type": "image", + "metadata": { + "hash": "UKCOwO-:70Ip}t%LK4NHO=xawynjOroy$iae", + "size": 1729100, + "width": 1344, + "height": 768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vaefp16.safetensors", + "Size": "1344x768", + "seed": 3494450910, + "Model": "sd_xl_base_1.0", + "steps": 70, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "impactful paint of a red Rose , highly detailed, 8k, sharp, professional, clear, high contrast, high saturated, , vivid deep blacks, crystal clear", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "235745af8d", + "cfgScale": 7, + "resources": [ + { + "name": "Vintage_Anime", + "type": "lora", + "weight": 0.8 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"Vintage_Anime": "3ad5a0ba4211\"", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, disfigured, deformed, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, multiple hands, low contrast, low resolution, out of focus", + "Style Selector Style": "Paint 1 - No Color", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/351556" +} \ No newline at end of file diff --git a/Vintage_Anime.preview.png b/Vintage_Anime.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..af3bb817dfeed2bdeb3b9995d43faf22615b9237 Binary files /dev/null and b/Vintage_Anime.preview.png differ diff --git a/Vintage_Anime.safetensors b/Vintage_Anime.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..471a6d58c38470081f70590b2bb5a5a3761ea723 --- /dev/null +++ b/Vintage_Anime.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87c39ca765e0ec9a1f1a50161f396f5e2be640a4a5fda590874bda13b735bb4a +size 456489964 diff --git a/add-detail-xl.civitai.info b/add-detail-xl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..4eabbaa2bc4e2f9aae43fc63486fbe98cf81fea5 --- /dev/null +++ b/add-detail-xl.civitai.info @@ -0,0 +1,468 @@ +{ + "id": 135867, + "modelId": 122359, + "name": "v1.0", + "createdAt": "2023-08-07T14:51:04.506Z", + "updatedAt": "2023-08-07T14:55:02.629Z", + "status": "Published", + "publishedAt": "2023-08-07T14:55:02.627Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 58534, + "ratingCount": 4079, + "rating": 4.98, + "thumbsUpCount": 4367 + }, + "model": { + "name": "Detail Tweaker XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 99264, + "sizeKB": 223097.9921875, + "name": "add-detail-xl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-07T14:55:53.517Z", + "hashes": { + "AutoV1": "29A40D2E", + "AutoV2": "0D9BD1B873", + "SHA256": "0D9BD1B873A7863E128B4672E3E245838858F71469A3CEC58123C16C06F83BD7", + "CRC32": "A94E124F", + "BLAKE3": 
"595854A2079ABB9AF0FD84830DDF1142ABFFAA70DEBF51C25C2E73A248CB11CA" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/135867" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/217179cb-87a0-4e96-8d77-e410f757aba0/width=450/1917130.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UUHK;5WYoJR*~qWCj[j[_Ns:a}of_3s:azof", + "type": "image", + "metadata": { + "hash": "UUHK;5WYoJR*~qWCj[j[_Ns:a}of_3s:azof", + "size": 4192036, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 3308533307, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "photo, 8k portrait of beautiful cyborg with brown hair, intricate, elegant, highly detailed, majestic, digital photography, art by artgerm and ruan jia and greg rutkowski surreal painting gold butterfly filigree, broken glass, (masterpiece, sidelighting, finely detailed beautiful eyes: 1.2), hdr, realistic, high definition, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1132c8aa-053e-468b-9f80-32a071f73f57/width=450/1917133.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UWDb~Kofa|of?vj[j[kC~WbHj[kC~qoffkof", + "type": "image", + "metadata": { + "hash": "UWDb~Kofa|of?vj[j[kC~WbHj[kC~qoffkof", + "size": 3810435, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 3407892333, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "city street, photo, night, lights, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/270b159f-d91b-424b-8ac6-14857726f1f1/width=450/1917136.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UjHBSroJays._4oJazs.?cWDayWXpJa}oLWX", + "type": "image", + "metadata": { + "hash": "UjHBSroJays._4oJazs.?cWDayWXpJa}oLWX", + "size": 3386323, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 4138801743, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "anime, road, mountains, sunset, village, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1.5>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f7cef77a-eabb-4d89-9faa-399bef495e32/width=450/1917137.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + 
"hash": "UgF5]ot7ayt8_Mofjtof?cbHfQfk-=ayj[ay", + "type": "image", + "metadata": { + "hash": "UgF5]ot7ayt8_Mofjtof?cbHfQfk-=ayj[ay", + "size": 3989467, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 2161439391, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "cyberpunk city, , ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "\"eduardo-xl": "1be1874242ec", + "add-detail-xl": "1>\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4d6138b0-6e54-4331-8326-1b5291fed0d8/width=450/1917142.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UONAYXIUbIs;_Ns:ayof.Aayjsof~Vofofof", + "type": "image", + "metadata": { + "hash": "UONAYXIUbIs;_Ns:ayof.Aayjsof~Vofofof", + "size": 1777229, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 1138549894, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "a very cute tiny mouse standing with a piece of cheese, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fc49dbba-7f86-4d9d-a8cb-bfc8e483e3af/width=450/1917146.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UTE{h0t7e=of_Na#jubH~qofa{oM?vodj@of", + "type": "image", + "metadata": { + "hash": "UTE{h0t7e=of_Na#jubH~qofa{oM?vodj@of", + "size": 5359238, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 4199339366, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "old man, long beard, smoking pipe, oil painting, sitting in garden, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f7a46c72-8d6e-41fe-b6e1-3b093039b036/width=450/1917151.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UYF#,ho#ofoz~podj[oe_4WXfQbI?vofj[bH", + "type": "image", + "metadata": { + "hash": "UYF#,ho#ofoz~podj[oe_4WXfQbI?vofj[bH", + "size": 5450868, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 3638304036, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "(masterpiece:1.1), (highest quality:1.1), (HDR:1.3), (top quality, best quality, official art, beautiful and aesthetic:1.2), woman, extremely detailed, (fractal art:1.1), 
(colorful:1.1), highest detailed, (zentangle:1.2), (dynamic), (abstract background:1.3), (shiny), (many colors:1.4), solo, coral background, yellow lightning, cinematic lighting, long hair, detailed black eyes, highest quality face, (sky aesthetic), ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/41d4eb52-3728-4fd1-864b-73f23cfd969b/width=450/1917152.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "UnNvrYt7WBt6?wV@j[bHpJkCj[j?%Ms.bIkC", + "type": "image", + "metadata": { + "hash": "UnNvrYt7WBt6?wV@j[bHpJkCj[j?%Ms.bIkC", + "size": 3356481, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 1330402432, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "cat made of spaghetti, perfect composition, masterpiece, best quality, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "1>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0e537b14-d13c-4053-90f8-4ccc081fdf74/width=450/1917153.jpeg", + "nsfw": "None", + "width": 1797, + "height": 1158, + "hash": "UFON2nRjRj={~qM{RjS$?c%2t7R-%gNGWVaK", + "type": "image", + "metadata": { + "hash": "UFON2nRjRj={~qM{RjS$?c%2t7R-%gNGWVaK", + "size": 1378156, + "width": 1797, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 1603824148, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "dog, low poly, white background, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "0>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/08a14430-af56-40a1-971f-4180f187626d/width=450/1917154.jpeg", + "nsfw": "None", + "width": 2698, + "height": 1158, + "hash": "URIXv?RQkCV@?wRjt7kC~qkCbHWV_3t7R*t7", + "type": "image", + "metadata": { + "hash": "URIXv?RQkCV@?wRjt7kC~qkCbHWV_3t7R*t7", + "size": 2706285, + "width": 2698, + "height": 1158 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1024", + "seed": 447634022, + "Model": "dreamshaperXL10_alpha2Xl10", + "steps": 20, + "Script": "X/Y/Z plot", + "X Type": "Prompt S/R", + "hashes": { + "model": "82b5f664ae" + }, + "prompt": "anime girl eating ramen, ", + "Version": "v1.5.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "82b5f664ae", + "name": "dreamshaperXL10_alpha2Xl10", + "type": "model" + } + ], + "Model hash": "82b5f664ae", + "add-detail-xl": "2>\"", + "\"add-detail-xl": "9c783c8ce46c\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/135867" +} \ No newline at end of 
file diff --git a/add-detail-xl.preview.png b/add-detail-xl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..4e42e82bc40db572dead7d0e150239eedd089995 Binary files /dev/null and b/add-detail-xl.preview.png differ diff --git a/add-detail-xl.safetensors b/add-detail-xl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b900275cbf39f556ed0d01be306dc372039cb42e --- /dev/null +++ b/add-detail-xl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d9bd1b873a7863e128b4672e3e245838858f71469a3cec58123c16c06f83bd7 +size 228452344 diff --git a/aesthetic_anime_v1s.civitai.info b/aesthetic_anime_v1s.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..62bc2cf2c0df23d2966a78ead3912f7ab7b5d31e --- /dev/null +++ b/aesthetic_anime_v1s.civitai.info @@ -0,0 +1,574 @@ +{ + "id": 331598, + "modelId": 295100, + "name": "v1s", + "createdAt": "2024-02-06T14:59:02.922Z", + "updatedAt": "2024-02-06T15:07:34.028Z", + "status": "Published", + "publishedAt": "2024-02-06T15:07:34.026Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 6359, + "ratingCount": 316, + "rating": 5, + "thumbsUpCount": 629 + }, + "model": { + "name": "Aesthetic Anime LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 262864, + "sizeKB": 332781.63671875, + "name": "aesthetic_anime_v1s.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-06T15:05:51.454Z", + "hashes": { + "AutoV1": "15851BA6", + "AutoV2": "2B83812DFA", + "SHA256": "2B83812DFA9E63D0EE1966CF202A22F4634B5CDB203D1FA2D66AA68E03CEDF36", + "CRC32": "68BD7100", + "BLAKE3": "67423C1613E31AADC7B023365A7FAED69189B0ABF8E3A263C466F4A73168FA42", + "AutoV3": "96312F4032A93BCE3589469F5A84528A1B858A9A46E41949DFD009CC40F6A487" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/331598" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7cd96ef0-a9dc-4906-b258-f389ff640a1a/width=450/6328373.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "U8EDC:=DwcNH~p%g-44n4m4.-PR-I:DiDhf-", + "type": "image", + "metadata": { + "hash": "U8EDC:=DwcNH~p%g-44n4m4.-PR-I:DiDhf-", + "size": 2773195, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "768x1216", + "seed": 2067885437, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\n1girl, solo, mechanical wings, blue eyes, wings, looking at viewer, long hair, covered navel, blue hair, bodysuit, medium breasts, mecha musume, lips, ponytail, bangs, aqua hair, skin tight, parted lips, headgear,\nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": "v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + 
"type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "negativeXL_D": "fff5d51ab655\"", + "\"negativeXL_D": "fff5d51ab655", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "nipples, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dbf72811-9c2e-4f41-8896-d7f97924a68b/width=450/6328390.jpeg", + "nsfw": "None", + "width": 1696, + "height": 1072, + "hash": "U8Av|VRODgNy.AE1waxuI_$$M{n#Iq-oS%IU", + "type": "image", + "metadata": { + "hash": "U8Av|VRODgNy.AE1waxuI_$$M{n#Iq-oS%IU", + "size": 2573880, + "width": 1696, + "height": 1072 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "1216x768", + "seed": 2067885444, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "8k,((best quality)),((masterpiece)),high_res,realistic,(from above:1.2),(full shot:1.4),cowboy shot 1 girl,solo,mature,on the balcony,mutlicolor hair,black sport shirt,white shorties,sneakers,background,(river, night city),background,bioluminescent,HDR,Deph Of Field,DOF,dynamicpose,perspectives, ", + "Version": "v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "negativeXL_D": "fff5d51ab655\"", + "\"negativeXL_D": "fff5d51ab655", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "Denoising strength": "0.52", + "ADetailer mask blur": "4", + "\"aesthetic_anime_v1s": "96312f4032a9\"", + "ADetailer confidence": "0.25", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.3", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/11c3ecce-5cb3-4745-89cf-6ae381e2bc5d/width=450/6328376.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "UGGuRIWBIAxu~Wxu4:E1IUt7M|xtD*IoNGxu", + "type": "image", + "metadata": { + "hash": "UGGuRIWBIAxu~Wxu4:E1IUt7M|xtD*IoNGxu", + "size": 2881348, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "768x1216", + "seed": 2067885437, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\n1girl, breasts, armor, looking at viewer, shoulder armor, belt, cleavage, white hair, long hair, gloves, solo, pants, earrings, medium breasts, jewelry, cleavage cutout, black gloves, pauldrons, closed mouth, hair between eyes, standing, cowboy shot, pillar, black pants, gauntlets, breastplate, sepia tones, \nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": "v1.6.1", + "sampler": "Euler a", + 
"cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "nipples, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3e57e31d-0870-4938-88eb-0431301770f4/width=450/6328486.jpeg", + "nsfw": "None", + "width": 3392, + "height": 1272, + "hash": "UnFiJlaeWBay~qR*j@ay_3WBWBt7?bayWVof", + "type": "image", + "metadata": { + "hash": "UnFiJlaeWBay~qR*j@ay_3WBWBt7?bayWVof", + "size": 4522572, + "width": 3392, + "height": 1272 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9d40877b-4def-4428-8d7d-dd1dff3ad3a0/width=450/6328442.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "U7Ci{#_2?wDi5YxZ9uxD=Xo~RibI9Fx]Rkn}", + "type": "image", + "metadata": { + "hash": "U7Ci{#_2?wDi5YxZ9uxD=Xo~RibI9Fx]Rkn}", + "size": 2576704, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1216", + "seed": 2067885438, + "Model": "DreamShaperXL_Turbo_dpmppSdeKarras_half_pruned_6", + "steps": 8, + "hashes": { + "model": "676f0d60c8", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "(masterpiece, best quality, ultra-detailed, best shadow), cinematic film still, realistic, man wearing a high tech scifi armor, mecha armor, male focus, armor, solo, facial hair, cape, beard, looking at viewer, blue eyes, blurry background, power armor, realistic, knee protection, standing, brown hair, science fiction ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "676f0d60c8", + "name": "DreamShaperXL_Turbo_dpmppSdeKarras_half_pruned_6", + "type": "model" + } + ], + "Model hash": "676f0d60c8", + "Hires steps": "5", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "ADetailer model": "mediapipe_face_mesh_eyes_only", + "ADetailer version": "23.11.1", + "Denoising strength": "0.52", + "ADetailer mask blur": "4", + "\"aesthetic_anime_v1s": "96312f4032a9\"", + "ADetailer confidence": "0.2", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.3", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/23d09832-a46e-4c9d-b185-77df7e7e786f/width=450/6328381.jpeg", + "nsfw": "None", + "width": 1696, + "height": 1072, + "hash": "URG+L7Zz%M.TOa%LxvT0NFtmWAoK%LjYr;o#", + "type": "image", + "metadata": { + "hash": "URG+L7Zz%M.TOa%LxvT0NFtmWAoK%LjYr;o#", + "size": 2307961, + "width": 1696, + "height": 1072 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "1216x768", + "seed": 1802424623, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + 
"lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\n1girl, solo, flower, long hair, outdoors, letterboxed, school uniform, day, sky, looking up, short sleeves, parted lips, shirt, cloud, black hair, sunlight, white shirt, serafuku, upper body, from side, pink flower, blurry, brown hair, blue sky, depth of field,\nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": "v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "negativeXL_D": "fff5d51ab655\"", + "\"negativeXL_D": "fff5d51ab655", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "long neck, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/064d6bdb-6743-40a7-8988-b5b6ca7d47b8/width=450/6328400.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "UCEofS^K00xu}Y=^M^WY$%oLNGnO%e%3spNw", + "type": "image", + "metadata": { + "hash": "UCEofS^K00xu}Y=^M^WY$%oLNGnO%e%3spNw", + "size": 3632804, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "768x1216", + "seed": 2067885437, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "(masterpiece, top quality, best quality, official art, beautiful and aesthetic:1.2),(1girl:1.4),upper body,([pink|blue] hair:1.5),extreme detailed,(fractal art:1.3),(colorful:1.5),highest detailed,(Mechanical modification:1.5), ", + "Version": "v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "negativeXL_D": "fff5d51ab655\"", + "\"negativeXL_D": "fff5d51ab655", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bfcbc59b-1ae3-4b65-9e5a-61d53154636f/width=450/6328377.jpeg", + "nsfw": "None", + "width": 1696, + "height": 1072, + "hash": "ULFsDK%MD%IqyZof-:-po#s+a#aKIpj@aJV?", + "type": "image", + "metadata": { + "hash": "ULFsDK%MD%IqyZof-:-po#s+a#aKIpj@aJV?", + "size": 3077125, + "width": 1696, + "height": 1072 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "1216x768", + "seed": 2067885437, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "embed:negativeXL_D": "fff5d51ab6", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\nsolo, white mecha robot, cape, science fiction, torn clothes, glowing, standing, robot joints, mecha, armor, cowboy shot, (floating cape), 
intense sunlight, silver dragonborn, outdoors, landscape, nature\nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": "v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "negativeXL_D": "fff5d51ab655\"", + "\"negativeXL_D": "fff5d51ab655", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "nipples, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs, negativeXL_D", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b9271037-1022-49b5-b1e6-31b19413bd92/width=450/6328379.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "UBBDpNbcxUNe%hV@RjsS^*NHo#M{?wM_Mwxv", + "type": "image", + "metadata": { + "hash": "UBBDpNbcxUNe%hV@RjsS^*NHo#M{?wM_Mwxv", + "size": 2694394, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1216", + "seed": 637670718, + "Model": "DreamShaperXL_Turbo_dpmppSdeKarras_half_pruned_6", + "steps": 8, + "hashes": { + "model": "676f0d60c8", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\ncinematic film still, realistic, 1girl, mecha scifi, solo, blue hair, blue eyes, cape, long hair, armor, looking at viewer, outdoors, lips, headgear, standing, science fiction, gauntlets, mechanical arms, realistic, arms at sides, depth of field,\nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "676f0d60c8", + "name": "DreamShaperXL_Turbo_dpmppSdeKarras_half_pruned_6", + "type": "model" + } + ], + "Model hash": "676f0d60c8", + "Hires steps": "5", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "long neck, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5d39b8b4-51bd-4c16-a223-86cc0ec629bd/width=450/6328382.jpeg", + "nsfw": "None", + "width": 1072, + "height": 1696, + "hash": "UNFO}{-pxYD%?dM{M{Iq9aWZt7xu57IqWYNF", + "type": "image", + "metadata": { + "hash": "UNFO}{-pxYD%?dM{M{Iq9aWZt7xu57IqWYNF", + "size": 2580910, + "width": 1072, + "height": 1696 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "ENSD": "31337", + "Size": "768x1216", + "seed": 2067885437, + "Model": "AAM_XL_Anime_Mix", + "steps": 28, + "hashes": { + "model": "d48c2391e0", + "lora:aesthetic_anime_v1s": "2b83812dfa" + }, + "prompt": "masterpiece, best quality,\n(1girl), long straight white hair, blue eyes, detailed face, schoolgirl uniform, detailed clothing, combat pose, gust of wind, vivid colors, highest resolution, wallpaper, professional illustration, intricate, highly detailed, landscape, dramatic lighting, cityscape view from harbor, intense light, \nhighres, 4k, 8k, intricate detail, cinematic lighting, amazing quality, wallpaper ", + "Version": 
"v1.6.1", + "sampler": "Euler a", + "cfgScale": 8, + "clipSkip": 2, + "resources": [ + { + "name": "aesthetic_anime_v1s", + "type": "lora", + "weight": 1.2 + }, + { + "hash": "d48c2391e0", + "name": "AAM_XL_Anime_Mix", + "type": "model" + } + ], + "Model hash": "d48c2391e0", + "Hires steps": "15", + "Hires upscale": "1.4", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "nipples, (low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "Denoising strength": "0.52", + "\"aesthetic_anime_v1s": "96312f4032a9\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/331598" +} \ No newline at end of file diff --git a/aesthetic_anime_v1s.preview.png b/aesthetic_anime_v1s.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..a56652ea3114e62e9a572e2d63b6654dbe3d92e4 --- /dev/null +++ b/aesthetic_anime_v1s.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b32c18a1295a06c3e2b72fd62e82bcec252e52ae227ad7d04ca407cceca795b +size 2864317 diff --git a/aesthetic_anime_v1s.safetensors b/aesthetic_anime_v1s.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2b906d9eebbb9f357a063db3aeda7f3aff73347f --- /dev/null +++ b/aesthetic_anime_v1s.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b83812dfa9e63d0ee1966cf202a22f4634b5cdb203d1fa2d66aa68e03cedf36 +size 340768396 diff --git a/blur_control_xl_v1.civitai.info b/blur_control_xl_v1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..559e5e2ab63ffce2e1b2adb5703edb8f377057ba --- /dev/null +++ b/blur_control_xl_v1.civitai.info @@ -0,0 +1,385 @@ +{ + "id": 364137, + "modelId": 324815, + "name": "v1.0", + "createdAt": "2024-02-27T16:15:13.846Z", + "updatedAt": "2024-02-27T16:24:56.772Z", + "status": "Published", + "publishedAt": "2024-02-27T16:24:56.768Z", + "trainedWords": [ + "sharp background" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 374, + "ratingCount": 25, + "rating": 5, + "thumbsUpCount": 46 + }, + "model": { + "name": "Better Blur Control / Less Depth of Field XL LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 291043, + "sizeKB": 332781.63671875, + "name": "blur_control_xl_v1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-27T16:20:53.797Z", + "hashes": { + "AutoV1": "EFCF21DB", + "AutoV2": "72C7EE925F", + "SHA256": "72C7EE925F08880A63790134FF809A4CF6A46E42462B1C33EC1199F393CD37EA", + "CRC32": "A37D4E55", + "BLAKE3": "24ABF309663B976C72C681DB865C0B8CE61C1C2BCCE50435F6336C54CD440901", + "AutoV3": "41ED30CA3A728EF53789F4E9E0B75A462352B2C684237D36353A0EA4DBF5752C" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/364137" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bf86159a-5562-4523-ba42-752a06994667/width=450/7188221.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "ULHeLN9tbv?b=DShNGxZ_NV@Vsxu%gIUVsxv", + "type": "image", + "metadata": { + "hash": "ULHeLN9tbv?b=DShNGxZ_NV@Vsxu%gIUVsxv", + "size": 3314619, + "width": 1536, + "height": 2048 + 
}, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580709, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of 
field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of 
field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.35000000000000003]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580709,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background, depth of field\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node 
name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background,a girl taking a selfie, instagram, cinematic, photography, realistic, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background, depth of field", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6da7c4b2-ea24-4f4c-9fee-e40da5bca604/width=450/7188222.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1177, + "hash": "UXJR8SV@R+fj~qoLbHRP_NfQj[oL?vt7jst7", + "type": "image", + "metadata": { + "hash": "UXJR8SV@R+fj~qoLbHRP_NfQj[oL?vt7jst7", + "size": 2120357, + "width": 1536, + "height": 1177 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0ea1f04f-dd2e-4d97-9522-a2ca682c4df7/width=450/7188251.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1177, + "hash": "UeHeUkkWWBoz~qofa}oL-;aykCju?bWBa|a}", + "type": "image", + "metadata": { + "hash": "UeHeUkkWWBoz~qofa}oL-;aykCju?bWBa|a}", + "size": 1830871, + "width": 1536, + "height": 1177 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5a1aee8a-cace-49aa-83d6-78ab71428c57/width=450/7188224.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "U3Bftn4U009a02WB~9~V00.7RPI;0MozE1Ri", + "type": "image", + "metadata": { + "hash": "U3Bftn4U009a02WB~9~V00.7RPI;0MozE1Ri", + "size": 3408875, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580685, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry 
background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with 
a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580685,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580685,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background, a man with a black trench coat and a het walking in the streets in the evening, cinematic, photography, realistic, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9d713604-6b89-4a77-b44e-8d94512ffa8a/width=450/7188236.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "URH_#+IpS5~q_3WBRjxutRM{Rjoft7WVayt7", + "type": "image", + "metadata": { + "hash": "URH_#+IpS5~q_3WBRjxutRM{Rjoft7WVayt7", + "size": 3250316, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580709, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580709,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.35000000000000003]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580709,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background, depth of field\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background,a cat sitting by a window, cinematic, photography, realistic, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background, depth of field", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8d909e0d-21d9-4ce4-9632-c0daa67c4e00/width=450/7188234.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "UMHn]J0KT0~W~qozozxaxvM_aeRkx]M{Rjxu", + "type": "image", + "metadata": { + "hash": "UMHn]J0KT0~W~qozozxaxvM_aeRkx]M{Rjxu", + "size": 3104253, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580708, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580708,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality,\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.35000000000000003]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality,\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580708,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "blurry background,a cat sitting by a window, cinematic, photography, realistic, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality,", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/85bdc09f-7bd5-49ea-8f90-9d4f2a407471/width=450/7188226.jpeg", + "nsfw": "Mature", + "width": 2048, + "height": 1536, + "hash": "UIHC1Q-=S}$%O[jc9FWU~qNHnhWUX.bcM{s:", + "type": "image", + "metadata": { + "hash": "UIHC1Q-=S}$%O[jc9FWU~qNHnhWUX.bcM{s:", + "size": 3567862, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580694, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580694,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580694,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background, a girl by the pool in a resort, cinematic, 
photography, realistic, instagram, amazing quality\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background, a girl by the pool in a resort, cinematic, photography, realistic, instagram, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0694d21c-128f-4296-ae74-354c21ea9435/width=450/7188249.jpeg", + "nsfw": "None", + "width": 2304, + "height": 1177, + "hash": "UjJ*hvWVWCay_NWBayof%gaekBj[-;ayaxkC", + "type": "image", + "metadata": { + "hash": "UjJ*hvWVWCay_NWBayof%gaekBj[-;ayaxkC", + "size": 3928379, + "width": 2304, + "height": 1177 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/00ae72b3-c8cd-49a5-9ae7-0ba78545a6e5/width=450/7188225.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "UEFrz#tL9rV]~VouIVs;00xanSoz8}-poLI.", + "type": "image", + "metadata": { + "hash": "UEFrz#tL9rV]~VouIVs;00xanSoz8}-poLI.", + "size": 3752796, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580692, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry 
background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580692,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580692,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background, adorable puppies playing in the grass, cinematic, 
photography, realistic, amazing quality\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background, adorable puppies playing in the grass, cinematic, photography, realistic, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f2ecae1a-bd87-4d18-a4f4-f4cdf7bad67b/width=450/7188232.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U5BDsR%L-.x]*0E1IqtS00E2DiNH8woz-6M{", + "type": "image", + "metadata": { + "hash": "U5BDsR%L-.x]*0E1IqtS00E2DiNH8woz-6M{", + "size": 3025892, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580707, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"blur_control/blur_control_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality, blurry background, depth of field\",\"clip\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"blur_control_xl_v1.safetensors\",\"strength_model\":1.1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"blur_control/blur_control_upscale\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580707,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality, blurry background, depth of field\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"blur_control_xl_v1.safetensors\",1.1,1]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.35000000000000003]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "sharp background,a beautiful black woman walking in futuristic city wearing a headset, full body shot, cinematic, photography, realistic, scifi, cyberpunk, amazing quality", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality, blurry background, depth of field", + "additionalResources": [] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/364137" +} \ No newline at end of file diff --git a/blur_control_xl_v1.preview.png b/blur_control_xl_v1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..942b60e8223880d05cebbe3142271e0f17459ed2 Binary files /dev/null and b/blur_control_xl_v1.preview.png differ diff --git a/blur_control_xl_v1.safetensors b/blur_control_xl_v1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..11f38ba9ffc4f54ae0085921a6c34b2a77e4c697 --- /dev/null +++ b/blur_control_xl_v1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:72c7ee925f08880a63790134ff809a4cf6a46e42462b1c33ec1199f393cd37ea +size 340768396 diff --git a/boringRealism_primaryV4.civitai.info b/boringRealism_primaryV4.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..7da14eddb6a26519d3fba6b3db62e77f1b000c13 --- /dev/null +++ b/boringRealism_primaryV4.civitai.info @@ -0,0 +1,444 @@ +{ + "id": 348837, + "modelId": 310571, + "name": "BoringReality_primaryV4.0", + "createdAt": "2024-02-18T06:36:57.439Z", + "updatedAt": "2024-02-18T06:46:37.070Z", + "status": "Published", + "publishedAt": "2024-02-18T06:46:37.068Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

As with the other LoRA versions, this LoRA will not work well on its own. It should be used alongside other LoRAs, with weights adjusted to preference.

", + "stats": { + "downloadCount": 1725, + "ratingCount": 188, + "rating": 5, + "thumbsUpCount": 329 + }, + "model": { + "name": "Boring Reality", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 277695, + "sizeKB": 445799.875, + "name": "boringRealism_primaryV4.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-18T06:45:06.015Z", + "hashes": { + "AutoV1": "3ED61AE3", + "AutoV2": "11ECF170D9", + "SHA256": "11ECF170D9419C658ECE68B5F82AF296A84A6C5393849F2C547CEBBA30A2EEB4", + "CRC32": "02F7374C", + "BLAKE3": "99BB6CED641D916AF5615A4B0A6EBD7F25052A5EB71494AA5BC284F2FD46757C", + "AutoV3": "A41FBE2273AF25A50A938B27F1AD33916D5B7646AB356F9F2FDD1A9E41201AF1" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/348837" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fe2789e5-d469-49f6-8005-89509c7524d0/width=450/6778989.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHC$_taet,t757X9nOV@~VWBbbt7EMtRjFRj", + "type": "image", + "metadata": { + "hash": "UHC$_taet,t757X9nOV@~VWBbbt7EMtRjFRj", + "size": 1428649, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 319831130, + "Model": "sd_xl_base_1.0", + "steps": 24, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "photo of two 25 year old korean women sitting at a anime conference in the afternoon in 1989. ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.9" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/40754aae-ce81-4f80-9338-4ad9634f747f/width=450/6779104.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHC7czDhMy%f_Nf#WBa$M|RPozxu8_kWoyRP", + "type": "image", + "metadata": { + "hash": "UHC7czDhMy%f_Nf#WBa$M|RPozxu8_kWoyRP", + "size": 1513819, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 134934905, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "photo of three librarians inside at work. 
", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 9, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.9" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/490b010d-8ab1-4fdd-adfc-b020cf696c65/width=450/6779127.jpeg", + "nsfw": "None", + "width": 768, + "height": 1400, + "hash": "UNG[i?Myx[-o~TWajWRjeSxtWDM{OtNGV@M|", + "type": "image", + "metadata": { + "hash": "UNG[i?Myx[-o~TWajWRjeSxtWDM{OtNGV@M|", + "size": 1716958, + "width": 768, + "height": 1400 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1400", + "seed": 2943916447, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "phone photo of a smiling man at a walmart ", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "\"boring-v4": "a41fbe2273af", + "Model hash": "31e35c80fc", + "boring-alpha-v3": "23c9fa12f1fd", + "boring-v4-faces-version": "fc75277e0414\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5256aad7-fa22-43e8-9ce1-9984b04391da/width=450/6778993.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "UCAmYqngE1o}0MtTn$V?_4RiRks:E3t8xCfl", + "type": "image", + "metadata": { + "hash": "UCAmYqngE1o}0MtTn$V?_4RiRks:E3t8xCfl", + "size": 1443726, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3181678674, + "Model": "sd_xl_base_1.0", + "steps": 24, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "photo of a 25 year old korean woman at a beach in the afternoon in 2016. ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.9" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/55d8e070-f65d-49c0-bed2-c00750863373/width=450/6779135.jpeg", + "nsfw": "None", + "width": 1080, + "height": 1080, + "hash": "U57-pE?bI.NFD5-;E2Dj?^IVxukX9F%exGM{", + "type": "image", + "metadata": { + "hash": "U57-pE?bI.NFD5-;E2Dj?^IVxukX9F%exGM{", + "size": 1446311, + "width": 1080, + "height": 1080 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1080x1080", + "seed": 331140834, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "closeup 1936 Kodachrome photo of a ship in the ocean. 
", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "\"boring-v4": "a41fbe2273af", + "Model hash": "31e35c80fc", + "boring-alpha-v3": "23c9fa12f1fd", + "Denoising strength": "0.9", + "boring-v4-faces-version": "fc75277e0414\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/efc0553b-4ccc-4bf3-ba33-754998eca6aa/width=450/6778966.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7E{h5Iv4,R$%j?HIX.800%#00Di%z-=4T8_", + "type": "image", + "metadata": { + "hash": "U7E{h5Iv4,R$%j?HIX.800%#00Di%z-=4T8_", + "size": 2006074, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2668902633, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "photo of two woman with their eyes closed at a graveyard", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"boring-v4-faces-version": "fc75277e0414\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e84bea9e-692f-4c2d-a90c-7a2c12d07bf6/width=450/6779007.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U67n~=t+x@?a4U-.tRDP_KM}RQkq4UVsWCb^", + "type": "image", + "metadata": { + "hash": "U67n~=t+x@?a4U-.tRDP_KM}RQkq4UVsWCb^", + "size": 1362392, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1450410728, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "creepy Kodachrome photo of a disturbing hooded blue figures creeping into a room in 1936. ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.94" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f752b2b8-ccb0-4524-8ae2-9b0b27d2cedf/width=450/6779025.jpeg", + "nsfw": "Soft", + "width": 1024, + "height": 1024, + "hash": "U68q+w?].7%eR5_2%gkDDjDkR5jZ00IWM{RP", + "type": "image", + "metadata": { + "hash": "U68q+w?].7%eR5_2%gkDDjDkR5jZ00IWM{RP", + "size": 1334005, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2447040894, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "creepy Kodachrome photo of a disturbing hooded white brides in bridal gowns creeping into a room in 1936. 
", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.94" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dc4f0880-499c-4d24-b31f-a90f63e3c986/width=450/6779096.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UE9Z_oIYV[s,~o9cE2ax-;M}IV%0xus:D+-n", + "type": "image", + "metadata": { + "hash": "UE9Z_oIYV[s,~o9cE2ax-;M}IV%0xus:D+-n", + "size": 1302121, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2904046600, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "phone photo of a smiling 28 year old woman with long hair posing on the titanic. ", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 9, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.9" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/43c0eee2-138b-43ac-b2be-dbfc0c35239a/width=450/6779102.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHBzh34n9I^%yZr:n#IpyYROt2R:t-V?ayog", + "type": "image", + "metadata": { + "hash": "UHBzh34n9I^%yZr:n#IpyYROt2R:t-V?ayog", + "size": 1605999, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 786266935, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "phone photo of a smiling 28 year old Indian woman with long hair posing on the titanic. 
", + "\"Module": "depth_midas", + "sampler": "DPM++ 2M Karras", + "cfgScale": 9, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Denoising strength": "0.9" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/348837" +} \ No newline at end of file diff --git a/boringRealism_primaryV4.preview.png b/boringRealism_primaryV4.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..3af56cf969235b004f955414c718959c07e9e0e9 Binary files /dev/null and b/boringRealism_primaryV4.preview.png differ diff --git a/boringRealism_primaryV4.safetensors b/boringRealism_primaryV4.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..376ea66430808a2a610329cffe3048546f0f58a4 --- /dev/null +++ b/boringRealism_primaryV4.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11ecf170d9419c658ece68b5f82af296a84a6c5393849f2c547cebba30a2eeb4 +size 456499072 diff --git a/cinexart.civitai.info b/cinexart.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..789a2603b88f3dbdd71ce85711aa7710ef1fb915 --- /dev/null +++ b/cinexart.civitai.info @@ -0,0 +1,519 @@ +{ + "id": 347814, + "modelId": 309883, + "name": "v1.0", + "createdAt": "2024-02-17T15:59:31.294Z", + "updatedAt": "2024-02-17T16:06:28.658Z", + "status": "Published", + "publishedAt": "2024-02-17T16:06:28.657Z", + "trainedWords": [ + "cinexart", + "movie poster" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1034, + "ratingCount": 143, + "rating": 4.99, + "thumbsUpCount": 271 + }, + "model": { + "name": "Cinexart", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 276824, + "sizeKB": 348362.49609375, + "name": "cinexart.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-17T16:06:25.359Z", + "hashes": { + "AutoV1": "3C51703C", + "AutoV2": "4E23390F02", + "SHA256": "4E23390F02F5BF20C157BCA51506725DBEB3E4F00ADD2A37C15D67030E12DB21", + "CRC32": "6CAB55F9", + "BLAKE3": "99DDB6EF0A4DC8B440B5371394FBC1A5D6C44257EC19A54E5EBE3899D9ABD330", + "AutoV3": "1E6FB1EE848921527E9253A93E3BD1A3F00864F0BE2640233490A8BDC3381CCA" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/347814" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d0dee3ee-4c3f-48c9-92e8-498a6990bfa4/width=450/6753538.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UKCj5$D*kU9H~oWUM|IVxtt7D*ay%1jZNGWC", + "type": "image", + "metadata": { + "hash": "UKCj5$D*kU9H~oWUM|IVxtt7D*ay%1jZNGWC", + "size": 1550038, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 2477607748, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart title: \"cinexart\" movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + 
{ + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6f546478-7cbf-481f-b194-2d99055561a1/width=450/6753564.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UZJZlIx]IUr?~Vt7$%WV56en-pW=IUs:ogxF", + "type": "image", + "metadata": { + "hash": "UZJZlIx]IUr?~Vt7$%WV56en-pW=IUs:ogxF", + "size": 1599231, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 3793505488, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"ROSE\") a rose flowing with red liquid, a woman in the background movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e647ace3-d202-4c96-8eb9-91bd143079ac/width=450/6753542.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UDD+u5yU9HQp.jx@X,NbE3niyVTvmns.yBjZ", + "type": "image", + "metadata": { + "hash": "UDD+u5yU9HQp.jx@X,NbE3niyVTvmns.yBjZ", + "size": 1801179, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 2477607752, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart title: \"cinexart\" movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/96ca7306-12a5-4d50-813b-447860a7bd6d/width=450/6753548.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "U69={V%L3UxtNuWqNHnj;$Rk9]R*3At6}Yj@", + "type": "image", + "metadata": { + "hash": "U69={V%L3UxtNuWqNHnj;$Rk9]R*3At6}Yj@", + "size": 1382236, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 65964492, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"Hell\") movie poster , ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/86ae4d16-3fa1-4d47-903a-eb19fe628b3b/width=450/6753567.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UBBMe=-ojZ%1~At6t6oe^%NHNHWC%KIpWCoe", + "type": 
"image", + "metadata": { + "hash": "UBBMe=-ojZ%1~At6t6oe^%NHNHWC%KIpWCoe", + "size": 1631870, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 2371568608, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"prophecy\") human skull in the background of the cathedral, movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0cb3d752-e5f7-42b4-a2b6-754fbdf84109/width=450/6753556.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UVFgg,oLIVoe~AWqVtoL%LoLrsj[kCj@xFj[", + "type": "image", + "metadata": { + "hash": "UVFgg,oLIVoe~AWqVtoL%LoLrsj[kCj@xFj[", + "size": 1614372, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 812883682, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"Hell\") movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4dc42496-4408-4ba8-bd00-9f09ec384ade/width=450/6753558.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "U5B-rP=cAV$i{|wfspso1iI;0}nj9]oyGEkB", + "type": "image", + "metadata": { + "hash": "U5B-rP=cAV$i{|wfspso1iI;0}nj9]oyGEkB", + "size": 1379868, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 812883684, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"Hell\") movie poster ", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/af407781-6acc-4b8e-9489-0c32fe09559c/width=450/6753566.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UJD[^8o|%yt6~Uxt-.smR6%1Szn%P9s:sns.", + "type": "image", + "metadata": { + "hash": "UJD[^8o|%yt6~Uxt-.smR6%1Szn%P9s:sns.", + "size": 1835487, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 3029678993, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title: \"demon ai\") cyborg girl movie poster ", + "Version": "v1.7.0", + 
"sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "bad anatomy, bad hands" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0f0c0728-fc12-40f4-8300-a353b67e2b15/width=450/6753606.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "UHC~hh-n~ToKxss.s:bHxtNHE2fQxsj[j[s:", + "type": "image", + "metadata": { + "hash": "UHC~hh-n~ToKxss.s:bHxtNHE2fQxsj[j[s:", + "size": 1847527, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 3065147583, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title \"TIME\") demonic time machine, movie poster", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/65514906-50d3-450f-8cd2-82067aea0d0b/width=450/6753608.jpeg", + "nsfw": "None", + "width": 904, + "height": 1280, + "hash": "U9EC5%^%?Z?G00?a?H-p~onOR5WB,]jF%MIU", + "type": "image", + "metadata": { + "hash": "U9EC5%^%?Z?G00?a?H-p~onOR5WB,]jF%MIU", + "size": 1697120, + "width": 904, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "904x1280", + "seed": 4184676813, + "Model": "sd_xl_base_1.0", + "steps": 35, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinexart (title \"fangs\") closeup of attacking wolf, movie poster", + "Version": "v1.7.0", + "sampler": "DPM++ 2S a Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "\"cinexart": "1e6fb1ee8489\"", + "resources": [ + { + "name": "cinexart", + "type": "lora", + "weight": 1 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/347814" +} \ No newline at end of file diff --git a/cinexart.preview.png b/cinexart.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..75bc6bb3c199f021060291e637e013bc58785e78 Binary files /dev/null and b/cinexart.preview.png differ diff --git a/cinexart.safetensors b/cinexart.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b06b1857cbc24261384355bd8256b75592a64499 --- /dev/null +++ b/cinexart.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e23390f02f5bf20c157bca51506725dbeb3e4f00add2a37c15d67030e12db21 +size 356723196 diff --git a/cutedoodle_XL-000012.civitai.info b/cutedoodle_XL-000012.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..5ea39a715043cb364291a4b4b29b9f363a62c6c2 --- /dev/null +++ b/cutedoodle_XL-000012.civitai.info @@ -0,0 +1,402 @@ +{ + "id": 190859, + "modelId": 132578, + "name": "Cute Doodle SDXL", + "createdAt": "2023-10-20T05:59:13.180Z", + "updatedAt": "2023-10-20T10:22:19.589Z", + "status": "Published", + "publishedAt": "2023-10-20T06:09:15.375Z", + 
"trainedWords": [ + "cute doodle" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 3143, + "ratingCount": 357, + "rating": 5, + "thumbsUpCount": 667 + }, + "model": { + "name": "[Lah] Cute Social | SDXL & SD1.5", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 145889, + "sizeKB": 223097.43359375, + "name": "cutedoodle_XL-000012.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-20T06:01:01.836Z", + "hashes": { + "AutoV1": "7AAFB040", + "AutoV2": "4D5964EDC7", + "SHA256": "4D5964EDC798AD9EED804902B1331160D7D8E8B053ACA7061690422E5444D222", + "CRC32": "ECE8B40D", + "BLAKE3": "C92645104C757D55EFFA3C7114DDFBB7B01CE2038861C56CE79774C581482B85" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/190859" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a080783e-b6d4-4b63-ba57-4e7f7b1a6955/width=450/3050046.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UGPNbG=p{ft,^SoYE0Kh-;MzI.w@|4o#ebax", + "type": "image", + "metadata": { + "hash": "UGPNbG=p{ft,^SoYE0Kh-;MzI.w@|4o#ebax", + "size": 722296, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 738411430, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, ground vehicle, backpack, heart, shoes, fang, bird, white hair, hair ornament, open mouth, solo, bag, long sleeves, pink eyes, smile, motor vehicle, skirt, red background, riding, jacket, socks, medium hair, short hair, bangs, sneakers, blush\n", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2bb0e7c0-e9ef-4916-bbf8-8c7405efb79c/width=450/3050048.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UQLx_ZIM={=x~BahN2k7^QxCrxWTry%0WUet", + "type": "image", + "metadata": { + "hash": "UQLx_ZIM={=x~BahN2k7^QxCrxWTry%0WUet", + "size": 587030, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 2446745452, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, yellow background, skirt, pink hair, heart, hat, bear, backpack, long sleeves, shoes, standing, blush, solo, blush stickers, jacket, short hair, limited palette, bangs, socks, black eyes, bag, full body, simple background ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9d4b1db8-bed1-4c8f-bff0-1acfcd4e3116/width=450/3050019.jpeg", + "nsfw": "None", + "width": 616, + "height": 
824, + "hash": "UJL:WJ=x~orsTw%1Rkq^2t9aR*%2NcpHOqbu", + "type": "image", + "metadata": { + "hash": "UJL:WJ=x~orsTw%1Rkq^2t9aR*%2NcpHOqbu", + "size": 650308, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 2050382923, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, ground vehicle, backpack, motor vehicle, shorts, open mouth, hat, black shorts, shoes, bag, hair ornament, shirt, hairclip, solo, blush, white shirt, long sleeves, red eyes, sneakers, short hair, black hair, brown hair\n ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fa5b574b-b60e-49ff-876a-59db1f1946f5/width=450/3050016.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UPMsA_-p}[x]=ybHXTRjRQso-oIoyDV@E1s;", + "type": "image", + "metadata": { + "hash": "UPMsA_-p}[x]=ybHXTRjRQso-oIoyDV@E1s;", + "size": 647473, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 95510162, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, solo, shirt, heart, black hair, long sleeves, pants, short hair, short sleeves, yellow background, print shirt, red eyes, black pants, blush, hair ornament, looking at viewer, jewelry, jacket, bangs, earrings\n ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c68d1fcb-d4f1-497a-ad4e-9623e02eb7ea/width=450/3050018.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UFNR}Y^*?w~V-:NHWBn$s*Ne%MtR$LD%bcg3", + "type": "image", + "metadata": { + "hash": "UFNR}Y^*?w~V-:NHWBn$s*Ne%MtR$LD%bcg3", + "size": 600980, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 203712762, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, !, brown hair, short hair, backpack, bag, shirt, black pants, cat, pants, blush, print shirt, hair ornament, open mouth, speech bubble, fang, short sleeves\n ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/91a06c07-aa7b-41d9-b94d-894a7bbcdfd2/width=450/3050017.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UNN[z6%2~VpbYONaZ%X7,@t7E1s:-BwzbvR*", + "type": "image", + "metadata": { + "hash": "UNN[z6%2~VpbYONaZ%X7,@t7E1s:-BwzbvR*", + "size": 646396, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 787883689, + "Model": "[Lah]MysteriousV4", 
+ "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, shirt, shoes, short hair, cup, red background, black shorts, shorts, print shirt, hairclip, holding, drinking straw, hair ornament, black hair, blush, smile, red eyes, holding cup, simple background, sparkle, english text, bangs, standing, green eyes, heart, long sleeves, sneakers, disposable cup, short sleeves\n ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a24ad91f-0544-4b19-848d-2f768385c593/width=450/3050014.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UJP:;W+[~3r=#FxsxCSgvyS~OXoI?YOqGEWA", + "type": "image", + "metadata": { + "hash": "UJP:;W+[~3r=#FxsxCSgvyS~OXoI?YOqGEWA", + "size": 622109, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 1641416674, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, bird, shoes, solo, backpack, socks, open mouth, shorts, blue shorts, red footwear, yellow background, red eyes, brown hair, hair ornament, bag, long sleeves, short hair, heart, jacket, blush, bangs, riding, speech bubble\n", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/405b0067-5df3-49b0-8449-032a90d0be7e/width=450/3050015.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "UDKx3N-x?M~7_3MwE8s;~ls5IWNgxQ9k^hou", + "type": "image", + "metadata": { + "hash": "UDKx3N-x?M~7_3MwE8s;~ls5IWNgxQ9k^hou", + "size": 670308, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 3588887895, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": "da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, skateboard, shoes, shirt, shorts, long hair, blue shorts, yellow background, blue eyes, open mouth, print shirt, socks, white shirt, braid, short sleeves, backpack, sneakers, full body, standing, bow, looking at viewer, smile, solo, blush, twintails, hair bow, blush stickers, bag, black footwear, jewelry, white hair, english text, earrings\n ", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/aa91ef1d-d8fe-431e-a752-2ca448d51db9/width=450/3050047.jpeg", + "nsfw": "None", + "width": 616, + "height": 824, + "hash": "U9HeU9C3KJZP=YTEML%g8krDmSO?E_n5yBIB", + "type": "image", + "metadata": { + "hash": "U9HeU9C3KJZP=YTEML%g8krDmSO?E_n5yBIB", + "size": 518365, + "width": 616, + "height": 824 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "616x824", + "seed": 1790903932, + "Model": "[Lah]MysteriousV4", + "steps": 40, + "hashes": { + "model": 
"da5ddce194" + }, + "prompt": "masterpiece, best quality,\ncute doodle, 1girl, shoes, black pants, food, solo, pants, red eyes, sweater, white footwear, bottle, white hair, long sleeves, short hair, blush, full body, medium hair, cup, green background, simple background, braid, jewelry, standing, limited palette, bangs, shirt\n", + "Version": "v1.6.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "da5ddce194", + "name": "[Lah]MysteriousV4", + "type": "model" + } + ], + "Model hash": "da5ddce194", + "\"cutedoodle_XL-000012": "f55252ff3b9b\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/190859" +} \ No newline at end of file diff --git a/cutedoodle_XL-000012.preview.png b/cutedoodle_XL-000012.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..8209fbb97bf4dee42018a0497ebb8c0d7bf5205d Binary files /dev/null and b/cutedoodle_XL-000012.preview.png differ diff --git a/cutedoodle_XL-000012.safetensors b/cutedoodle_XL-000012.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..541423a005556a869f2f4294d6943a9d22fe526e --- /dev/null +++ b/cutedoodle_XL-000012.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d5964edc798ad9eed804902b1331160d7d8e8b053aca7061690422e5444d222 +size 228451772 diff --git a/dataviz_style_xl_v1.civitai.info b/dataviz_style_xl_v1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..4caea3dfa1267a5fd867131f0f27ff8ddf28d410 --- /dev/null +++ b/dataviz_style_xl_v1.civitai.info @@ -0,0 +1,462 @@ +{ + "id": 352847, + "modelId": 314536, + "name": "v1.0", + "createdAt": "2024-02-20T17:31:24.726Z", + "updatedAt": "2024-02-21T14:24:10.799Z", + "status": "Published", + "publishedAt": "2024-02-20T17:55:12.452Z", + "trainedWords": [ + "dataviz style" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1694, + "ratingCount": 134, + "rating": 5, + "thumbsUpCount": 271 + }, + "model": { + "name": "Data Visualization Style XL LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 281167, + "sizeKB": 332781.63671875, + "name": "dataviz_style_xl_v1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-20T17:40:51.771Z", + "hashes": { + "AutoV1": "9D880E05", + "AutoV2": "0144CFDDE7", + "SHA256": "0144CFDDE7C5F2AD97537647485430CB653D26A31ECA3DAB8A2D691811F71359", + "CRC32": "3779726D", + "BLAKE3": "F257C3B480C67742A47488FE5A1FD567DBDE0D3C45290EA373AA437B10E57254", + "AutoV3": "54C8DD7B2357D0BE9F9BC8C98615AD36899189E3D438E7F6E0392351C57D06B0" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/352847" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c334a1e3-49ba-4da1-9458-c7538e393b2e/width=450/6883566.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "U17nj{?vtS00W=IAM{.800-;8_.8xux]D%x]", + "type": "image", + "metadata": { + "hash": "U17nj{?vtS00W=IAM{.800-;8_.8xux]D%x]", + "size": 2535423, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 1311043935, + 
"Model": "DreamShaperXL_Turbo_v2_1", + "steps": 8, + "hashes": { + "model": "4496b36d48" + }, + "prompt": "dataviz style, black and white network data visualization in the shape of a cyberpunk girl, masterpiece , hyper detailed, unreal engine, blue tones, stars ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "dataviz_style_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4496b36d48", + "name": "DreamShaperXL_Turbo_v2_1", + "type": "model" + } + ], + "Model hash": "4496b36d48", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "Denoising strength": "0.45", + "\"dataviz_style_xl_v1": "54c8dd7b2357\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9b02846f-5e73-4dc3-a28d-9fee6e961c02/width=450/6883450.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U6AAp{000Kel_4IUt7s,IoIVng%MadR+MwWY", + "type": "image", + "metadata": { + "hash": "U6AAp{000Kel_4IUt7s,IoIVng%MadR+MwWY", + "size": 3741440, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580723, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz 
style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580723,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":50,\"type\":\"Load 
Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1.2,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580723,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, a pack of butterflies rendered as a mesh grid, sparkles, blue tones, yellow tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/44775d46-18f7-4001-897a-d669e45f9bef/width=450/6883462.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "U6Cr~6wJ?^%M*yaKS2IU0KNaD*RPJSNHIoxZ", + "type": "image", + "metadata": { + "hash": "U6Cr~6wJ?^%M*yaKS2IU0KNaD*RPJSNHIoxZ", + "size": 2177521, + "width": 1152, + 
"height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/beffa753-a471-4fbc-b601-e7e61677f1f0/width=450/6883451.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "U25hx-8wWTRj.TH=%hoz4mMwx^tSo~IARPbb", + "type": "image", + "metadata": { + "hash": "U25hx-8wWTRj.TH=%hoz4mMwx^tSo~IARPbb", + "size": 3341412, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580711, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580711,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":50,\"type\":\"Load 
Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1.2,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580711,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, the silhouette of a whale swimming in space rendered as a mesh grid, sparkles, blue tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dc475699-1896-43f7-af05-191fdde1ddb2/width=450/6883581.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "U8BfnLxZ0659WANHjEs+03Rj~6--NLxVt8S5", + "type": "image", + "metadata": { + "hash": "U8BfnLxZ0659WANHjEs+03Rj~6--NLxVt8S5", + "size": 2321710, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 1311043935, + "Model": "DreamShaperXL_Turbo_v2_1", + "steps": 8, + "hashes": { + "model": "4496b36d48" + }, + "prompt": "dataviz style, black and white network 
data visualization in the shape of Pikachu, masterpiece , hyper detailed, unreal engine ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "dataviz_style_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4496b36d48", + "name": "DreamShaperXL_Turbo_v2_1", + "type": "model" + } + ], + "Model hash": "4496b36d48", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "Denoising strength": "0.45", + "\"dataviz_style_xl_v1": "54c8dd7b2357\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9aa26e7f-f64f-4bc2-9646-44af259d79bf/width=450/6883455.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "U89%k+D%00~WH?xuS#RPIooLxuIoozjZM{R*", + "type": "image", + "metadata": { + "hash": "U89%k+D%00~WH?xuS#RPIooLxuIoozjZM{R*", + "size": 3859314, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580707, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580707,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":50,\"type\":\"Load 
Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1,1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580707,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, the silhouette of Pennywise rendered as a mesh grid, datapoints, sparkles, horror movie, spooky, red tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4c72b314-debf-4dcb-8cb9-781ce002cf65/width=450/6883487.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U8EK_U%1#,}=?FR,NH-T58s.ENI@EMxFxZIr", + "type": "image", + "metadata": { + "hash": 
"U8EK_U%1#,}=?FR,NH-T58s.ENI@EMxFxZIr", + "size": 3460436, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580681, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low 
quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580681,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, 
stars\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580681,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, lara croft slihouette rendered as a mesh grid, datapoints, sparkles, brown tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cb5480e7-8004-4771-9a3c-41e23e275589/width=450/6883571.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "U16*m:=^00004nD%%M~q00W?tR%MMxoctSDi", + "type": "image", + "metadata": { + "hash": "U16*m:=^00004nD%%M~q00W?tR%MMxoctSDi", + "size": 2240217, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 1311043935, + "Model": "DreamShaperXL_Turbo_v2_1", + "steps": 8, + "hashes": { + "model": "4496b36d48" + }, + "prompt": "dataviz style, black and white network data visualization in the shape of a cat, masterpiece , hyper detailed, unreal engine, blue tones, stars ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "dataviz_style_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4496b36d48", + "name": "DreamShaperXL_Turbo_v2_1", + "type": "model" + } + ], + "Model hash": "4496b36d48", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "Denoising strength": "0.45", + "\"dataviz_style_xl_v1": "54c8dd7b2357\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b5ab401e-bc2e-46fd-b8f9-26034fba5cb9/width=450/6883493.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U28N;9D40000%Ms:M{4T00%iRO_3M{RjRP-;", + "type": "image", + "metadata": { + "hash": "U28N;9D40000%Ms:M{4T00%iRO_3M{RjRP-;", + "size": 3784144, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + 
"sizeKB": null, + "meta": { + "seed": 561187032580678, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, 
worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580678,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[561187032580678,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, spiderman slihouette rendered as a mesh grid, datapoints, sparkles, blue tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8ca15736-53f5-4783-b552-8177380a4105/width=450/6883548.jpeg", + "nsfw": "None", + "width": 1376, + "height": 1840, + "hash": "U15YNd8wDND$tT%0x^R:VWtnIptn-:9GnM?v", + "type": "image", + "metadata": { + "hash": "U15YNd8wDND$tT%0x^R:VWtnIptn-:9GnM?v", + "size": 2966483, + "width": 1376, + "height": 1840 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580652, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"dataviz/dataviz_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":1.8,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":1.8,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":1.8,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":1.8,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.35000000000000003,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, 
stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":1.8,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580652,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"dataviz_style_xl_v1.safetensors\",\"strength_model\":1.2,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":54,\"last_link_id\":118,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for 
S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"dataviz/dataviz_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"dataviz_style_xl_v1.safetensors\",1.2,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580652,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.35000000000000003]},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",1.8]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "dataviz style, the silhouette of prince charming rendered as a mesh grid, datapoints, sparkles, blue tones, stars", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/352847" +} \ No newline at end of file diff --git a/dataviz_style_xl_v1.preview.png b/dataviz_style_xl_v1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..cbce9f5edb8bc79a2823d3ff9a309a427de0d1e4 --- /dev/null +++ b/dataviz_style_xl_v1.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ac1fa7470be004549dff1d20747c97aec0fc9ec5eab0b64328f5803bd1c29ec +size 2525858 diff --git a/dataviz_style_xl_v1.safetensors b/dataviz_style_xl_v1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..99ec1af84c00f903b173c8bce264031a3decbbcb --- /dev/null +++ b/dataviz_style_xl_v1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0144cfdde7c5f2ad97537647485430cb653d26a31eca3dab8a2d691811f71359 +size 340768396 diff --git a/dvr-wwyt.civitai.info b/dvr-wwyt.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..0f34b392f6077d9dbb7d8c5684531c7df7a08195 --- /dev/null +++ b/dvr-wwyt.civitai.info @@ -0,0 +1,613 @@ +{ + "id": 354307, + "modelId": 315871, + "name": "v1.0", + "createdAt": "2024-02-21T15:38:53.364Z", + "updatedAt": "2024-02-21T16:18:39.776Z", + "status": "Published", + "publishedAt": "2024-02-21T16:18:39.775Z", + "trainedWords": [ + "dvr-wwyt" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 538, + "ratingCount": 63, + "rating": 4.98, + "thumbsUpCount": 132 + }, + "model": { + "name": "[EXP] What's on your mind honey ? 
Nothing - by Dever", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 282503, + "sizeKB": 223099.27734375, + "name": "dvr-wwyt.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-21T15:45:46.508Z", + "hashes": { + "AutoV1": "D7CCBD93", + "AutoV2": "5D85C6F3F9", + "SHA256": "5D85C6F3F9178A6E9C7D4B9F0310550FA5D9AB240F05AC507BC872EDD5180D7D", + "CRC32": "B7B431E7", + "BLAKE3": "2E543A009B98EC7D29DDB6785D1BF89659F6C4554461F8A77B7AD2549BF9D764", + "AutoV3": "D8A2375503DCD551F9D53B19AAD6C8E121F1ABD38BD75AC2370AB6FF6D4F2B2C" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/354307" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5b1819fa-dcfe-4c7a-a646-69e0646b3e79/width=450/6927352.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UCF?LsI[.Txu0:WBnNIU01R5kXx]Q+bbNGjY", + "type": "image", + "metadata": { + "hash": "UCF?LsI[.Txu0:WBnNIU01R5kXx]Q+bbNGjY", + "size": 4623470, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 146049893, + "Model": "XL_pixelwave_08", + "steps": 30, + "hashes": { + "model": "c7d51a1ee5" + }, + "prompt": "dvr-wwyt, a woman, a rocket, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c7d51a1ee5", + "name": "XL_pixelwave_08", + "type": "model" + } + ], + "Model hash": "c7d51a1ee5", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4ca6268a-0f44-45db-89ea-30f59cdc9156/width=450/6927318.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UZJ*;m~p_4%M?vMxWBof_4%MadM{?aIUM|of", + "type": "image", + "metadata": { + "hash": "UZJ*;m~p_4%M?vMxWBof_4%MadM{?aIUM|of", + "size": 4595082, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 1488800451, + "Model": "XL_zavychromaxl_v40", + "steps": 30, + "hashes": { + "model": "63a3752da1" + }, + "prompt": "dvr-wwyt, a man, a tank, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 
2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "63a3752da1", + "name": "XL_zavychromaxl_v40", + "type": "model" + } + ], + "Model hash": "63a3752da1", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/21768c5e-12a4-44da-b293-ae5df2656666/width=450/6927334.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UZJ[q[%L~Wt8T0WWROxa-;%go}R*%gIU%2WC", + "type": "image", + "metadata": { + "hash": "UZJ[q[%L~Wt8T0WWROxa-;%go}R*%gIU%2WC", + "size": 5072235, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 3066394612, + "Model": "XL_juggernautXL_v9Rundiffusionphoto2", + "steps": 30, + "hashes": { + "model": "c9e3e68f89" + }, + "prompt": "dvr-wwyt, a man, (brain:0.0) a water slide, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "c9e3e68f89", + "name": "XL_juggernautXL_v9Rundiffusionphoto2", + "type": "model" + } + ], + "Model hash": "c9e3e68f89", + "Hires steps": "15", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "Denoising strength": "0.3", + "Old prompt editing timelines": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e8cf93c7-87ae-44ce-a9ef-1145c09fe1ae/width=450/6927321.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "U6AnZ04T00.R?]IAVu.7k@ozDOMy00%M.7kq", + "type": "image", + "metadata": { + "hash": "U6AnZ04T00.R?]IAVu.7k@ozDOMy00%M.7kq", + "size": 5616968, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 3297685083, + "Model": "XL_juggernautXL_v9Rundiffusionphoto2", + "steps": 30, + "hashes": { + "model": "c9e3e68f89" + }, + "prompt": "dvr-wwyt, a woman, the matrix glyphs rain, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + 
"sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c9e3e68f89", + "name": "XL_juggernautXL_v9Rundiffusionphoto2", + "type": "model" + } + ], + "Model hash": "c9e3e68f89", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f72a14d2-1355-490a-947f-f71c71d2a6ff/width=450/6927323.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UBC%gdpfoy~ptS4nRO-;KQ-pMw9Z4mRiV?%g", + "type": "image", + "metadata": { + "hash": "UBC%gdpfoy~ptS4nRO-;KQ-pMw9Z4mRiV?%g", + "size": 4680618, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 3815932737, + "Model": "XL_juggernautXL_v9Rundiffusionphoto2", + "steps": 30, + "hashes": { + "model": "c9e3e68f89" + }, + "prompt": "dvr-wwyt, a woman, an evil humanoid robot future AI waifu, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c9e3e68f89", + "name": "XL_juggernautXL_v9Rundiffusionphoto2", + "type": "model" + } + ], + "Model hash": "c9e3e68f89", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8a31b675-0313-488d-b6a0-cf63e4b885c4/width=450/6927326.jpeg", + "nsfw": "None", + "width": 1664, + 
"height": 2432, + "hash": "ULG9Qi00?^nMlRnNi|NaOsrrRkM|sptRahV[", + "type": "image", + "metadata": { + "hash": "ULG9Qi00?^nMlRnNi|NaOsrrRkM|sptRahV[", + "size": 5573862, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 2496448466, + "Model": "XL_juggernautXL_v9Rundiffusionphoto2", + "steps": 30, + "hashes": { + "model": "c9e3e68f89" + }, + "prompt": "dvr-wwyt, a woman, pepe the frog, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "resources": [ + { + "hash": "c9e3e68f89", + "name": "XL_juggernautXL_v9Rundiffusionphoto2", + "type": "model" + } + ], + "Model hash": "c9e3e68f89", + "Hires steps": "15", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "Denoising strength": "0.3", + "Old prompt editing timelines": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a7d09865-5991-40e2-b317-108c30a834a4/width=450/6927341.jpeg", + "nsfw": "Mature", + "width": 1664, + "height": 2432, + "hash": "UGF6wqDi_NXTAebwIUr=E3,nIBOFQlEk.8$%", + "type": "image", + "metadata": { + "hash": "UGF6wqDi_NXTAebwIUr=E3,nIBOFQlEk.8$%", + "size": 5183374, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 1203399086, + "Model": "XL_pixelwave_08", + "steps": 30, + "hashes": { + "model": "c7d51a1ee5" + }, + "prompt": "dvr-wwyt, a man, (brain:0.0) a water slide, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c7d51a1ee5", + "name": "XL_pixelwave_08", + "type": "model" + } + ], + "Model hash": "c7d51a1ee5", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ff94058c-c740-43c8-9eb1-3f4065b36c04/width=450/6927347.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + 
"hash": "UECZ^hM_D*%M_NWCRjt7DikXMxIT4mxuMxNG", + "type": "image", + "metadata": { + "hash": "UECZ^hM_D*%M_NWCRjt7DikXMxIT4mxuMxNG", + "size": 5678339, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 1663281211, + "Model": "XL_pixelwave_08", + "steps": 30, + "hashes": { + "model": "c7d51a1ee5" + }, + "prompt": "dvr-wwyt, a woman, an evil walle, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c7d51a1ee5", + "name": "XL_pixelwave_08", + "type": "model" + } + ], + "Model hash": "c7d51a1ee5", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e40a1892-3a79-499e-985a-e2cf934c2540/width=450/6927351.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UWF6g^xvT0tR?^%LoLW=x]I[RQxDM|t7t6Rk", + "type": "image", + "metadata": { + "hash": "UWF6g^xvT0tR?^%LoLW=x]I[RQxDM|t7t6Rk", + "size": 5197574, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 1663281211, + "Model": "XL_pixelwave_08", + "steps": 30, + "hashes": { + "model": "c7d51a1ee5" + }, + "prompt": "dvr-wwyt, a woman, a bee, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "c7d51a1ee5", + "name": "XL_pixelwave_08", + "type": "model" + } + ], + "Model hash": "c7d51a1ee5", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint 
height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5bb4df36-6a8c-473b-ac69-8bf0299e3e68/width=450/6927353.jpeg", + "nsfw": "None", + "width": 1664, + "height": 2432, + "hash": "UHMH0{?G?w%Mt-V?nMxvcGo~x^D%$*RPNGtR", + "type": "image", + "metadata": { + "hash": "UHMH0{?G?w%Mt-V?nMxvcGo~x^D%$*RPNGtR", + "size": 4769121, + "width": 1664, + "height": 2432 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "832x1216", + "seed": 920106641, + "Model": "XL_copaxTimelessSDXL1_v10", + "steps": 30, + "hashes": { + "model": "78109223d2" + }, + "prompt": "dvr-wwyt, a woman, a hamster wheel, double exposure ", + "Version": "f0.0.12-latest-114-g8316773c", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "clipSkip": 2, + "Mask blur": "4", + "resources": [ + { + "hash": "78109223d2", + "name": "XL_copaxTimelessSDXL1_v10", + "type": "model" + } + ], + "Model hash": "78109223d2", + "Hires steps": "15", + "Inpaint area": "Only masked", + "Hires upscale": "2", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "nsfw, brain, gaussian noise, worst quality, lowres, oversaturated, undersaturated, overexposed, underexposed, grayscale, bw, bad photo, bad photography, bad art, blur, blurry, grainy, morbid, ugly, asymmetrical, mutated, malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, jpeg artifacts, out of focus, glitch, duplicate, pixelated, soft focus, color fringing, overprocessed, oversharpened", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "24.1.2", + "Denoising strength": "0.4", + "ADetailer mask blur": "4", + "Masked area padding": "32", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint width": "1024", + "ADetailer inpaint height": "1024", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "Old prompt editing timelines": "True", + "ADetailer inpaint only masked": "True", + "ADetailer use inpaint width height": "True" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/354307" +} \ No newline at end of file diff --git a/dvr-wwyt.preview.png b/dvr-wwyt.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..f97e6801ed8e06f1bb172fa3609bba7fcdd6dcd9 Binary files /dev/null and b/dvr-wwyt.preview.png differ diff --git a/dvr-wwyt.safetensors b/dvr-wwyt.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e9c3fc02dbd7e83f62897bd79db3cdd1f01fe8b4 --- /dev/null +++ b/dvr-wwyt.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d85c6f3f9178a6e9c7d4b9f0310550fa5d9ab240f05ac507bc872edd5180d7d +size 228453660 diff --git a/fx-monsters-xl-meatsack.civitai.info b/fx-monsters-xl-meatsack.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..de3ad02e9cc885d75ba24a27392165be7ed82e20 --- /dev/null +++ b/fx-monsters-xl-meatsack.civitai.info @@ -0,0 +1,492 @@ +{ + "id": 198931, + "modelId": 86377, + "name": "v1.0 - XL 1.0 - Meatsack", + "createdAt": "2023-10-25T20:13:55.758Z", + "updatedAt": "2023-10-25T20:26:51.230Z", + "status": "Published", + "publishedAt": "2023-10-25T20:26:51.228Z", + "trainedWords": [ + "fx-monsters-xl-meatsack", + "creature", + "monster" + ], + "trainingStatus": null, + 
"trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

This version is dedicated to creating bloody, gory humanoid creatures. Play with the CFG scale and LoRA strength to obtain the result you want.

", + "stats": { + "downloadCount": 2138, + "ratingCount": 239, + "rating": 4.97, + "thumbsUpCount": 243 + }, + "model": { + "name": "Fx-Monsters", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 152269, + "sizeKB": 223100.83203125, + "name": "fx-monsters-xl-meatsack.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-10-25T20:15:49.804Z", + "hashes": { + "AutoV1": "FB0C476F", + "AutoV2": "269B27A246", + "SHA256": "269B27A2462D7EAAD37B384CBA6889372E8E26C3A159EF8218597DB464B7C3BE", + "CRC32": "AC28F828", + "BLAKE3": "EFAA246FF436088EF03C2738E8537A7A70B80229F8EFE2B3A10F86557E0F35E2" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/198931" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7a66348e-c7e3-4ca0-9796-14d1c6461809/width=450/3166662.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "UDE-m@I=0frr6+slE1I:E#$*D*Rj}]xuM{Io", + "type": "image", + "metadata": { + "hash": "UDE-m@I=0frr6+slE1I:E#$*D*Rj}]xuM{Io", + "size": 1632684, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3712406147, + "Model": "newrealityxl_v11", + "steps": 16, + "hashes": { + "model": "d0268d47bb" + }, + "prompt": " fx-monsters-xl-meatsack rotten monster woman", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 3M SDE", + "cfgScale": 7, + "resources": [ + { + "hash": "d0268d47bb", + "name": "newrealityxl_v11", + "type": "model" + } + ], + "Model hash": "d0268d47bb", + "Variation seed": "385307256", + "negativePrompt": "beautiful, clean", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Variation seed strength": "0.04", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c03672d7-a179-4962-a2b5-8c6409dc1f97/width=450/3166674.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "U9GP:??J^nbH0xD~NE%3E{kD5Oad9@xa$,R%", + "type": "image", + "metadata": { + "hash": "U9GP:??J^nbH0xD~NE%3E{kD5Oad9@xa$,R%", + "size": 1394036, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1950238106, + "Model": "newrealityxl_v11", + "steps": 21, + "hashes": { + "model": "d0268d47bb" + }, + "prompt": " fx-monsters-xl-meatsack severed head on tile floor, ((eyes closed))", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler", + "cfgScale": 10, + "resources": [ + { + "hash": "d0268d47bb", + "name": "newrealityxl_v11", + "type": "model" + } + ], + "Model hash": "d0268d47bb", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7a489186-0787-40e0-86bc-19050690873e/width=450/3166696.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1280, + "hash": "U9C#=09t0g}[Bon%E2NGtlM{xGNG^kaKE2of", + "type": "image", + "metadata": { + "hash": "U9C#=09t0g}[Bon%E2NGtlM{xGNG^kaKE2of", + "size": 2033786, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 3712406147, + "Model": "newrealityxl_v11", + "steps": 16, + "hashes": { + "model": "d0268d47bb" + }, + 
"prompt": " fx-monsters-xl-meatsack eviscerated rotten monster woman", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 3M SDE", + "cfgScale": 7, + "resources": [ + { + "hash": "d0268d47bb", + "name": "newrealityxl_v11", + "type": "model" + } + ], + "Model hash": "d0268d47bb", + "Variation seed": "2368273361", + "negativePrompt": "beautiful, clean", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Variation seed strength": "0.04", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a06c0ede-0347-479f-9771-e7d22e22f615/width=450/3166698.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "UCDI2v~B1y9[H?WB9tIpyENHR%smIAofxutR", + "type": "image", + "metadata": { + "hash": "UCDI2v~B1y9[H?WB9tIpyENHR%smIAofxutR", + "size": 2055349, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 459759312, + "Model": "newrealityxl_v11", + "steps": 21, + "hashes": { + "model": "d0268d47bb" + }, + "prompt": " fx-monsters-xl-meatsack rotten monster cake", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler", + "cfgScale": 7, + "resources": [ + { + "hash": "d0268d47bb", + "name": "newrealityxl_v11", + "type": "model" + } + ], + "Model hash": "d0268d47bb", + "Variation seed": "1906930389", + "negativePrompt": "beautiful, clean, teeth", + "Style Selector Style": "base", + "Style Selector Enabled": "True", + "Variation seed strength": "0.04", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/37d18173-be29-4bdc-a5ac-78e468eca744/width=450/3166722.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCJ$K}I8H[0{_io}IAtm%$NG55sW~VOEE0ja", + "type": "image", + "metadata": { + "hash": "UCJ$K}I8H[0{_io}IAtm%$NG55sW~VOEE0ja", + "size": 1468968, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 1438508367, + "Model": "sd_xl_base_1.0", + "steps": 21, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": " fx-monsters-xl-meatsack tiger monster", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "DPM++ 3M SDE", + "VAE hash": "495c9925a5", + "cfgScale": 10, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/33a2b3a8-3dc2-4d6d-9323-2bd53c911406/width=450/3166744.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7By5dIU5l^lL4#S9Zx^0xMw#6OUxtE0$eI.", + "type": "image", + "metadata": { + "hash": "U7By5dIU5l^lL4#S9Zx^0xMw#6OUxtE0$eI.", + "size": 1292957, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 2327110087, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinematic photo fx-monsters-xl-meatsack nude woman . 
35mm photograph, film, bokeh, professional, 4k, highly detailed", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler a", + "VAE hash": "495c9925a5", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly", + "Style Selector Style": "Photographic", + "Style Selector Enabled": "True", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/230ec806-26c6-4b01-b094-706e095b2417/width=450/3166746.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "U6Cri~0#l9rW.856T0?b0eoxS2~D^+t7=}K5", + "type": "image", + "metadata": { + "hash": "U6Cri~0#l9rW.856T0?b0eoxS2~D^+t7=}K5", + "size": 1326303, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 672541463, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "cinematic photo fx-monsters-xl-meatsack nude woman . 35mm photograph, film, bokeh, professional, 4k, highly detailed", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler a", + "VAE hash": "495c9925a5", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "drawing, painting, crayon, sketch, graphite, impressionist, noisy, blurry, soft, deformed, ugly", + "Style Selector Style": "Photographic", + "Style Selector Enabled": "True", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/214906bd-0e4c-46d9-b04d-58d47e1f9f71/width=450/3166749.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "U58z$0r:UH}p^,ROEMs9RPR*D*xuV=%Mw[tS", + "type": "image", + "metadata": { + "hash": "U58z$0r:UH}p^,ROEMs9RPR*D*xuV=%Mw[tS", + "size": 1623097, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 2426685753, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Horror-themed fx-monsters-xl-meatsack brad pitt . 
Eerie, unsettling, dark, spooky, suspenseful, grim, highly detailed", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler a", + "VAE hash": "495c9925a5", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "cheerful, bright, vibrant, light-hearted, cute", + "Style Selector Style": "Horror", + "Style Selector Enabled": "True", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/46647bce-c99d-4e8d-9ebe-9f7b04e982a7/width=450/3166754.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "U49?wc0#00}T},9ZT2?c1JxbMcMw00%1~DS1", + "type": "image", + "metadata": { + "hash": "U49?wc0#00}T},9ZT2?c1JxbMcMw00%1~DS1", + "size": 1664545, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 315972852, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Horror-themed fx-monsters-xl-meatsack donald trump . Eerie, unsettling, dark, spooky, suspenseful, grim, highly detailed", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler a", + "VAE hash": "495c9925a5", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "cheerful, bright, vibrant, light-hearted, cute", + "Style Selector Style": "Horror", + "Style Selector Enabled": "True", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e482bb52-3ef7-46cd-afd3-9de11e96b480/width=450/3166761.jpeg", + "nsfw": "Mature", + "width": 1024, + "height": 1024, + "hash": "UFAc;}9ZAJ~W~VIA9Z-o%1RjD%%M%Mo#NJtR", + "type": "image", + "metadata": { + "hash": "UFAc;}9ZAJ~W~VIA9Z-o%1RjD%%M%Mo#NJtR", + "size": 1580756, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sd_xl_base_1.0.vae.safetensors", + "Size": "1024x1024", + "seed": 709318883, + "Model": "sd_xl_base_1.0", + "steps": 30, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "Horror-themed fx-monsters-xl-meatsack disfigured Joe Biden . 
Eerie, unsettling, dark, spooky, suspenseful, grim, highly detailed", + "Version": "v1.6.0-209-g7d60076b", + "sampler": "Euler a", + "VAE hash": "495c9925a5", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "cheerful, bright, vibrant, light-hearted, cute", + "Style Selector Style": "Horror", + "Style Selector Enabled": "True", + "\"fx-monsters-xl-meatsack": "5f4c7b1de2c8\"", + "Style Selector Randomize": "False" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/198931" +} \ No newline at end of file diff --git a/fx-monsters-xl-meatsack.preview.png b/fx-monsters-xl-meatsack.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..81fc5694aef8b7eb0b4666c049801ecf177a2ada Binary files /dev/null and b/fx-monsters-xl-meatsack.preview.png differ diff --git a/fx-monsters-xl-meatsack.safetensors b/fx-monsters-xl-meatsack.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..a66c7d7d282da25b808c0deeb36b077e26ed60c0 --- /dev/null +++ b/fx-monsters-xl-meatsack.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:269b27a2462d7eaad37b384cba6889372e8e26c3a159ef8218597db464b7c3be +size 228455252 diff --git a/game_icon_v1.0.civitai.info b/game_icon_v1.0.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..40f1b0fe2d38d67343ebc2ff76049a6feec3829a --- /dev/null +++ b/game_icon_v1.0.civitai.info @@ -0,0 +1,384 @@ +{ + "id": 156373, + "modelId": 141066, + "name": "v1.0", + "createdAt": "2023-09-06T13:41:42.808Z", + "updatedAt": "2023-09-07T11:32:03.557Z", + "status": "Published", + "publishedAt": "2023-09-07T11:32:03.553Z", + "trainedWords": [ + "icon" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 3740, + "ratingCount": 405, + "rating": 5, + "thumbsUpCount": 691 + }, + "model": { + "name": "Game icon", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 117206, + "sizeKB": 2724525.4921875, + "name": "game_icon_v1.0.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-09-06T14:36:56.420Z", + "hashes": { + "AutoV1": "E6A0A28A", + "AutoV2": "9E39A540E9", + "SHA256": "9E39A540E97173442ABBB319437080E4DC5A48087F43ED94C53BAFEF75E2E903", + "CRC32": "2EB7036B", + "BLAKE3": "6F54B489A6166EDD9FC28AE1E1DF8E3AFF2946C9496DE7D70C30E2C18223CB67" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/156373" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/55660fc0-0001-48d7-8e1a-477e1e17f365/width=450/2380349.jpeg", + "nsfw": "None", + "width": 4096, + "height": 4096, + "hash": "U7RVti_M.7^,]fVXx[r?-;tRR4I9yZ-o9vM_", + "type": "image", + "metadata": { + "hash": "U7RVti_M.7^,]fVXx[r?-;tRR4I9yZ-o9vM_", + "size": 5095027, + "width": 4096, + "height": 4096 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d02952d7-6315-40ab-8506-0a2fa9dfcb5c/width=450/2391730.jpeg", + "nsfw": "None", + "width": 4096, + "height": 4096, + "hash": 
"U4RMG5t*9GixITv+Em+t57pIR4Ab~WNFIVu4", + "type": "image", + "metadata": { + "hash": "U4RMG5t*9GixITv+Em+t57pIR4Ab~WNFIVu4", + "size": 5109594, + "width": 4096, + "height": 4096 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/db9b9da4-295a-4dd5-b5f6-3ca6c002076e/width=450/2392189.jpeg", + "nsfw": "None", + "width": 4096, + "height": 4096, + "hash": "U5Ro,0~R-,.8%~-hrntSv{s6.7x@^kt-Fz#S", + "type": "image", + "metadata": { + "hash": "U5Ro,0~R-,.8%~-hrntSv{s6.7x@^kt-Fz#S", + "size": 5253639, + "width": 4096, + "height": 4096 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c01218cb-4f86-4548-b811-cc68fe932dba/width=450/2379793.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UrRM3ut7.9ozxuaya}fk?^WWMdoLV@j[j[ay", + "type": "image", + "metadata": { + "hash": "UrRM3ut7.9ozxuaya}fk?^WWMdoLV@j[j[ay", + "size": 685942, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a red and white cape with a red bow. ", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6dc46426-ed63-43af-8596-d0f84da17252/width=450/2379797.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UdR.=Lxt.Ta%yDWCodt7kCafWAoenhoeWBWB", + "type": "image", + "metadata": { + "hash": "UdR.=Lxt.Ta%yDWCodt7kCafWAoenhoeWBWB", + "size": 655890, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a dress with ruffles . ", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/99691bc5-6edc-49e6-addc-cf4a1e76b2a5/width=450/2379796.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UtR{Sws,yEogt6WAWBoz*0flR4ofkDozozV@", + "type": "image", + "metadata": { + "hash": "UtR{Sws,yEogt6WAWBoz*0flR4ofkDozozV@", + "size": 620499, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. two pieces of soap sitting next to each other. 
", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/67ef63e9-a80c-455b-afa7-98e59a29f397/width=450/2379795.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U$RL@Ns:%#kCt7j[a|ay?^W;MdoKofa|f6oL", + "type": "image", + "metadata": { + "hash": "U$RL@Ns:%#kCt7j[a|ay?^W;MdoKofa|f6oL", + "size": 651628, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a brown leather purse with fringes on it. ", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cdef3274-5f8d-466d-9cf4-9810d4b6ad0c/width=450/2379798.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UcRyW%x].TV@t6j]WCax.9V@MwtRt8aet7kC", + "type": "image", + "metadata": { + "hash": "UcRyW%x].TV@t6j]WCax.9V@MwtRt8aet7kC", + "size": 678173, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a close up of a tooth . ", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8eb803c7-6047-4719-a60c-3f39f9fbecdb/width=450/2379799.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UmSYaIt7yYs:ozj[jsay*0j@Q-bIxaaeW=of", + "type": "image", + "metadata": { + "hash": "UmSYaIt7yYs:ozj[jsay*0j@Q-bIxaaeW=of", + "size": 627116, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a piece of orange crystal . 
", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3f7c73e3-33f0-42dd-8c21-8af2e4c632b3/width=450/2379801.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UPOy;Ln#?wn~_2RkRjozx^%MRPogj[Rjofof", + "type": "image", + "metadata": { + "hash": "UPOy;Ln#?wn~_2RkRjozx^%MRPogj[Rjofof", + "size": 1063015, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1, + "Model": "sd_xl_base_1.0", + "steps": 25, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "2d icon. a newspaper with a picture of a man and a woman. ", + "Version": "v1.5.1-1-g56236dfd", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "blurry, lowres", + "\"SDXL_icon_V2-000034": "9ef9e9a33a34\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/156373" +} \ No newline at end of file diff --git a/game_icon_v1.0.preview.png b/game_icon_v1.0.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..7fc058fea5c5f83fb1fe09182323b48650a64454 --- /dev/null +++ b/game_icon_v1.0.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9953291a18d4ccffb7f012f049335e8c19e7a9060457b34b0753ab411e2eae4b +size 4838054 diff --git a/game_icon_v1.0.safetensors b/game_icon_v1.0.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1ee4da65f295c0bba377237ff946f46b5f621f0b --- /dev/null +++ b/game_icon_v1.0.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e39a540e97173442abbb319437080e4dc5a48087f43ed94c53bafef75e2e903 +size 2789914104 diff --git a/glowneon_xl_v1.civitai.info b/glowneon_xl_v1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..a009ddd8ff8404a8a3b8167c5672b96fed9b2fef --- /dev/null +++ b/glowneon_xl_v1.civitai.info @@ -0,0 +1,515 @@ +{ + "id": 348189, + "modelId": 310235, + "name": "v1.0", + "createdAt": "2024-02-17T20:57:35.336Z", + "updatedAt": "2024-02-17T21:06:33.545Z", + "status": "Published", + "publishedAt": "2024-02-17T21:06:33.544Z", + "trainedWords": [ + "glowneon", + "glowing", + "sparks", + "lightning" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 4132, + "ratingCount": 232, + "rating": 4.99, + "thumbsUpCount": 490 + }, + "model": { + "name": "GlowNeon XL LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 277142, + "sizeKB": 332781.63671875, + "name": "glowneon_xl_v1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-17T21:01:46.597Z", + "hashes": { + "AutoV1": "51078C80", + "AutoV2": "4E114AEEFF", + "SHA256": "4E114AEEFFADE86458CF8D24D752E32568C697E62A221072964422F1AD07A290", + "CRC32": "D3AE59AB", + "BLAKE3": 
"80DD38BD469C880842218D66EDBF3D3EEF225EE3AD50F4C699495F4707BD3BEC", + "AutoV3": "7F46648B8EECF21F4098150923C316F9949E328B256C9B9977211846D6B704F5" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/348189" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7e67e5a2-b60f-4284-8531-d8248217c136/width=450/6762990.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U67nk6rW0Jo~1nRP-4tQ$js:S5XA:hkCJCtS", + "type": "image", + "metadata": { + "hash": "U67nk6rW0Jo~1nRP-4tQ$js:S5XA:hkCJCtS", + "size": 3559122, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580630, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow 
\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580630,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":52,\"last_link_id\":117,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"glowneon_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow \"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[561187032580630,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "glowneon, a glowing whale swimming into space ocean with sparks, blue, white, yellow ", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0f5099b3-c026-46a4-9c7b-19f12c261dbd/width=450/6763023.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "UA8NR}+d.Axd7dW-S$XmMvJ}ITOSv#wN#rZ~", + "type": "image", + "metadata": { + "hash": "UA8NR}+d.Axd7dW-S$XmMvJ}ITOSv#wN#rZ~", + "size": 2667318, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "768x1024", + "seed": 2329184249, + "Model": "DreamShaperXL_Turbo_v2", + "steps": 8, + "hashes": { + "model": "4726d3bab1" + }, + "prompt": "glowneon, cinematic film still, woman with glowing eyes, sparks, black cape, magenta and cyan ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "glowneon_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4726d3bab1", + "name": "DreamShaperXL_Turbo_v2", + "type": "model" + } + ], + "Model hash": "4726d3bab1", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "\"glowneon_xl_v1": "7f46648b8eec\"", + "Denoising strength": "0.45" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/460f886b-ea74-4646-8e95-ac5493c19978/width=450/6763032.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1152, + "hash": "U8BCY]5X0%$yOtOGIowH0g%1^KE25S%0}=It", + "type": "image", + "metadata": { + "hash": "U8BCY]5X0%$yOtOGIowH0g%1^KE25S%0}=It", + "size": 2547326, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "1024x768", + "seed": 4045062253, + "Model": "DreamShaperXL_Turbo_v2", + "steps": 8, + "hashes": { + "model": "4726d3bab1" + }, + "prompt": "glowneon, cinematic film still, glowing potato fries emitting sparks and lightning, dark yellow-orange ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "glowneon_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4726d3bab1", + "name": "DreamShaperXL_Turbo_v2", + "type": "model" + } + ], + "Model 
hash": "4726d3bab1", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "\"glowneon_xl_v1": "7f46648b8eec\"", + "Denoising strength": "0.45" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/74e893a7-920e-49ae-9247-8004488a4a84/width=450/6763026.jpeg", + "nsfw": "None", + "width": 1432, + "height": 1072, + "hash": "U68|l8?d0JoVTLrgH?5fvTx1P9I%@_%3uNo@", + "type": "image", + "metadata": { + "hash": "U68|l8?d0JoVTLrgH?5fvTx1P9I%@_%3uNo@", + "size": 1971600, + "width": 1432, + "height": 1072 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "1024x768", + "seed": 2329184250, + "Model": "DreamShaperXL_Turbo_v2", + "steps": 8, + "hashes": { + "model": "4726d3bab1", + "lora:add-detail-xl": "0d9bd1b873" + }, + "prompt": "glowneon, starbucks coffee mug emitting sparks, dark amethyst and cyan ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "glowneon_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4726d3bab1", + "name": "DreamShaperXL_Turbo_v2", + "type": "model" + } + ], + "Model hash": "4726d3bab1", + "Hires steps": "5", + "Hires upscale": "1.4", + "add-detail-xl": "9c783c8ce46c\"", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "\"glowneon_xl_v1": "7f46648b8eec", + "Denoising strength": "0.45" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/87b8c482-2804-44f7-953b-54a7a461b243/width=450/6763030.jpeg", + "nsfw": "None", + "width": 1432, + "height": 1072, + "hash": "UA4|Q0V[YRx^Mco}nNX8L}Z~X9RjysaLpJn$", + "type": "image", + "metadata": { + "hash": "UA4|Q0V[YRx^Mco}nNX8L}Z~X9RjysaLpJn$", + "size": 2304913, + "width": 1432, + "height": 1072 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "RNG": "CPU", + "Size": "1024x768", + "seed": 2329184249, + "Model": "DreamShaperXL_Turbo_v2", + "steps": 8, + "hashes": { + "model": "4726d3bab1", + "lora:add-detail-xl": "0d9bd1b873" + }, + "prompt": "glowneon, Son Goku emitting sparks and electricity, dark cyan and white, glowing eyes, cinematic film still ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "glowneon_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4726d3bab1", + "name": "DreamShaperXL_Turbo_v2", + "type": "model" + } + ], + "Model hash": "4726d3bab1", + "Hires steps": "5", + "Hires upscale": "1.4", + "add-detail-xl": "9c783c8ce46c\"", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "\"glowneon_xl_v1": "7f46648b8eec", + "Denoising strength": "0.45" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5f769919-e7bb-429b-aaf4-216153e49d9d/width=450/6762998.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "U7C60h~6065D?Y-TMzNI0BD.~7^f9KIr%J%0", + "type": "image", + "metadata": { + "hash": "U7C60h~6065D?Y-TMzNI0BD.~7^f9KIr%J%0", + "size": 3461926, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580639, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": 
"{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, 
fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580639,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":52,\"last_link_id\":117,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580639,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load 
Lora\"},\"widgets_values\":[\"glowneon_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1_upscale\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "glowneon, a glowing pikachu emanating light, sparks, yellow, white, red, fluffy, hyperealistic, realistic, fur", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/69917f4f-2f8f-45ee-84d8-4462eca1316b/width=450/6762994.jpeg", + "nsfw": "None", + "width": 1536, + "height": 2048, + "hash": "U68qse=rH;u6s7?Ew@9ami$|R=IU~U=;n3o$", + "type": "image", + "metadata": { + "hash": "U68qse=rH;u6s7?Ew@9ami$|R=IU~U=;n3o$", + "size": 3668005, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580628, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, 
yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, 
yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580628,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a robot girl, blue, yellow\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":52,\"last_link_id\":117,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load 
Lora\"},\"widgets_values\":[\"glowneon_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1_upscale\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"glowneon, a robot girl, blue, yellow\"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580628,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "glowneon, a robot girl, blue, yellow", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/612a65fd-4562-4425-b5c2-48bca6a2473e/width=450/6762996.jpeg", + "nsfw": "X", + "width": 1536, + "height": 2048, + "hash": "U59@9pX93Y*0MziGZge84Ts:R3.90xpJ^,IU", + "type": "image", + "metadata": { + "hash": "U59@9pX93Y*0MziGZge84Ts:R3.90xpJ^,IU", + "size": 3523962, + "width": 1536, + "height": 2048 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580629, + "vaes": [], + "Model": 
"dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst 
quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white 
\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white 
\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580629,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":768,\"height\":1024,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":52,\"last_link_id\":117,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load Lora\"},\"widgets_values\":[\"glowneon_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1_upscale\"]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"glowneon, vampirella with glowing eyes, sparks, black cape, red, white \"]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580629,\"fixed\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"EmptyLatentImage\"},\"widgets_values\":[768,1024,1]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 768, + "height": 1024, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "glowneon, vampirella with glowing eyes, sparks, black cape, red, white ", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1f176b51-c504-4d8f-ad18-6b5aab4b90eb/width=450/6763007.jpeg", + "nsfw": "None", + "width": 2048, + "height": 1536, + "hash": "UAAl@Nxs0KwMKQInn2%N0mwI}SFyK7xZsRI@", + "type": "image", + "metadata": { + "hash": "UAAl@Nxs0KwMKQInn2%N0mwI}SFyK7xZsRI@", + "size": 3262409, + "width": 2048, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 561187032580641, + "vaes": [], + "Model": "dreamshaperXL_v2TurboDpmppSDE", + "comfy": "{\"prompt\":{\"3\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"4\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"5\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"},\"6\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"7\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"8\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"9\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"14\":{\"inputs\":{\"filename_prefix\":\"glowneon/glowneon_v1_upscale\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, 
worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"SaveImage\"},\"39\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, 
blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"40\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"41\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, 
blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"},\"42\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":203236410398179,\"steps\":5,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":0.45,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load 
Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"pixels\":{\"inputs\":{\"upscale_method\":\"bicubic\",\"scale_by\":2,\"image\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":561187032580641,\"steps\":8,\"cfg\":2,\"sampler_name\":\"dpmpp_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"},\"positive\":{\"inputs\":{\"text\":\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"negative\":{\"inputs\":{\"text\":\"low quality, worst quality\",\"clip\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"class_type\":\"CLIPTextEncode\"},\"latent_image\":{\"inputs\":{\"width\":1024,\"height\":768,\"batch_size\":1},\"class_type\":\"EmptyLatentImage\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"ImageScaleBy\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEEncode\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"VAEDecode\"},\"50\":{\"inputs\":{\"lora_name\":\"glowneon_xl_v1.safetensors\",\"strength_model\":1,\"strength_clip\":1,\"model\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"},\"class_type\":\"CheckpointLoaderSimple\"}},\"class_type\":\"Load Lora\"}},\"workflow\":{\"last_node_id\":52,\"last_link_id\":117,\"nodes\":[{\"id\":8,\"type\":\"VAEDecode\",\"pos\":[943,87],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":6,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":7},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":8}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[9,88],\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"VAEDecode\"}},{\"id\":41,\"type\":\"VAEEncode\",\"pos\":[1586,-7],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"pixels\",\"type\":\"IMAGE\",\"link\":89},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":90}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[91],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEEncode\"}},{\"id\":42,\"type\":\"VAEDecode\",\"pos\":[1838,108],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":11,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":92},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":93}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[94],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"}},{\"id\":40,\"type\":\"ImageScaleBy\",\"pos\":[1207,-34],\"size\":{\"0\":315,\"1\":82},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":88}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[89],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"ImageScaleBy\"},\"widgets_values\":[\"bicubic\",2]},{\"id\":39,\"type\":\"KSampler\",\"pos\":[1473,136],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":116},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":86},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":87},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":91}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[92],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[203236410398179,\"fixed\",5,2,\"dpmpp_sde\",\"karras\",0.45]},{\"id\":4,\"type\":\"CheckpointLoaderSimple\",\"pos\":[65,476],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[111],\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[112],\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[8,90,93],\"slot_index\":2}],\"properties\":{\"Node name for S&R\":\"CheckpointLoaderSimple\"},\"widgets_values\":[\"dreamshaperXL_v2TurboDpmppSDE.safetensors\"]},{\"id\":7,\"type\":\"CLIPTextEncode\",\"pos\":[413,389],\"size\":{\"0\":425.27801513671875,\"1\":180.6060791015625},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":114}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[6,87],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"low quality, worst quality\"]},{\"id\":50,\"type\":\"Load Lora\",\"pos\":[40,198],\"size\":{\"0\":315,\"1\":146},\"flags\":{},\"order\":2,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":111},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":112}],\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[115,116],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[113,114],\"shape\":3,\"slot_index\":1},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":3}],\"properties\":{\"Node name for S&R\":\"Load 
Lora\"},\"widgets_values\":[\"glowneon_xl_v1.safetensors\",1,1]},{\"id\":9,\"type\":\"SaveImage\",\"pos\":[1244,209],\"size\":{\"0\":210,\"1\":270},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":9}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1\"]},{\"id\":14,\"type\":\"SaveImage\",\"pos\":[1823,216],\"size\":{\"0\":213.27566528320312,\"1\":270},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":94}],\"properties\":{},\"widgets_values\":[\"glowneon/glowneon_v1_upscale\"]},{\"id\":5,\"type\":\"EmptyLatentImage\",\"pos\":[473,609],\"size\":{\"0\":315,\"1\":106},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[2],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"EmptyLatentImage\"},\"widgets_values\":[1024,768,1]},{\"id\":3,\"type\":\"KSampler\",\"pos\":[891,192],\"size\":{\"0\":315,\"1\":474},\"flags\":{},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":115},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":83},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":6},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":2}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[7],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"KSampler\"},\"widgets_values\":[561187032580641,\"increment\",8,2,\"dpmpp_sde\",\"karras\",1]},{\"id\":6,\"type\":\"CLIPTextEncode\",\"pos\":[415,186],\"size\":{\"0\":422.84503173828125,\"1\":164.31304931640625},\"flags\":{},\"order\":3,\"mode\":0,\"inputs\":[{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":113}],\"outputs\":[{\"name\":\"CONDITIONING\",\"type\":\"CONDITIONING\",\"links\":[83,86],\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"CLIPTextEncode\"},\"widgets_values\":[\"glowneon, a glowing goldfish emanating light and sparks, orange, blue\"]}],\"links\":[[2,5,0,3,3,\"LATENT\"],[6,7,0,3,2,\"CONDITIONING\"],[7,3,0,8,0,\"LATENT\"],[8,4,2,8,1,\"VAE\"],[9,8,0,9,0,\"IMAGE\"],[83,6,0,3,1,\"CONDITIONING\"],[86,6,0,39,1,\"CONDITIONING\"],[87,7,0,39,2,\"CONDITIONING\"],[88,8,0,40,0,\"IMAGE\"],[89,40,0,41,0,\"IMAGE\"],[90,4,2,41,1,\"VAE\"],[91,41,0,39,3,\"LATENT\"],[92,39,0,42,0,\"LATENT\"],[93,4,2,42,1,\"VAE\"],[94,42,0,14,0,\"IMAGE\"],[111,4,0,50,0,\"MODEL\"],[112,4,1,50,1,\"CLIP\"],[113,50,1,6,0,\"CLIP\"],[114,50,1,7,0,\"CLIP\"],[115,50,0,3,0,\"MODEL\"],[116,50,0,39,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4,\"widget_idx_map\":{\"3\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5},\"39\":{\"seed\":0,\"sampler_name\":4,\"scheduler\":5}}}}", + "steps": 8, + "width": 1024, + "height": 768, + "models": [ + "dreamshaperXL_v2TurboDpmppSDE.safetensors" + ], + "prompt": "glowneon, a glowing goldfish emanating light and sparks, orange, blue", + "denoise": 1, + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "modelIds": [], + "scheduler": "karras", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "low quality, worst quality", + "additionalResources": [] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9d358450-624b-498f-b0f1-1f010ae800f8/width=450/6763079.jpeg", + "nsfw": "None", + "width": 1432, + "height": 1072, + "hash": "U49$%to#K1WAFyIp0#X70hxGENS#~B-n=wrr", + "type": "image", + "metadata": { + "hash": "U49$%to#K1WAFyIp0#X70hxGENS#~B-n=wrr", + "size": 2360623, + "width": 1432, + "height": 1072 + }, + "availability": "Public", + "sizeKB": 
null, + "meta": { + "RNG": "CPU", + "Size": "1024x768", + "seed": 2329184250, + "Model": "DreamShaperXL_Turbo_v2", + "steps": 8, + "hashes": { + "model": "4726d3bab1", + "lora:add-detail-xl": "0d9bd1b873" + }, + "prompt": "glowneon, glowing samurai emitting sparks and electricity, dark red and orange, glowing eyes, cinematic film still ", + "Version": "v1.6.1", + "sampler": "DPM++ SDE Karras", + "cfgScale": 2, + "clipSkip": 2, + "resources": [ + { + "name": "glowneon_xl_v1", + "type": "lora", + "weight": 1 + }, + { + "hash": "4726d3bab1", + "name": "DreamShaperXL_Turbo_v2", + "type": "model" + } + ], + "Model hash": "4726d3bab1", + "Hires steps": "5", + "Hires upscale": "1.4", + "add-detail-xl": "9c783c8ce46c\"", + "Hires upscaler": "8x_NMKD-Superscale_150000_G", + "negativePrompt": "(low quality, worst quality:1.4), cgi, text, signature, watermark, extra limbs", + "\"glowneon_xl_v1": "7f46648b8eec", + "Denoising strength": "0.45" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/348189" +} \ No newline at end of file diff --git a/glowneon_xl_v1.preview.png b/glowneon_xl_v1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..164fb3b2f4461816144318d8705e0c97dfc0d5e3 Binary files /dev/null and b/glowneon_xl_v1.preview.png differ diff --git a/glowneon_xl_v1.safetensors b/glowneon_xl_v1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..da40f7091698ea526800cd02f7a1b9a891f33c8d --- /dev/null +++ b/glowneon_xl_v1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e114aeeffade86458cf8d24d752e32568c697e62a221072964422f1ad07a290 +size 340768396 diff --git a/lora-sdxl-perfect-eyes.civitai.info b/lora-sdxl-perfect-eyes.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..daa6ab3dd075a7726b9f1977f43bfac8c3bcbbc7 --- /dev/null +++ b/lora-sdxl-perfect-eyes.civitai.info @@ -0,0 +1,16686 @@ +{ + "id": 129711, + "modelId": 119399, + "name": "v1.0", + "createdAt": "2023-07-30T20:22:30.390Z", + "updatedAt": "2023-07-30T20:27:40.367Z", + "status": "Published", + "publishedAt": "2023-07-30T20:27:13.350Z", + "trainedWords": [ + "perfecteyes" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 5957, + "ratingCount": 404, + "rating": 4.99, + "thumbsUpCount": 597 + }, + "model": { + "name": "Concept: Perfect Eyes", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 94037, + "sizeKB": 40235.38671875, + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-07-30T20:30:40.490Z", + "hashes": { + "AutoV1": "16D4E1E4", + "AutoV2": "1E1DDAAEBC", + "SHA256": "1E1DDAAEBC92863A778EF96FE07C0D64257D9A03DDC4719D85A80716B6933A45", + "CRC32": "46251D79", + "BLAKE3": "0BF6D0022A67B81C408EB59D14F086C61EB0ECEA38F5F353D19153BB3B6C0F9F" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/129711" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f398add8-e76c-4eba-b9ac-859f65b4e920/width=450/1797816.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UXKTb=~WxaxaIUn$R*s:~Vo#NHoetRxuxaWX", + "type": 
"image", + "metadata": { + "hash": "UXKTb=~WxaxaIUn$R*s:~Vo#NHoetRxuxaWX", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 643091462646845, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, 
+ 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": 
"CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 643091462646845, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b55be2aa-6e3a-4ebb-b2ad-a9e83ba454f3/width=450/1797809.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UQKmthD*4.%L%gxaE1WBofa|NGWV~Vads.t7", + "type": "image", + "metadata": { + "hash": "UQKmthD*4.%L%gxaE1WBofa|NGWV~Vads.t7", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "perfecteyes" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 159657161927510, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "perfecteyes" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, 
+ 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 13, + "pos": [ + 117.74066078186034, + 135.179916229248 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 1, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "perfecteyes" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." 
+ ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." + ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 5, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 8, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + 
"widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 10, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 11, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "mndlr style, mandalorian in the desert" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, bad art, realistic" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 13, + "title": "Load Checkpoint - REFINER", + "bgcolor": 
"#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "mndlr style, mandalorian in the desert" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, bad art, realistic" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 14, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. 
This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 10, + "pos": [ + 1219, + 194 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 159657161927510, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 19, + "pos": [ + 1267, + -470 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + 
"properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 15, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "perfecteyes", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f2aadc6d-c134-4d71-82b2-ed60e3440456/width=450/1797808.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": 
"USHw.D0L9a%L%2xaNHM|adofs:NH~BIpjYof", + "type": "image", + "metadata": { + "hash": "USHw.D0L9a%L%2xaNHM|adofs:NH~BIpjYof", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "professional photo of perfecteyes eyes, , sharp, absurdres, studio lighting, intricate, epic, 8k, photorealism, highly detailed, bokeh, hasselblad" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 1001981830172707, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "professional photo of perfecteyes eyes, , sharp, absurdres, studio lighting, intricate, epic, 8k, photorealism, highly detailed, bokeh, hasselblad" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, 
+ 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 5, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 8, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 10, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 11, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "perfecteyes (red eyes:1.45)" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "blue eyes" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 13, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, 
+ "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "perfecteyes (red eyes:1.45)" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "blue eyes" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 14, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 15, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 1001981830172707, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 1, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "professional photo of perfecteyes eyes, , sharp, absurdres, studio lighting, intricate, epic, 8k, photorealism, highly detailed, bokeh, hasselblad" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "professional photo of perfecteyes eyes, , sharp, absurdres, studio lighting, intricate, epic, 8k, photorealism, highly detailed, bokeh, hasselblad", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + 
"strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/312c7950-789e-4b7d-8acc-f1d7589d5f35/width=450/1797822.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UNI|:q01D%o}9aNG?GIo-SoLI?NG~VM{ofof", + "type": "image", + "metadata": { + "hash": "UNI|:q01D%o}9aNG?GIo-SoLI?NG~VM{ofof", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 275890030483060, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes-resized.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes-resized.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, 
+ 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": 
"CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 275890030483060, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 14, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + 
"widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 15, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes-resized.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes-resized.safetensors", + 1, + 1 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 1024, + "height": 1024, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, healthly", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes-resized.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes-resized.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9838f152-5ee9-4efb-8c58-cd612a41bf25/width=450/1797806.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U7Fhhk00Mckq00^+%1Ip[9xv%#D%_N0KxCs,", + "type": "image", + "metadata": { + "hash": "U7Fhhk00Mckq00^+%1Ip[9xv%#D%_N0KxCs,", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 80360198158917, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 
5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": 
"CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 80360198158917, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0e028364-0b11-424f-abb9-5d07806a1030/width=450/1797812.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UGGkn3?G$yad00jaRjWC}?WVx]RktSNGaKt7", + "type": "image", + "metadata": { + "hash": "UGGkn3?G$yad00jaRjWC}?WVx]RktSNGaKt7", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 461664244327054, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, 
+ 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 
54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 461664244327054, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d1baa28e-e1de-4b78-9804-023dfc115369/width=450/1797811.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "ULGa:.Ri$LWB0LogWDR*~ANGtRWBXTIos8of", + "type": "image", + "metadata": { + "hash": "ULGa:.Ri$LWB0LogWDR*~ANGtRWBXTIos8of", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 531382339328839, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, 
+ 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 
54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 531382339328839, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dd4153ff-5b7e-4af7-a3ee-f1384579235c/width=450/1797810.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UJJ%,wD*01?atR-p9ZM|aebHNGM|~Vn$smxa", + "type": "image", + "metadata": { + "hash": "UJJ%,wD*01?atR-p9ZM|aebHNGM|~Vn$smxa", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "perfecteyes" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 648449202179327, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "perfecteyes" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, 
+ 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 13, + "pos": [ + 117.74066078186034, + 135.179916229248 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 1, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "perfecteyes" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." 
+ ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 4, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." + ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 5, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 8, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + 
"widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 10, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 11, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "perfecteyes" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 13, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", 
+ "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "perfecteyes" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 14, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. 
This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 10, + "pos": [ + 1219, + 194 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 648449202179327, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 15, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 
+ ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "perfecteyes", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7601ffa9-6bc9-4683-8e16-842cd1e6dcc1/width=450/1797813.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": 
"UBGH^~9G,n-;00t7o2V@}=%2tmslEOxCVst7", + "type": "image", + "metadata": { + "hash": "UBGH^~9G,n-;00t7o2V@}=%2tmslEOxCVst7", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 698907470651728, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], 
+ [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 
54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 698907470651728, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate, earth reflected in the eye", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a4a980c5-5f12-4a9a-8ad8-5244ba5f6829/width=450/1797814.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UXLf{y9G9Zxu%LxZWCIo%1NGoJof~VV@xZt7", + "type": "image", + "metadata": { + "hash": "UXLf{y9G9Zxu%LxZWCIo%1NGoJof~VV@xZt7", + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [], + "Model": "sdXL_v10", + "comfy": { + "prompt": { + "4": { + "inputs": { + "ckpt_name": "sdXL_v10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 896, + "height": 1152, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "49", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": [ + "49", + 0 + ], + "steps": 25, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "add_noise": "enable", + "scheduler": "karras", + "noise_seed": 642992949355749, + "end_at_step": 20, + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2s_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "11": { + "inputs": { + "cfg": 8, + "model": [ + "50", + 0 + ], + "steps": 25, + "negative": [ + "16", + 0 + ], + "positive": [ + "15", + 0 + ], + "add_noise": "disable", + "scheduler": "karras", + "noise_seed": 0, + "end_at_step": 10000, + "latent_image": [ + "10", + 0 + ], + "sampler_name": "dpm_2_ancestral", + "start_at_step": 20, + "return_with_leftover_noise": "disable" + }, + "class_type": "KSamplerAdvanced" + }, + "12": { + "inputs": { + "ckpt_name": "sdXL_v10Refiner.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "15": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + }, + "class_type": "CLIPTextEncode" + }, + "16": { + "inputs": { + "clip": [ + "50", + 1 + ], + "text": "" + }, + "class_type": "CLIPTextEncode" + }, + "17": { + "inputs": { + "vae": [ + "12", + 2 + ], + "samples": [ + "11", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": [ + "17", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "49": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + }, + "50": { + "inputs": { + "clip": [ + "12", + 1 + ], + "model": [ + "12", + 0 + ], + "lora_name": "lora-sdxl-perfect-eyes.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 13, + 10, + 0, + 11, + 3, + "LATENT" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 21, + 13, + 0, + 15, + 1, + "STRING" + ], + [ + 22, + 14, + 0, + 16, + 1, + "STRING" + ], + [ + 23, + 15, + 0, + 11, + 1, + "CONDITIONING" + ], + [ + 24, + 16, + 0, + 11, + 2, + "CONDITIONING" + ], + [ + 25, + 11, + 0, + 17, + 0, + "LATENT" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 
0, + 19, + 0, + "IMAGE" + ], + [ + 34, + 12, + 2, + 17, + 1, + "VAE" + ], + [ + 38, + 45, + 0, + 11, + 4, + "INT" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 44, + 47, + 0, + 11, + 5, + "INT" + ], + [ + 45, + 4, + 0, + 49, + 0, + "MODEL" + ], + [ + 46, + 49, + 0, + 10, + 0, + "MODEL" + ], + [ + 48, + 49, + 1, + 6, + 0, + "CLIP" + ], + [ + 49, + 4, + 1, + 49, + 1, + "CLIP" + ], + [ + 50, + 49, + 1, + 7, + 0, + "CLIP" + ], + [ + 51, + 12, + 0, + 50, + 0, + "MODEL" + ], + [ + 52, + 50, + 0, + 11, + 0, + "MODEL" + ], + [ + 53, + 12, + 1, + 50, + 1, + "CLIP" + ], + [ + 54, + 50, + 1, + 15, + 0, + "CLIP" + ], + [ + 55, + 50, + 1, + 16, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 117.74066078186034, + 335.1799162292478 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18, + 22 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "" + ] + }, + { + "id": 36, + "pos": [ + -94.49287176550293, + -161.48460828083813 + ], + "mode": 0, + "size": { + "0": 315.70074462890625, + "1": 147.9551239013672 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 1, + "title": "Note - Load Checkpoint BASE", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations" + ] + }, + { + "id": 37, + "pos": [ + 175.64484131213376, + -499.8127881198046 + ], + "mode": 0, + "size": { + "0": 330, + "1": 140 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Load Checkpoint REFINER", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations." + ] + }, + { + "id": 38, + "pos": [ + 126.74066078186036, + 534.1799162292467 + ], + "mode": 0, + "size": { + "0": 284.3257141113281, + "1": 123.88604736328125 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 3, + "title": "Note - Text Prompts", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type." 
+ ] + }, + { + "id": 17, + "pos": [ + 2225.9384427180166, + -293.89278097683797 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 24, + "inputs": [ + { + "link": 25, + "name": "samples", + "type": "LATENT" + }, + { + "link": 34, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 41, + "pos": [ + 2165.9384427180166, + -193.89278097683825 + ], + "mode": 0, + "size": { + "0": 320, + "1": 120 + }, + "type": "Note", + "color": "#332922", + "flags": {}, + "order": 4, + "title": "Note - VAE Decoder", + "bgcolor": "#593930", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG." + ] + }, + { + "id": 42, + "pos": [ + 535.8140031630148, + 765.2576984821503 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 5, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 43, + "pos": [ + 965.599367688539, + -2.120033473266579 + ], + "mode": 0, + "size": { + "0": 240, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 6, + "title": "Note - CLIP Encode (REFINER)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)" + ] + }, + { + "id": 39, + "pos": [ + 655.9007817501858, + 379.7768858719531 + ], + "mode": 0, + "size": { + "0": 210, + "1": 80 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 7, + "title": "Note - CLIP Encode (BASE)", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)" + ] + }, + { + "id": 47, + "pos": [ + 919.645986397198, + 844.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 8, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43, + 44 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 45, + "pos": [ + 921.645986397198, + 697.7239366325114 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 9, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 38, + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 
10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 25, + "fixed" + ] + }, + { + "id": 48, + "pos": [ + 918.1173023958742, + 981.1125484812007 + ], + "mode": 0, + "size": { + "0": 213.90769958496094, + "1": 110.17156982421875 + }, + "type": "Note", + "color": "#432", + "flags": {}, + "order": 10, + "bgcolor": "#653", + "properties": { + "text": "" + }, + "widgets_values": [ + "These can be used to control the total sampling steps and the step at which the sampling switches to the refiner." + ] + }, + { + "id": 4, + "pos": [ + -110.49287176550291, + -311.4846082808383 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 11, + "title": "Load Checkpoint - BASE", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 45 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 49 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10.safetensors" + ] + }, + { + "id": 6, + "pos": [ + 655.9007817501858, + 199.77688587195325 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 7, + "pos": [ + 655.9007817501858, + 289.7768858719532 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 50, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 12, + "pos": [ + 44.68798146936043, + -884.5045739758183 + ], + "mode": 0, + "size": { + "0": 350, + "1": 100 + }, + "type": "CheckpointLoaderSimple", + "color": "#323", + "flags": {}, + "order": 12, + "title": "Load Checkpoint - REFINER", + "bgcolor": "#535", + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 51 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 53 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 34 + ], + "shape": 3, + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sdXL_v10Refiner.safetensors" + ] + }, + { + "id": 15, + "pos": [ + 979.7096764626475, + -193.9095223397471 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": 
"CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 20, + "inputs": [ + { + "link": 54, + "name": "clip", + "type": "CLIP" + }, + { + "link": 21, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 23 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + }, + { + "id": 16, + "pos": [ + 979.7096764626475, + -103.90952233974714 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 21, + "inputs": [ + { + "link": 55, + "name": "clip", + "type": "CLIP" + }, + { + "link": 22, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 24 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "" + ] + }, + { + "id": 40, + "pos": [ + -438, + 172 + ], + "mode": 0, + "size": { + "0": 451.5049743652344, + "1": 424.4164123535156 + }, + "type": "Note", + "color": "#223", + "flags": {}, + "order": 13, + "title": "Note - KSampler ADVANCED General Information", + "bgcolor": "#335", + "properties": { + "text": "" + }, + "widgets_values": [ + "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)." + ] + }, + { + "id": 11, + "pos": [ + 1849, + 216 + ], + "mode": 0, + "size": { + "0": 300, + "1": 340 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 23, + "title": "KSampler (Advanced) - REFINER", + "inputs": [ + { + "link": 52, + "name": "model", + "type": "MODEL", + "slot_index": 0 + }, + { + "link": 23, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 24, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 13, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 38, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 44, + "name": "start_at_step", + "type": "INT", + "widget": { + "name": "start_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 0 + } + ] + } + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 25 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "disable", + 0, + "fixed", + 25, + 8, + "dpm_2_ancestral", + "karras", + 20, + 10000, + "disable" + ] + }, + { + "id": 5, + "pos": [ + 515.8140031630143, + 615.2576984821503 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 14, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 896, + 1152, + 1 + ] + }, + { + "id": 50, + "pos": [ + 573, + -469 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 51, + "name": "model", + "type": "MODEL" + }, + { + "link": 53, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 52 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 54, + 55 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 49, + "pos": [ + 301, + -164 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 45, + "name": "model", + "type": "MODEL" + }, + { + "link": 49, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 46 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 48, + 50 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora-sdxl-perfect-eyes.safetensors", + 1, + 1 + ] + }, + { + "id": 19, + "pos": [ + 1420, + -480 + ], + "mode": 0, + "size": { + "0": 593.4341430664062, + "1": 669.8292846679688 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 25, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 10, + "pos": [ + 1218, + 263 + ], + "mode": 0, + "size": { + "0": 300, + "1": 334 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": { + "collapsed": false + }, + "order": 22, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 46, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 13 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 642992949355749, + "randomize", + 25, + 8, + "dpmpp_2s_ancestral", + "karras", + 0, + 20, + "enable" + ] + }, + { + "id": 13, + "pos": [ + 120, + 129 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 15, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16, + 21 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "single eye, professional photo of perfecteyes eyes, studio lighting, intricate" + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 635, + 120, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Refiner Prompt", + "bounding": [ + 942, + -273, + 282, + 372 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 98, + 52, + 339, + 622 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -120, + -391, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Load in REFINER SDXL Model", + "bounding": [ + 27, + -973, + 391, + 400 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 495, + 541, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 2147, + -372, + 360, + 350 + ] + }, + { + "color": "#3f789e", + "title": "Step Control", + "bounding": [ + 887, + 586, + 284, + 524 + ] + } + ], + "version": 0.4, + "last_link_id": 55, + "last_node_id": 50 + } + }, + "steps": 25, + "width": 896, + "height": 1152, + "models": [ + "sdXL_v10.safetensors", + "sdXL_v10Refiner.safetensors" + ], + "prompt": "single eye, professional photo of perfecteyes eyes, studio lighting, intricate", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "additionalResources": [ + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + }, + { + "name": "lora-sdxl-perfect-eyes.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/129711" +} \ No newline at end of file 
diff --git a/lora-sdxl-perfect-eyes.preview.png b/lora-sdxl-perfect-eyes.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..88eec524494deea61ac2cbee231ff970f0cc8ddb Binary files /dev/null and b/lora-sdxl-perfect-eyes.preview.png differ diff --git a/lora-sdxl-perfect-eyes.safetensors b/lora-sdxl-perfect-eyes.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..37c2fb63ac607be1eae7f9b486a4792808b3c8af --- /dev/null +++ b/lora-sdxl-perfect-eyes.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e1ddaaebc92863a778ef96fe07c0d64257d9a03ddc4719d85a80716b6933a45 +size 41201036 diff --git a/oil painting.civitai.info b/oil painting.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..1b1b7707216544e10f801812216559e23dd1a715 --- /dev/null +++ b/oil painting.civitai.info @@ -0,0 +1,5324 @@ +{ + "id": 128212, + "modelId": 118223, + "name": "\ud83d\uddbc\ufe0f", + "createdAt": "2023-07-29T02:14:39.926Z", + "updatedAt": "2023-08-02T02:02:53.355Z", + "status": "Published", + "publishedAt": "2023-07-29T02:30:11.789Z", + "trainedWords": [ + "bichu", + "oil painting" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": "

Under the newer webui versions, if the model does not show up in search, change one setting.

Settings \u2192\u2192 Extra Networks \u2192\u2192 enable the option "Always show all networks on the Lora page" and it will appear.

", + "stats": { + "downloadCount": 5736, + "ratingCount": 434, + "rating": 5, + "thumbsUpCount": 720 + }, + "model": { + "name": "\uff08SDXL\uff09Oil painting(oil brush stroke) - \u6cb9\u753b\u7b14\u89e6", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 92815, + "sizeKB": 665281.35546875, + "name": "oil painting.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-07-29T02:31:17.155Z", + "hashes": { + "AutoV1": "B4D6019B", + "AutoV2": "498251AADD", + "SHA256": "498251AADDB3603CC7EC07267854E9EA3BA2530115BC1CD9EEFFBCDACB250219", + "CRC32": "DAE54143", + "BLAKE3": "DC27E1C9BC6DC894CFDA2717D5F59EBB2BB2AFD617C82933F2D7B5754EBDA2E7" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/128212" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/accd873d-e7ae-4881-a6dd-831db4a569cb/width=450/1767874.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UGF6LB4p4p~p^+NLWEs:-:NIbFt3xuWYa#ay", + "type": "image", + "metadata": { + "hash": "UGF6LB4p4p~p^+NLWEs:-:NIbFt3xuWYa#ay", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 960644122000849, + "vaes": [], + "Model": "SD\\SDXL--dreamshaperXL10_alpha2Xl10", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 960644122000849, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,oil painting,masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username," + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.7 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], 
+ [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,oil painting,masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username," + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 960644122000849, + "randomize", + 24, + 7, + "dpmpp_2m", + 
"karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.7, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ], + "prompt": "bichu,oil painting,masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username,", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.7, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/05c7d914-33d2-4eb7-a02d-b881b4bae008/width=450/1767875.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UKHUqV?wJ8-:-;krxajEb^xWxDod%1adRPV?", + "type": "image", + "metadata": { + "hash": "UKHUqV?wJ8-:-;krxajEb^xWxDod%1adRPV?", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 378886303539922, + "vaes": [], + "Model": "SD\\SDXL--dreamshaperXL10_alpha2Xl10", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 378886303539922, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + 
"sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,lowres" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.7 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + ] + }, + { + 
"id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,lowres" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 378886303539922, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.7, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing 
for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,lowres", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.7, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/eeb24c17-1ef3-45fc-ae23-121e83f61f76/width=450/1767876.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U6A-xDIu00~px1I^9O=;oZRQIVn~H;xUo4NH", + "type": "image", + "metadata": { + "hash": "U6A-xDIu00~px1I^9O=;oZRQIVn~H;xUo4NH", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 723700726370021, + "vaes": [], + "Model": "SD\\SDXL--dreamshaperXL10_alpha2Xl10", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 723700726370021, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,cinematic scene, mystical glow, enchanted pool, middle of dense forest, tall shadowy trees all around, full moon overhead, stars reflecting on ripples, swirling celestial fog, bioluminescent plants, azure glow from water, intricate stonework basin, vine covered pillars, fireflies blinking, overhead branches and leaves, highly detailed texture, volumetric lighting rays, bright glow from water, blooming water lilies, frog sounds, owl hooting, tranquil and serene, ethereal atmosphere, high resolution rendering" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,lowres" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.7 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + 
"CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,cinematic scene, mystical glow, enchanted pool, middle of dense forest, tall shadowy trees all around, full moon overhead, stars reflecting on ripples, swirling celestial fog, bioluminescent plants, azure glow from water, intricate stonework basin, vine covered pillars, fireflies blinking, overhead branches and leaves, highly detailed texture, volumetric lighting rays, bright glow from water, blooming water lilies, frog sounds, owl hooting, tranquil and serene, ethereal atmosphere, high resolution rendering" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,lowres" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 723700726370021, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + 
"1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.7, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,cinematic scene, mystical glow, enchanted pool, middle of dense forest, tall shadowy trees all around, full moon overhead, stars reflecting on ripples, swirling celestial fog, bioluminescent plants, azure glow from water, intricate stonework basin, vine covered pillars, fireflies blinking, overhead branches and leaves, highly detailed texture, volumetric lighting rays, bright glow from water, blooming water lilies, frog sounds, owl hooting, tranquil and serene, ethereal atmosphere, high resolution rendering", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,lowres", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.7, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/91a9bb74-85f4-4fcb-82aa-b31110c7fda3/width=450/1767872.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7Gkg-Dn0x*0bo0LRPDh9]oe-qx^r;g4%gjX", + "type": "image", + "metadata": { + "hash": "U7Gkg-Dn0x*0bo0LRPDh9]oe-qx^r;g4%gjX", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 1079877396625634, + "vaes": [], + "Model": "SD\\SDXL--dreamshaperXL10_alpha2Xl10", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 1079877396625634, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,1girl\uff0c20yo" + }, + "class_type": "CLIPTextEncode" + 
}, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,lowres" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.7 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,1girl\uff0c20yo" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,lowres" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", 
+ "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 1079877396625634, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.7, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,1girl\uff0c20yo", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,lowres", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.7, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1515caa8-1097-4e8a-8b0f-6ca344e2437d/width=450/1767880.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UNIO|d_4E2%M^,xXRit7MyRgbdj]ajt0oMV@", + "type": "image", + "metadata": { + "hash": "UNIO|d_4E2%M^,xXRit7MyRgbdj]ajt0oMV@", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 439508636424683, + "vaes": [], + "Model": "SD\\SDXL1.0--sd_xl_base_1.0", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 439508636424683, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + 
"text": "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, brightly colored flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,blur" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, 
brightly colored flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,blur" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 439508636424683, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 1, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, brightly colored flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution", + "denoise": 1, + "sampler": "DPM++ 
2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,blur", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/500ca2ce-791a-4ffb-98bf-eca138c2e7f3/width=450/1767877.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U9IOFK00K*?bb{V=TfjIt.02~WV_^%n~Z~Vr", + "type": "image", + "metadata": { + "hash": "U9IOFK00K*?bb{V=TfjIt.02~WV_^%n~Z~Vr", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 855439474606518, + "vaes": [], + "Model": "SD\\SDXL1.0--sd_xl_base_1.0", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 855439474606518, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,blur" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.6960000610351562 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": 
"VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,blur" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 855439474606518, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + 
] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.6960000610351562, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,blur", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.6960000610351562, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ee356e76-8388-4dc6-b6ac-234379e5f635/width=450/1767879.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U6IO91000JuOIUW+Rk?w00xpDj%2?wZzWE4n", + "type": "image", + "metadata": { + "hash": "U6IO91000JuOIUW+Rk?w00xpDj%2?wZzWE4n", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 506654123212497, + "vaes": [], + "Model": "SD\\SDXL1.0--sd_xl_base_1.0", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 506654123212497, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,blur" + 
}, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 1 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + 
"widgets_values": [ + "worst quality, low quality,blur" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 506654123212497, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 1, + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,cinematic scene, 20 year old girl, sitting at outdoor cafe balcony, high rise city background, holding coffee cup pausing for a sip, sunshine lighting her face, relishing the aroma, focused placid expression, wearing stylish sundress, natural makeup look, intricate wood table details, compatibility and coziness, soft bokeh of traffic below, extremely detailed portrait, photorealistic materials, contentment during me-time, high resolution, shallow depth of field", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,blur", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 1, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/51b97615-b77d-4a94-8b61-6cd6f001dacd/width=450/1767873.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAC7H4-%00o#~qIV9Z%1_2R+9GV[_NtQRhs-", + "type": "image", + "metadata": { + "hash": "UAC7H4-%00o#~qIV9Z%1_2R+9GV[_NtQRhs-", + "width": 1024, + "height": 1024 + }, + 
"availability": "Public", + "sizeKB": null, + "meta": { + "seed": 401632494851732, + "vaes": [], + "Model": "SD\\SDXL--dreamshaperXL10_alpha2Xl10", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 401632494851732, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username," + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": "lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.7 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + 
"masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username," + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 3, + "pos": [ + 122, + -628 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 401632494851732, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 23, + "pos": [ + 584, + -908 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + 
"links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.7, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL--dreamshaperXL10_alpha2Xl10.safetensors" + ], + "prompt": "masterpiece,cinematic scene,ancient magical ruins,deep forest,overgrown with vines,crumbling architecture,ornate carvings,glowing sigils,mystical energy,shining pedestals,floating platforms,shining crystals,particle effects,Volumetric light rays,tranquil pond,fireflies,tall lush trees,stepping stones,foggy atmosphere,painterly rendering,extremely detailed elements,photorealistic style,dreamlike lighting,high resolution,depth of field,\n", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "lowres,bad anatomy,bad hands,text,error,missing fingers,extra digit,fewer digits,cropped,worst quality,low quality,normal quality,jpeg artifacts,signature,watermark,username,", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.7, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8d434331-0516-4f50-ba5f-b2e6cbc9b7b3/width=450/1767878.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UGGSfXMw00?v~TNgERozOIaIn~o$D$t8xvRi", + "type": "image", + "metadata": { + "hash": "UGGSfXMw00?v~TNgERozOIaIn~o$D$t8xvRi", + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 470177158443906, + "vaes": [], + "Model": "SD\\SDXL1.0--sd_xl_base_1.0", + "comfy": { + "prompt": { + "3": { + "inputs": { + "cfg": 7, + "seed": 470177158443906, + "model": [ + "24", + 0 + ], + "steps": 24, + "denoise": 1, + "negative": [ + "7", + 0 + ], + "positive": [ + "6", + 0 + ], + "scheduler": "karras", + "latent_image": [ + "5", + 0 + ], + "sampler_name": "dpmpp_2m" + }, + "class_type": "KSampler" + }, + "4": { + "inputs": { + "ckpt_name": "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, brightly colored flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": [ + "24", + 1 + ], + "text": "worst quality, low quality,blur" + }, + "class_type": "CLIPTextEncode" + }, + "22": { + "inputs": { + "vae": [ + "4", + 2 + ], + "samples": [ + "3", + 0 + ] + }, + "class_type": "VAEDecode" + }, + "23": { + "inputs": { + "images": [ + "22", + 0 + ], + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "24": { + "inputs": { + "clip": [ + "4", + 1 + ], + "model": [ + "4", + 0 + ], + "lora_name": 
"lora\\SDXL\\oil painting.safetensors", + "strength_clip": 1, + "strength_model": 0.65 + }, + "class_type": "LoraLoader" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 2, + 5, + 0, + 3, + 3, + "LATENT" + ], + [ + 4, + 6, + 0, + 3, + 1, + "CONDITIONING" + ], + [ + 6, + 7, + 0, + 3, + 2, + "CONDITIONING" + ], + [ + 53, + 3, + 0, + 22, + 0, + "LATENT" + ], + [ + 54, + 4, + 2, + 22, + 1, + "VAE" + ], + [ + 55, + 22, + 0, + 23, + 0, + "IMAGE" + ], + [ + 56, + 4, + 0, + 24, + 0, + "MODEL" + ], + [ + 58, + 4, + 1, + 24, + 1, + "CLIP" + ], + [ + 73, + 24, + 0, + 3, + 0, + "MODEL" + ], + [ + 74, + 24, + 1, + 6, + 0, + "CLIP" + ], + [ + 75, + 24, + 1, + 7, + 0, + "CLIP" + ] + ], + "nodes": [ + { + "id": 4, + "pos": [ + -1088, + -334 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 0, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 56 + ], + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 58 + ], + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": [ + 54 + ], + "slot_index": 2 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 23, + "pos": [ + 586, + -719 + ], + "mode": 0, + "size": [ + 1025.958345544434, + 1081.9538536071782 + ], + "type": "SaveImage", + "flags": {}, + "order": 7, + "inputs": [ + { + "link": 55, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 3, + "pos": [ + 89, + -617 + ], + "mode": 0, + "size": { + "0": 315, + "1": 262 + }, + "type": "KSampler", + "flags": {}, + "order": 5, + "inputs": [ + { + "link": 73, + "name": "model", + "type": "MODEL" + }, + { + "link": 4, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 6, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 2, + "name": "latent_image", + "type": "LATENT" + } + ], + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 53 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSampler" + }, + "widgets_values": [ + 470177158443906, + "randomize", + 24, + 7, + "dpmpp_2m", + "karras", + 1 + ] + }, + { + "id": 22, + "pos": [ + 296, + -259 + ], + "mode": 0, + "size": { + "0": 210, + "1": 46 + }, + "type": "VAEDecode", + "flags": {}, + "order": 6, + "inputs": [ + { + "link": 53, + "name": "samples", + "type": "LATENT" + }, + { + "link": 54, + "name": "vae", + "type": "VAE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 55 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 6, + "pos": [ + -357, + -779 + ], + "mode": 0, + "size": { + "0": 422.84503173828125, + "1": 164.31304931640625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 3, + "inputs": [ + { + "link": 74, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 4 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, brightly colored 
flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution" + ] + }, + { + "id": 7, + "pos": [ + -368, + -568 + ], + "mode": 0, + "size": { + "0": 425.27801513671875, + "1": 180.6060791015625 + }, + "type": "CLIPTextEncode", + "flags": {}, + "order": 4, + "inputs": [ + { + "link": 75, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 6 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "worst quality, low quality,blur" + ] + }, + { + "id": 5, + "pos": [ + -292, + -304 + ], + "mode": 0, + "size": { + "0": 315, + "1": 106 + }, + "type": "EmptyLatentImage", + "flags": {}, + "order": 1, + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 2 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + }, + { + "id": 24, + "pos": [ + -741, + -331 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 2, + "inputs": [ + { + "link": 56, + "name": "model", + "type": "MODEL" + }, + { + "link": 58, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 73 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 74, + 75 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "lora\\SDXL\\oil painting.safetensors", + 0.65, + 1 + ] + } + ], + "config": {}, + "groups": [], + "version": 0.4, + "last_link_id": 75, + "last_node_id": 30 + } + }, + "steps": 24, + "width": 1024, + "height": 1024, + "models": [ + "SD\\SDXL1.0--sd_xl_base_1.0.safetensors" + ], + "prompt": "bichu,masterpiece,oil painting,masterpiece, serene jungle lake, surface like a mirror, reflecting the trees and blue sky, surrounded by dense rainforest, beams of sunlight streaking through treetops, tranquility, not a ripple on the water, intricate details of foliage, wading birds by the shoreline, brightly colored flowers dotting banks, expansive vista of wilderness, highly detailed texture, photorealistic style, 1:1 scale, cinematic composition, atmosphere of peace, volumetric mist, 8K resolution", + "denoise": 1, + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "scheduler": "karras", + "upscalers": [], + "controlNets": [], + "negativePrompt": "worst quality, low quality,blur", + "additionalResources": [ + { + "name": "lora\\SDXL\\oil painting.safetensors", + "type": "lora", + "strength": 0.65, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d2019d62-8639-4e88-99f7-86ae4920f001/width=450/1769158.jpeg", + "nsfw": "None", + "width": 2732, + "height": 1216, + "hash": "UC9HLX4UM{_M%MWBR+t6V@ayozV@Rnj;WDoK", + "type": "image", + "metadata": { + "hash": "UC9HLX4UM{_M%MWBR+t6V@ayozV@Rnj;WDoK", + "width": 2732, + "height": 1216 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + } + ], + "downloadUrl": "https://civitai.com/api/download/models/128212" +} \ No newline at end of file diff --git a/oil painting.preview.png b/oil painting.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..05d160d3fce78dfc79407f95a1661f23c80d52fc Binary files /dev/null and 
b/oil painting.preview.png differ diff --git a/oil painting.safetensors b/oil painting.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d893f9f07d4aff3f52fcc2506145ee2bf4da6fc7 --- /dev/null +++ b/oil painting.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:498251aaddb3603cc7ec07267854e9ea3ba2530115bc1cd9eeffbcdacb250219 +size 681248108 diff --git a/perfect feet.civitai.info b/perfect feet.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..736fa59c59f711cb9603bdaa3ce23d4495ced615 --- /dev/null +++ b/perfect feet.civitai.info @@ -0,0 +1,229 @@ +{ + "id": 225347, + "modelId": 200251, + "name": "Feet v1.0", + "createdAt": "2023-11-14T20:23:59.410Z", + "updatedAt": "2023-11-16T20:36:25.745Z", + "status": "Published", + "publishedAt": "2023-11-14T20:27:51.351Z", + "trainedWords": [ + "Perfect feet" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

Human Feet body part.\n\nCan help with better feet quality.
", + "stats": { + "downloadCount": 5531, + "ratingCount": 68, + "rating": 4.91, + "thumbsUpCount": 109 + }, + "model": { + "name": "Feet XL 1.0 + SD 1.5", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 173754, + "sizeKB": 445788.17578125, + "name": "perfect feet.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-11-14T20:25:55.096Z", + "hashes": { + "AutoV1": "312D8B38", + "AutoV2": "E4EB845440", + "SHA256": "E4EB845440623588FE8DAEC1A411B07FEF2E0A795083AFB1BC08EDE3946E78D2", + "CRC32": "2D88DB83", + "BLAKE3": "25405B3F78B586910E1531825F0E0284CF17CCD2E8A04E6C78EE892FFA300E64", + "AutoV3": "ECEFC69D5E4284B5236504A66419B80369FEFD2C385401F6BB826F5BC67FE9C9" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/225347" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/19de5704-d049-4eaa-b55e-b928b64c2576/width=450/3627457.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UJB|d6e8IpRj$KRPWXx]I9IoV?x]_NIUR,Ri", + "type": "image", + "metadata": { + "hash": "UJB|d6e8IpRj$KRPWXx]I9IoV?x]_NIUR,Ri", + "size": 939422, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 232441932, + "Model": "realisticStockPhoto_v10", + "steps": 45, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "cinematic film still of \nperfect feet, shallow depth of field, vignette, highly detailed, high budget, bokeh, cinemascope, moody, epic, gorgeous, film grain, grainy", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"perfect feet": "ecefc69d5e42\"", + "negativePrompt": "anime, cartoon, graphic, text, painting, crayon, graphite, abstract, glitch, deformed, mutated, ugly, disfigured, bad hands, ugly hands, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, JPEG artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Cinematic Film", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7315cca9-dc0b-4208-9acf-1d12829602b0/width=450/3627458.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UEKA.;00ys?v00%2D*R*9u~pD%oL8_NGa#M{", + "type": "image", + "metadata": { + "hash": "UEKA.;00ys?v00%2D*R*9u~pD%oL8_NGa#M{", + "size": 847789, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": 
null, + "meta": { + "Size": "1024x1024", + "seed": 2207408306, + "Model": "realisticStockPhoto_v10", + "steps": 45, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "professional 3d model of \nperfect feet, octane render, highly detailed, volumetric, dramatic lighting", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"perfect feet": "ecefc69d5e42\"", + "negativePrompt": "ugly, deformed, noisy, low poly, blurry, painting, bad hands, ugly hands, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, JPEG artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "3D Model", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5b66b50a-8f60-4e4d-9568-440b652052b3/width=450/3627459.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UHH{p9Fyzn~V3sEM%#569|r;Xnoe=xRPoJxC", + "type": "image", + "metadata": { + "hash": "UHH{p9Fyzn~V3sEM%#569|r;Xnoe=xRPoJxC", + "size": 1089648, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 550000550, + "Model": "realisticStockPhoto_v10", + "steps": 45, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "concept art of \nperfect feet, digital artwork, illustrative, painterly, matte painting, highly detailed", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"perfect feet": "ecefc69d5e42\"", + "negativePrompt": "photo, photorealistic, realism, ugly, bad hands, ugly hands, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, JPEG artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Digital Art", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } 
+ }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/297c796b-0af7-489d-b02d-34b2705670ae/width=450/3627460.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UAJa.V0K0c4?{,03T1VX00xG#OMfxxtR9G9u", + "type": "image", + "metadata": { + "hash": "UAJa.V0K0c4?{,03T1VX00xG#OMfxxtR9G9u", + "size": 1077288, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3362760195, + "Model": "realisticStockPhoto_v10", + "steps": 45, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "concept art of \nperfect feet, digital artwork, illustrative, painterly, matte painting, highly detailed", + "Version": "v1.6.0-2-g4afaaf8a", + "sampler": "DPM++ 3M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"perfect feet": "ecefc69d5e42\"", + "negativePrompt": "photo, photorealistic, realism, ugly, bad hands, ugly hands, ugly breasts, tripod, camera, anime, animation, cartoon, 3D, drawing, painting, (censorship, censored, worst quality, low quality, normal quality, lowres, low details, bad photo, bad photography, bad art:1.4), (watermark, signature, text font, username, error, logo, words, letters, digits, autograph, trademark, name:1.2), (blur, blurry), morbid, ugly, asymmetrical, mutated malformed, mutilated, poorly lit, bad shadow, draft, cropped, out of frame, cut off, censored, JPEG artifacts, out of focus, glitch, duplicate, (bad hands, bad anatomy, bad body, bad face, bad teeth, bad arms, bad legs, deformities:1.3) (ugly hands, ugly anatomy, ugly body, ugly face, ugly teeth, ugly arms, ugly legs, deformities:1.3) ugly fingers, bad fingers, (((ugly nipples, bad nipples, deformed nipples))), (((Bad teeth, ugly teeth)))", + "Style Selector Style": "Digital Art", + "Style Selector Enabled": "True", + "Style Selector Randomize": "False" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/225347" +} \ No newline at end of file diff --git a/perfect feet.preview.png b/perfect feet.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..baf60f323ebd73c61559e8d442f0992119d88c95 Binary files /dev/null and b/perfect feet.preview.png differ diff --git a/perfect feet.safetensors b/perfect feet.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..17fc96b1af77d79f40cd216bf7a1cc9e9f6454cd --- /dev/null +++ b/perfect feet.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4eb845440623588fe8daec1a411b07fef2e0a795083afb1bc08ede3946e78d2 +size 456487092 diff --git a/pixel-art-xl-v1.1.civitai.info b/pixel-art-xl-v1.1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..4cef4b009bae9dd1c8107d9ebe6d1c5b357f3421 --- /dev/null +++ b/pixel-art-xl-v1.1.civitai.info @@ -0,0 +1,9006 @@ +{ + "id": 135931, + "modelId": 120096, + "name": "v1.1", + "createdAt": "2023-08-07T15:59:01.580Z", + "updatedAt": "2023-08-07T16:02:34.411Z", + "status": "Published", + "publishedAt": "2023-08-07T16:02:34.409Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": "

Update compatibility with AUTOMATIC1111\nBetter coherence\nMore flexible
", + "stats": { + "downloadCount": 39678, + "ratingCount": 1980, + "rating": 4.99, + "thumbsUpCount": 3329 + }, + "model": { + "name": "Pixel Art XL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 99321, + "sizeKB": 166545.94921875, + "name": "pixel-art-xl-v1.1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-07T16:00:42.028Z", + "hashes": { + "AutoV1": "1643DE9F", + "AutoV2": "BBF3D8DEFB", + "SHA256": "BBF3D8DEFBFB3FB71331545225C0CF50C74A748D2525F7C19EBB8F74445DE274", + "CRC32": "B8AF58B7", + "BLAKE3": "E90959513C166BDF9A33D14E20DB4C003559C98C8F16C1F90FAF6F60089D3435" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/135931" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0770e5fc-4260-4e3c-a0ee-bf2253028e9f/width=450/1918193.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UBM8E0x[4Sx]%Laff+of1cWV^6s;-Uj[NIWC", + "type": "image", + "metadata": { + "hash": "UBM8E0x[4Sx]%Laff+of1cWV^6s;-Uj[NIWC", + "size": 38898, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sd_xl_base_1.0", + "comfy": { + "prompt": { + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 
1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "17": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + 
"inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "48": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": 
"pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "49": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "51": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + 
"class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "52": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": 
"pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "53": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "55": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "57": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "58": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + 
"class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "x8_upscaled_" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 25468944332627, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "nn_downscaled_" + }, + "class_type": "SaveImage" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 45, + 17, + 0, + 48, + 0, + "IMAGE" + ], + [ + 46, + 48, + 0, + 49, + 0, + "IMAGE" + ], + [ + 47, + 48, + 0, + 51, + 0, + "IMAGE" + ], + [ + 48, + 51, + 0, + 52, + 0, + "IMAGE" + ], + [ + 52, + 53, + 1, + 6, + 0, + "CLIP" + ], + [ + 53, + 53, + 
1, + 7, + 0, + "CLIP" + ], + [ + 54, + 53, + 0, + 10, + 0, + "MODEL" + ], + [ + 56, + 10, + 0, + 17, + 0, + "LATENT" + ], + [ + 57, + 55, + 0, + 17, + 1, + "VAE" + ], + [ + 58, + 57, + 0, + 53, + 0, + "MODEL" + ], + [ + 59, + 57, + 1, + 53, + 1, + "CLIP" + ], + [ + 60, + 51, + 0, + 58, + 0, + "IMAGE" + ], + [ + 61, + 48, + 0, + 59, + 0, + "IMAGE" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 3587.205062483223, + 681.3654430100763 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + ] + }, + { + "id": 6, + "pos": [ + 613.8337091823432, + 76.76253406667381 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 9, + "inputs": [ + { + "link": 52, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + ] + }, + { + "id": 45, + "pos": [ + 1004, + 624 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 1, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 17, + "pos": [ + 1533.2421162516673, + 124.62247729681123 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 56, + "name": "samples", + "type": "LATENT" + }, + { + "link": 57, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 45 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 42, + "pos": [ + 565, + 801 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 7, + "pos": [ + 611.3369181870398, + 211.28450994264554 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 10, + "inputs": [ + { + "link": 53, + 
"name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + ] + }, + { + "id": 57, + "pos": [ + -92, + -573 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 3, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 58 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 59 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 19, + "pos": [ + 1502, + 352 + ], + "mode": 0, + "size": { + "0": 701.2393188476562, + "1": 745.6878051757812 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 13, + "pos": [ + 3587, + 481 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 4, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + ] + }, + { + "id": 49, + "pos": [ + 2293, + 404 + ], + "mode": 0, + "size": { + "0": 301.1867370605469, + "1": 246 + }, + "type": "PreviewImage", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 46, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 48, + "pos": [ + 1898, + 192 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 46, + 47, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 0.125 + ] + }, + { + "id": 59, + "pos": [ + 1918, + -160 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 61, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "nn_downscaled_" + ] + }, + { + "id": 58, + "pos": [ + 2264, + -212 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 60, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "x8_upscaled_" + ] + }, + { + "id": 51, + "pos": [ + 2262, + 185 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": 
"ImageScaleBy", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 47, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 48, + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 8 + ] + }, + { + "id": 10, + "pos": [ + 1044, + -82 + ], + "mode": 0, + "size": { + "0": 343.2493896484375, + "1": 595.7294921875 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 11, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 54, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 25468944332627, + "randomize", + 20, + 8, + "euler_ancestral", + "normal", + 0, + 20, + "enable" + ] + }, + { + "id": 47, + "pos": [ + 1002, + 765 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 5, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 55, + "pos": [ + 343, + -470 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "flags": {}, + "order": 6, + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 57 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae.safetensors" + ] + }, + { + "id": 53, + "pos": [ + 352, + -344 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 8, + "inputs": [ + { + "link": 58, + "name": "model", + "type": "MODEL" + }, + { + "link": 59, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 54 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 52, + 53 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "pixel-art-xl-v1.1.safetensors", + 1.0000000000000002, + 1 + ] + }, + { + "id": 52, + "pos": [ + 2656, + 358 + ], + "mode": 0, + "size": { + "0": 808.3607177734375, + "1": 777.3040771484375 + }, + "type": "PreviewImage", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 5, 
+ "pos": [ + 544.5041024540301, + 651.1200708259006 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 7, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 4 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 593, + -4, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 3567, + 398, + 364, + 487 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -108, + -812, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 524, + 577, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 1454, + 46, + 336, + 157 + ] + } + ], + "version": 0.4, + "last_link_id": 61, + "last_node_id": 60 + } + }, + "steps": 20, + "width": 1024, + "height": 1024, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)", + "sampler": "Euler a", + "cfgScale": 8, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline", + "additionalResources": [ + { + "name": "pixel-art-xl-v1.1.safetensors", + "type": "lora", + "strength": 1.0000000000000002, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/684efffb-c290-4e5b-b0be-90b3e7234a89/width=450/1918242.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U5OWQY?H?^?a^*ayR.og%~a|H?oL-=ofskRP", + "type": "image", + "metadata": { + "hash": "U5OWQY?H?^?a^*ayR.og%~a|H?oL-=ofskRP", + "size": 27930, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sd_xl_base_1.0", + "comfy": { + "prompt": { + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": 
"sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "17": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, 
+ "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "48": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": 
"sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "49": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": 
"KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "51": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "52": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": 
"CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "53": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "55": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "57": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "58": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": 
"CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "x8_upscaled_" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute shiba inu, minimalist, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 937731538949123, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "nn_downscaled_" + }, + "class_type": "SaveImage" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + 
], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 45, + 17, + 0, + 48, + 0, + "IMAGE" + ], + [ + 46, + 48, + 0, + 49, + 0, + "IMAGE" + ], + [ + 47, + 48, + 0, + 51, + 0, + "IMAGE" + ], + [ + 48, + 51, + 0, + 52, + 0, + "IMAGE" + ], + [ + 52, + 53, + 1, + 6, + 0, + "CLIP" + ], + [ + 53, + 53, + 1, + 7, + 0, + "CLIP" + ], + [ + 54, + 53, + 0, + 10, + 0, + "MODEL" + ], + [ + 56, + 10, + 0, + 17, + 0, + "LATENT" + ], + [ + 57, + 55, + 0, + 17, + 1, + "VAE" + ], + [ + 58, + 57, + 0, + 53, + 0, + "MODEL" + ], + [ + 59, + 57, + 1, + 53, + 1, + "CLIP" + ], + [ + 60, + 51, + 0, + 58, + 0, + "IMAGE" + ], + [ + 61, + 48, + 0, + 59, + 0, + "IMAGE" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 3587.205062483223, + 681.3654430100763 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + ] + }, + { + "id": 6, + "pos": [ + 613.8337091823432, + 76.76253406667381 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 9, + "inputs": [ + { + "link": 52, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a cute shiba inu, minimalist, (pixel:0.8)" + ] + }, + { + "id": 45, + "pos": [ + 1004, + 624 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 1, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 17, + "pos": [ + 1533.2421162516673, + 124.62247729681123 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 56, + "name": "samples", + "type": "LATENT" + }, + { + "link": 57, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 45 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 42, + "pos": [ + 2310.769893066405, + 912.6079321899413 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Empty Latent Image", + 
"bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 7, + "pos": [ + 611.3369181870398, + 211.28450994264554 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 10, + "inputs": [ + { + "link": 53, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + ] + }, + { + "id": 57, + "pos": [ + -92, + -573 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 3, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 58 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 59 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 19, + "pos": [ + 1502, + 352 + ], + "mode": 0, + "size": { + "0": 701.2393188476562, + "1": 745.6878051757812 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 13, + "pos": [ + 3587, + 481 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 4, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "a cute shiba inu, minimalist, (pixel:0.8), flat" + ] + }, + { + "id": 49, + "pos": [ + 2293, + 404 + ], + "mode": 0, + "size": { + "0": 301.1867370605469, + "1": 246 + }, + "type": "PreviewImage", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 46, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 48, + "pos": [ + 1898, + 192 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 46, + 47, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 0.125 + ] + }, + { + "id": 59, + "pos": [ + 1918, + -160 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + 
"order": 17, + "inputs": [ + { + "link": 61, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "nn_downscaled_" + ] + }, + { + "id": 58, + "pos": [ + 2264, + -212 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 60, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "x8_upscaled_" + ] + }, + { + "id": 51, + "pos": [ + 2262, + 185 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 47, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 48, + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 8 + ] + }, + { + "id": 10, + "pos": [ + 1044, + -82 + ], + "mode": 0, + "size": { + "0": 343.2493896484375, + "1": 595.7294921875 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 11, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 54, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 937731538949123, + "randomize", + 20, + 8, + "euler_ancestral", + "normal", + 0, + 20, + "enable" + ] + }, + { + "id": 47, + "pos": [ + 1002, + 765 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 5, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 55, + "pos": [ + 343, + -470 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "flags": {}, + "order": 6, + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 57 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae.safetensors" + ] + }, + { + "id": 53, + "pos": [ + 352, + -344 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 8, + "inputs": [ + { + "link": 58, + "name": "model", + "type": "MODEL" + }, + { + "link": 59, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 54 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": 
"CLIP", + "links": [ + 52, + 53 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "pixel-art-xl-v1.1.safetensors", + 1.0000000000000002, + 1 + ] + }, + { + "id": 52, + "pos": [ + 2654, + 451 + ], + "mode": 0, + "size": { + "0": 808.3607177734375, + "1": 777.3040771484375 + }, + "type": "PreviewImage", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 5, + "pos": [ + 2289.322656127929, + 762.2985609054565 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 7, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 593, + -4, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 3567, + 398, + 364, + 487 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -108, + -812, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 2270, + 688, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 1454, + 46, + 336, + 157 + ] + } + ], + "version": 0.4, + "last_link_id": 61, + "last_node_id": 60 + } + }, + "steps": 20, + "width": 1024, + "height": 1024, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "a cute shiba inu, minimalist, (pixel:0.8), flat", + "sampler": "Euler a", + "cfgScale": 8, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, blurry, deformed, depth of field, realistic, 3d render", + "additionalResources": [ + { + "name": "pixel-art-xl-v1.1.safetensors", + "type": "lora", + "strength": 1.0000000000000002, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6c54e73b-ee6f-4ca4-b683-f6aa619894c1/width=450/1918331.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UYPPZG9E10R+_3M{?wo}%#-;iwxa.8xvnij]", + "type": "image", + "metadata": { + "hash": "UYPPZG9E10R+_3M{?wo}%#-;iwxa.8xvnij]", + "size": 60172, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sd_xl_base_1.0", + "comfy": { + "prompt": { + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": 
"sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "17": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, 
watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "48": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + 
"samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "49": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, 
+ "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "51": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "52": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": 
"sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "53": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "55": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "57": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "58": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + 
"strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "x8_upscaled_" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 
1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1121265674197334, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 1 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "nn_downscaled_" + }, + "class_type": "SaveImage" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 45, + 17, + 0, + 48, + 0, + "IMAGE" + ], + [ + 46, + 48, + 0, + 49, + 0, + "IMAGE" + ], + [ + 47, + 48, + 0, + 51, + 0, + "IMAGE" + ], + [ + 48, + 51, + 0, + 52, + 0, + "IMAGE" + ], + [ + 52, + 53, + 1, + 6, + 0, + "CLIP" + ], + [ + 53, + 53, + 1, + 7, + 0, + "CLIP" + ], + [ + 54, + 53, + 0, + 10, + 0, + "MODEL" + ], + [ + 56, + 10, + 0, + 17, + 0, + "LATENT" + ], + [ + 57, + 55, + 0, + 17, + 1, + "VAE" + ], + [ + 58, + 57, + 0, + 53, + 0, + "MODEL" + ], + [ + 59, + 57, + 1, + 53, + 1, + "CLIP" + ], + [ + 60, + 51, + 0, + 58, + 0, + "IMAGE" + ], + [ + 61, + 48, + 0, + 59, + 0, + "IMAGE" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 3587.205062483223, + 681.3654430100763 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + ] + }, + { + "id": 6, + "pos": [ + 613.8337091823432, + 76.76253406667381 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + "flags": {}, + "order": 9, + "inputs": [ + { + "link": 52, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat" + ] + }, + { + "id": 45, + "pos": [ + 1004, + 624 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 1, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 41 + ], + "widget": { + "name": 
"steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 17, + "pos": [ + 1533.2421162516673, + 124.62247729681123 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 56, + "name": "samples", + "type": "LATENT" + }, + { + "link": 57, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 45 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 42, + "pos": [ + 2310.769893066405, + 912.6079321899413 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 7, + "pos": [ + 611.3369181870398, + 211.28450994264554 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 10, + "inputs": [ + { + "link": 53, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render" + ] + }, + { + "id": 57, + "pos": [ + -92, + -573 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 3, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 58 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 59 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 19, + "pos": [ + 1502, + 352 + ], + "mode": 0, + "size": { + "0": 701.2393188476562, + "1": 745.6878051757812 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 13, + "pos": [ + 3587, + 481 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 4, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "portrait of a woman with blonde hair, wearing 
a white summer dress, (pixel:0.8), flat" + ] + }, + { + "id": 49, + "pos": [ + 2293, + 404 + ], + "mode": 0, + "size": { + "0": 301.1867370605469, + "1": 246 + }, + "type": "PreviewImage", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 46, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 48, + "pos": [ + 1898, + 192 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 46, + 47, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 0.125 + ] + }, + { + "id": 59, + "pos": [ + 1918, + -160 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 61, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "nn_downscaled_" + ] + }, + { + "id": 58, + "pos": [ + 2264, + -212 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 60, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "x8_upscaled_" + ] + }, + { + "id": 51, + "pos": [ + 2262, + 185 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 47, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 48, + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 8 + ] + }, + { + "id": 10, + "pos": [ + 1044, + -82 + ], + "mode": 0, + "size": { + "0": 343.2493896484375, + "1": 595.7294921875 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 11, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 54, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 1121265674197334, + "randomize", + 20, + 8, + "euler_ancestral", + "normal", + 0, + 20, + "enable" + ] + }, + { + "id": 47, + "pos": [ + 1002, + 765 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 5, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43 + ], + "widget": { + "name": "end_at_step", + 
"config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 55, + "pos": [ + 343, + -470 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "flags": {}, + "order": 6, + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 57 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae.safetensors" + ] + }, + { + "id": 53, + "pos": [ + 352, + -344 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 8, + "inputs": [ + { + "link": 58, + "name": "model", + "type": "MODEL" + }, + { + "link": 59, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 54 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 52, + 53 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "pixel-art-xl-v1.1.safetensors", + 1.0000000000000002, + 1 + ] + }, + { + "id": 52, + "pos": [ + 2654, + 451 + ], + "mode": 0, + "size": { + "0": 808.3607177734375, + "1": 777.3040771484375 + }, + "type": "PreviewImage", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 5, + "pos": [ + 2289.322656127929, + 762.2985609054565 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 7, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 1 + ] + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 593, + -4, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 3567, + 398, + 364, + 487 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -108, + -812, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 2270, + 688, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 1454, + 46, + 336, + 157 + ] + } + ], + "version": 0.4, + "last_link_id": 61, + "last_node_id": 60 + } + }, + "steps": 20, + "width": 1024, + "height": 1024, + "models": [ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "portrait of a woman with blonde hair, wearing a white summer dress, (pixel:0.8), flat", + "sampler": "Euler a", + "cfgScale": 8, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, blurry, deformed, depth of field, realistic, 3d render", + "additionalResources": [ + { + "name": "pixel-art-xl-v1.1.safetensors", + "type": "lora", + "strength": 1.0000000000000002, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/21af48d8-0538-4065-bba2-385e4b189d90/width=450/1918359.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7M70^%L0}%Mw@j[tmay0ej[~Cay?wayDhoe", + "type": "image", + "metadata": { + "hash": 
"U7M70^%L0}%Mw@j[tmay0ej[~Cay?wayDhoe", + "size": 33875, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "vaes": [ + "sdxl_vae.safetensors" + ], + "Model": "sd_xl_base_1.0", + "comfy": { + "prompt": { + "5": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "6": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "7": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "10": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + }, + "17": { + "inputs": { + "vae": { + "inputs": { + 
"vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "19": { + "inputs": { + "images": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": 
"sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "filename_prefix": "ComfyUI" + }, + "class_type": "SaveImage" + }, + "48": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "49": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": 
"sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "51": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + 
"strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "52": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + } + }, + "class_type": "PreviewImage" + }, + "53": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + 
"lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "55": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "57": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "58": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "scale_by": 8, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "x8_upscaled_" + }, + "class_type": "SaveImage" + }, + "59": { + "inputs": { + "images": { + "inputs": { + "image": { + "inputs": { + "vae": { + "inputs": { + "vae_name": "sdxl_vae.safetensors" + }, + "class_type": "VAELoader" + }, + "samples": { + "inputs": { + "cfg": 8, + "model": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "steps": 20, + "negative": { + 
"inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + }, + "class_type": "CLIPTextEncode" + }, + "positive": { + "inputs": { + "clip": { + "inputs": { + "clip": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "model": { + "inputs": { + "ckpt_name": "sd_xl_base_1.0.safetensors" + }, + "class_type": "CheckpointLoaderSimple" + }, + "lora_name": "pixel-art-xl-v1.1.safetensors", + "strength_clip": 1, + "strength_model": 1.0000000000000002 + }, + "class_type": "LoraLoader" + }, + "text": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + }, + "class_type": "CLIPTextEncode" + }, + "add_noise": "enable", + "scheduler": "normal", + "noise_seed": 1024798686504084, + "end_at_step": 20, + "latent_image": { + "inputs": { + "width": 1024, + "height": 1024, + "batch_size": 4 + }, + "class_type": "EmptyLatentImage" + }, + "sampler_name": "euler_ancestral", + "start_at_step": 0, + "return_with_leftover_noise": "enable" + }, + "class_type": "KSamplerAdvanced" + } + }, + "class_type": "VAEDecode" + }, + "scale_by": 0.125, + "upscale_method": "nearest-exact" + }, + "class_type": "ImageScaleBy" + }, + "filename_prefix": "nn_downscaled_" + }, + "class_type": "SaveImage" + } + }, + "workflow": { + "extra": {}, + "links": [ + [ + 11, + 6, + 0, + 10, + 1, + "CONDITIONING" + ], + [ + 12, + 7, + 0, + 10, + 2, + "CONDITIONING" + ], + [ + 16, + 13, + 0, + 6, + 1, + "STRING" + ], + [ + 18, + 14, + 0, + 7, + 1, + "STRING" + ], + [ + 27, + 5, + 0, + 10, + 3, + "LATENT" + ], + [ + 28, + 17, + 0, + 19, + 0, + "IMAGE" + ], + [ + 41, + 45, + 0, + 10, + 4, + "INT" + ], + [ + 43, + 47, + 0, + 10, + 5, + "INT" + ], + [ + 45, + 17, + 0, + 48, + 0, + "IMAGE" + ], + [ + 46, + 48, + 0, + 49, + 0, + "IMAGE" + ], + [ + 47, + 48, + 0, + 51, + 0, + "IMAGE" + ], + [ + 48, + 51, + 0, + 52, + 0, + "IMAGE" + ], + [ + 52, + 53, + 1, + 6, + 0, + "CLIP" + ], + [ + 53, + 53, + 1, + 7, + 0, + "CLIP" + ], + [ + 54, + 53, + 0, + 10, + 0, + "MODEL" + ], + [ + 56, + 10, + 0, + 17, + 0, + "LATENT" + ], + [ + 57, + 55, + 0, + 17, + 1, + "VAE" + ], + [ + 58, + 57, + 0, + 53, + 0, + "MODEL" + ], + [ + 59, + 57, + 1, + 53, + 1, + "CLIP" + ], + [ + 60, + 51, + 0, + 58, + 0, + "IMAGE" + ], + [ + 61, + 48, + 0, + 59, + 0, + "IMAGE" + ] + ], + "nodes": [ + { + "id": 14, + "pos": [ + 3587.205062483223, + 681.3654430100763 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#322", + "flags": {}, + "order": 0, + "title": "Negative Prompt (Text)", + "bgcolor": "#533", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 18 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + ] + }, + { + "id": 6, + "pos": [ + 613.8337091823432, + 76.76253406667381 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#232", + 
"flags": {}, + "order": 9, + "inputs": [ + { + "link": 52, + "name": "clip", + "type": "CLIP" + }, + { + "link": 16, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#353", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 11 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "a cute happy corgi, (flat shading:1.2), (minimalist:1.4)" + ] + }, + { + "id": 45, + "pos": [ + 1004, + 624 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 1, + "title": "steps", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 41 + ], + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + } + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 17, + "pos": [ + 1533.2421162516673, + 124.62247729681123 + ], + "mode": 0, + "size": { + "0": 200, + "1": 50 + }, + "type": "VAEDecode", + "color": "#332922", + "flags": {}, + "order": 12, + "inputs": [ + { + "link": 56, + "name": "samples", + "type": "LATENT" + }, + { + "link": 57, + "name": "vae", + "type": "VAE" + } + ], + "bgcolor": "#593930", + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 28, + 45 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAEDecode" + } + }, + { + "id": 42, + "pos": [ + 565, + 801 + ], + "mode": 0, + "size": { + "0": 260, + "1": 210 + }, + "type": "Note", + "color": "#323", + "flags": {}, + "order": 2, + "title": "Note - Empty Latent Image", + "bgcolor": "#535", + "properties": { + "text": "" + }, + "widgets_values": [ + "This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536" + ] + }, + { + "id": 7, + "pos": [ + 611.3369181870398, + 211.28450994264554 + ], + "mode": 0, + "size": { + "0": 210, + "1": 54 + }, + "type": "CLIPTextEncode", + "color": "#322", + "flags": {}, + "order": 10, + "inputs": [ + { + "link": 53, + "name": "clip", + "type": "CLIP" + }, + { + "link": 18, + "name": "text", + "type": "STRING", + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 1 + } + ], + "bgcolor": "#533", + "outputs": [ + { + "name": "CONDITIONING", + "type": "CONDITIONING", + "links": [ + 12 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "CLIPTextEncode" + }, + "widgets_values": [ + "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline" + ] + }, + { + "id": 57, + "pos": [ + -92, + -573 + ], + "mode": 0, + "size": { + "0": 315, + "1": 98 + }, + "type": "CheckpointLoaderSimple", + "flags": {}, + "order": 3, + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 58 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 59 + ], + "shape": 3, + "slot_index": 1 + }, + { + "name": "VAE", + "type": "VAE", + "links": null, + "shape": 3 + } + ], + "properties": { + "Node name for S&R": "CheckpointLoaderSimple" + }, + "widgets_values": [ + "sd_xl_base_1.0.safetensors" + ] + }, + { + "id": 19, + 
"pos": [ + 1502, + 352 + ], + "mode": 0, + "size": { + "0": 701.2393188476562, + "1": 745.6878051757812 + }, + "type": "SaveImage", + "color": "#222", + "flags": {}, + "order": 13, + "inputs": [ + { + "link": 28, + "name": "images", + "type": "IMAGE" + } + ], + "bgcolor": "#000", + "properties": {}, + "widgets_values": [ + "ComfyUI" + ] + }, + { + "id": 13, + "pos": [ + 3587, + 481 + ], + "mode": 0, + "size": { + "0": 300, + "1": 160 + }, + "type": "PrimitiveNode", + "color": "#232", + "flags": {}, + "order": 4, + "title": "Positive Prompt (Text)", + "bgcolor": "#353", + "outputs": [ + { + "name": "STRING", + "type": "STRING", + "links": [ + 16 + ], + "widget": { + "name": "text", + "config": [ + "STRING", + { + "multiline": true + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)" + ] + }, + { + "id": 49, + "pos": [ + 2293, + 404 + ], + "mode": 0, + "size": { + "0": 301.1867370605469, + "1": 246 + }, + "type": "PreviewImage", + "flags": {}, + "order": 15, + "inputs": [ + { + "link": 46, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + }, + { + "id": 48, + "pos": [ + 1898, + 192 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 14, + "inputs": [ + { + "link": 45, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 46, + 47, + 61 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 0.125 + ] + }, + { + "id": 59, + "pos": [ + 1918, + -160 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 17, + "inputs": [ + { + "link": 61, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "nn_downscaled_" + ] + }, + { + "id": 58, + "pos": [ + 2264, + -212 + ], + "mode": 0, + "size": { + "0": 315, + "1": 270 + }, + "type": "SaveImage", + "flags": {}, + "order": 19, + "inputs": [ + { + "link": 60, + "name": "images", + "type": "IMAGE" + } + ], + "properties": {}, + "widgets_values": [ + "x8_upscaled_" + ] + }, + { + "id": 51, + "pos": [ + 2262, + 185 + ], + "mode": 0, + "size": { + "0": 315, + "1": 82 + }, + "type": "ImageScaleBy", + "flags": {}, + "order": 16, + "inputs": [ + { + "link": 47, + "name": "image", + "type": "IMAGE" + } + ], + "outputs": [ + { + "name": "IMAGE", + "type": "IMAGE", + "links": [ + 48, + 60 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "ImageScaleBy" + }, + "widgets_values": [ + "nearest-exact", + 8 + ] + }, + { + "id": 10, + "pos": [ + 1044, + -82 + ], + "mode": 0, + "size": { + "0": 343.2493896484375, + "1": 595.7294921875 + }, + "type": "KSamplerAdvanced", + "color": "#223", + "flags": {}, + "order": 11, + "title": "KSampler (Advanced) - BASE", + "inputs": [ + { + "link": 54, + "name": "model", + "type": "MODEL" + }, + { + "link": 11, + "name": "positive", + "type": "CONDITIONING" + }, + { + "link": 12, + "name": "negative", + "type": "CONDITIONING" + }, + { + "link": 27, + "name": "latent_image", + "type": "LATENT" + }, + { + "link": 41, + "name": "steps", + "type": "INT", + "widget": { + "name": "steps", + "config": [ + "INT", + { + "max": 10000, + "min": 1, + "default": 20 + } + ] + }, + "slot_index": 4 + }, + { + "link": 43, + "name": "end_at_step", + "type": "INT", + 
"widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 5 + } + ], + "bgcolor": "#335", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 56 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "KSamplerAdvanced" + }, + "widgets_values": [ + "enable", + 1024798686504084, + "randomize", + 20, + 8, + "euler_ancestral", + "normal", + 0, + 20, + "enable" + ] + }, + { + "id": 47, + "pos": [ + 1002, + 765 + ], + "mode": 0, + "size": { + "0": 210, + "1": 82 + }, + "type": "PrimitiveNode", + "color": "#432", + "flags": {}, + "order": 5, + "title": "end_at_step", + "bgcolor": "#653", + "outputs": [ + { + "name": "INT", + "type": "INT", + "links": [ + 43 + ], + "widget": { + "name": "end_at_step", + "config": [ + "INT", + { + "max": 10000, + "min": 0, + "default": 10000 + } + ] + }, + "slot_index": 0 + } + ], + "properties": {}, + "widgets_values": [ + 20, + "fixed" + ] + }, + { + "id": 55, + "pos": [ + 343, + -470 + ], + "mode": 0, + "size": { + "0": 315, + "1": 58 + }, + "type": "VAELoader", + "flags": {}, + "order": 6, + "outputs": [ + { + "name": "VAE", + "type": "VAE", + "links": [ + 57 + ], + "shape": 3, + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "VAELoader" + }, + "widgets_values": [ + "sdxl_vae.safetensors" + ] + }, + { + "id": 53, + "pos": [ + 352, + -344 + ], + "mode": 0, + "size": { + "0": 315, + "1": 126 + }, + "type": "LoraLoader", + "flags": {}, + "order": 8, + "inputs": [ + { + "link": 58, + "name": "model", + "type": "MODEL" + }, + { + "link": 59, + "name": "clip", + "type": "CLIP" + } + ], + "outputs": [ + { + "name": "MODEL", + "type": "MODEL", + "links": [ + 54 + ], + "shape": 3, + "slot_index": 0 + }, + { + "name": "CLIP", + "type": "CLIP", + "links": [ + 52, + 53 + ], + "shape": 3, + "slot_index": 1 + } + ], + "properties": { + "Node name for S&R": "LoraLoader" + }, + "widgets_values": [ + "pixel-art-xl-v1.1.safetensors", + 1.0000000000000002, + 1 + ] + }, + { + "id": 5, + "pos": [ + 544.5041024540301, + 651.1200708259006 + ], + "mode": 0, + "size": { + "0": 300, + "1": 110 + }, + "type": "EmptyLatentImage", + "color": "#323", + "flags": {}, + "order": 7, + "bgcolor": "#535", + "outputs": [ + { + "name": "LATENT", + "type": "LATENT", + "links": [ + 27 + ], + "slot_index": 0 + } + ], + "properties": { + "Node name for S&R": "EmptyLatentImage" + }, + "widgets_values": [ + 1024, + 1024, + 4 + ] + }, + { + "id": 52, + "pos": [ + 2656, + 358 + ], + "mode": 0, + "size": { + "0": 808.3607177734375, + "1": 777.3040771484375 + }, + "type": "PreviewImage", + "flags": {}, + "order": 18, + "inputs": [ + { + "link": 48, + "name": "images", + "type": "IMAGE" + } + ], + "properties": { + "Node name for S&R": "PreviewImage" + } + } + ], + "config": {}, + "groups": [ + { + "color": "#3f789e", + "title": "Base Prompt", + "bounding": [ + 593, + -4, + 252, + 361 + ] + }, + { + "color": "#3f789e", + "title": "Text Prompts", + "bounding": [ + 3567, + 398, + 364, + 487 + ] + }, + { + "color": "#a1309b", + "title": "Load in BASE SDXL Model", + "bounding": [ + -108, + -812, + 369, + 399 + ] + }, + { + "color": "#a1309b", + "title": "Empty Latent Image", + "bounding": [ + 524, + 577, + 339, + 443 + ] + }, + { + "color": "#b06634", + "title": "VAE Decoder", + "bounding": [ + 1454, + 46, + 336, + 157 + ] + } + ], + "version": 0.4, + "last_link_id": 61, + "last_node_id": 60 + } + }, + "steps": 20, + "width": 1024, + "height": 1024, + "models": 
[ + "sd_xl_base_1.0.safetensors" + ], + "prompt": "a cute happy shiba inu, (flat shading:1.2), (minimalist:1.4)", + "sampler": "Euler a", + "cfgScale": 8, + "modelIds": [], + "scheduler": "normal", + "upscalers": [], + "versionIds": [], + "controlNets": [], + "negativePrompt": "text, watermark, blurry, deformed, depth of field, realistic, 3d render, outline", + "additionalResources": [ + { + "name": "pixel-art-xl-v1.1.safetensors", + "type": "lora", + "strength": 1.0000000000000002, + "strengthClip": 1 + } + ] + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fc960370-d609-44ec-9f2d-ab0d32217f58/width=450/1918636.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1024, + "hash": "UFHu14jKAG-C|6r^F?ja0]s9%1I:B-Nr[csA", + "type": "image", + "metadata": { + "hash": "UFHu14jKAG-C|6r^F?ja0]s9%1I:B-Nr[csA", + "size": 73848, + "width": 1536, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "ENSD": "31337", + "Size": "1536x1024", + "seed": 3080556257, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "a Delorean on synthwave city ", + "Version": "v1.5.1-273-g9e6af44c", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Token merging ratio": "0.3", + "\"pixelbuildings128-v2": "dbe5cdf5f8f4\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/135931" +} \ No newline at end of file diff --git a/pixel-art-xl-v1.1.preview.png b/pixel-art-xl-v1.1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..e6e18067914525117ab0266d5f6a42ba9f75a28a Binary files /dev/null and b/pixel-art-xl-v1.1.preview.png differ diff --git a/pixel-art-xl-v1.1.safetensors b/pixel-art-xl-v1.1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0d178cc30472f25d4482fb50310d05f4c4acc126 --- /dev/null +++ b/pixel-art-xl-v1.1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bbf3d8defbfb3fb71331545225c0cf50c74a748d2525f7c19ebb8f74445de274 +size 170543052 diff --git a/ral-dissolve-sdxl.civitai.info b/ral-dissolve-sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..07bce223191e4d649efe9d0be3e4280c7b1bc515 --- /dev/null +++ b/ral-dissolve-sdxl.civitai.info @@ -0,0 +1,338 @@ +{ + "id": 277389, + "modelId": 245889, + "name": "SDXL", + "createdAt": "2023-12-28T07:10:45.531Z", + "updatedAt": "2024-01-26T13:34:42.033Z", + "status": "Published", + "publishedAt": "2023-12-28T07:13:24.918Z", + "trainedWords": [ + "ral-dissolve" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 10169, + "ratingCount": 356, + "rating": 4.97, + "thumbsUpCount": 672 + }, + "model": { + "name": "Dissolve Style [LoRA 1.5+SDXL]", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 217024, + "sizeKB": 223098.90234375, + "name": "ral-dissolve-sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-12-28T07:15:48.662Z", + "hashes": { + "AutoV1": "FFC58BE6", + "AutoV2": "E68ACA7EE4", + "SHA256": 
"E68ACA7EE489D2DE11AE1AC8E78FB179CD4BEB3EDAE43818ED2382E474B95839", + "CRC32": "5CB69EEF", + "BLAKE3": "6AC2153B24A789589FBB9C3B9C15711F536F864DF427FC3390EA984485049DF2", + "AutoV3": "F2854B318C818F9E9828199DCBFE5404BD2190E265BC51605F884634E4C665A8" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/277389" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/139769af-1eca-4f2e-837d-c63cc6bbf8d8/width=450/4949284.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UAC$.e+b.mEf00$j01S$_2t8$fW;NyxZx]NG", + "type": "image", + "metadata": { + "hash": "UAC$.e+b.mEf00$j01S$_2t8$fW;NyxZx]NG", + "size": 1301333, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 250389907, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "painting, martius_storm red ominous war [:style of vincent van gogh and leonardo da vinci:0.4] ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cc749633-b78b-4b58-988b-662094a4d3cd/width=450/4949280.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UDC?lh9HM^o~~oDjkCx]_2V@IUodxUIo%LR+", + "type": "image", + "metadata": { + "hash": "UDC?lh9HM^o~~oDjkCx]_2V@IUodxUIo%LR+", + "size": 1364467, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 2612694862, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "film filter, Hyper-realistic, halfbody shot of a android, lost in forest, scifi , old prosthetics , covered in lush, wild flowers and nature, cracked face, illuminated, rusted old metal, realistic, elegance, sophisticated, moss, (photorealistic:1.5), iron face rust, detailed, realistic, hyper realistic, highly detailed, depth of field, 32k, aura of power, renaissance painting, slice of life, fine art painting, lush, classical realism, poster, Figurative, oil, unilalianism, Historical mystery ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "Pad conds": "True", + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a257afc5-9c74-447c-af1d-9d15a205dbb3/width=450/4949508.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "U13S3B-;Di9E.9ozIAMxofkCMxj]t7t7Rjfk", + "type": "image", + "metadata": { + "hash": 
"U13S3B-;Di9E.9ozIAMxofkCMxj]t7t7Rjfk", + "size": 876998, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 78850673, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "Man in black robe standing in darkness, album cover, by Bal\u00e1zs Di\u00f3szegi, tumblr, horror gothic art, veil, moonless night, darkness, dark and muted colors, caretaker, faceless, dark background ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4753605f-7b0b-4433-9456-d49dfb7c7370/width=450/4949289.jpeg", + "nsfw": "Soft", + "width": 768, + "height": 1024, + "hash": "U8CPO.=^0h0M}qE1END%H=OFVt%Ktj-;Nd-p", + "type": "image", + "metadata": { + "hash": "U8CPO.=^0h0M}qE1END%H=OFVt%Ktj-;Nd-p", + "size": 1215595, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 331343722, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "analog photo, dark shot, low key, action, (a intelligent afghan girl, 20 years old:1.1), strawberry blonde pixie bob hair, huge tits, fit, reflections on the wall background, abstraction atmosphere, (prismatic, holographic:1.2), sparkles, neon pixels, (neon light:1.1), chaotic, fashion magazine, (intricate details:0.9), (hdr, hyperdetailed:1.2) ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ca0206fd-1cbe-4cd6-9a08-2a2bf6577e08/width=450/4949287.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UiJ[Cms.D%%M~pWAj[WBxtM{azRj-;ofayj[", + "type": "image", + "metadata": { + "hash": "UiJ[Cms.D%%M~pWAj[WBxtM{azRj-;ofayj[", + "size": 1106686, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 1957147708, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "a deer in landscape, traditional ink art, mountain, trees, river ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + 
"ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/03261742-b635-4c93-a15f-0d72df85bd0d/width=450/4949283.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UeHxmK%2_NkX-:t7t6xuE1NGMxxa%Mazt7t7", + "type": "image", + "metadata": { + "hash": "UeHxmK%2_NkX-:t7t6xuE1NGMxxa%Mazt7t7", + "size": 1050638, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 894787334, + "Model": "realisticStockPhoto_v10", + "steps": 20, + "hashes": { + "model": "2d44ce378d" + }, + "prompt": "low angle, from below, photo, realistic, portrait, man sneezing violently toward the camera, running nose, shooting snot at the camera, (smear on lens:1.5) ral-dissolve", + "Version": "1.6.0", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "2d44ce378d", + "name": "realisticStockPhoto_v10", + "type": "model" + } + ], + "Model hash": "2d44ce378d", + "\"ral-dissolve": "f2854b318c81\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/277389" +} \ No newline at end of file diff --git a/ral-dissolve-sdxl.preview.png b/ral-dissolve-sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..3789495d65c786e9afe1a2a16df20298793553ea Binary files /dev/null and b/ral-dissolve-sdxl.preview.png differ diff --git a/ral-dissolve-sdxl.safetensors b/ral-dissolve-sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..9362195c943ba3a28a4de7bcbf3db63660682d35 --- /dev/null +++ b/ral-dissolve-sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e68aca7ee489d2de11ae1ac8e78fb179cd4beb3edae43818ed2382e474b95839 +size 228453276 diff --git a/ral-vlntxplzn-sdxl.civitai.info b/ral-vlntxplzn-sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..81d9a75e713e5c07800784db4557578f8ef27696 --- /dev/null +++ b/ral-vlntxplzn-sdxl.civitai.info @@ -0,0 +1,530 @@ +{ + "id": 331736, + "modelId": 295223, + "name": "SDXL", + "createdAt": "2024-02-06T16:53:16.466Z", + "updatedAt": "2024-02-16T12:51:57.463Z", + "status": "Published", + "publishedAt": "2024-02-07T09:00:00.000Z", + "trainedWords": [ + "ral-vlntxplzn" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1986, + "ratingCount": 108, + "rating": 5, + "thumbsUpCount": 189 + }, + "model": { + "name": "Violent Explosion Style [LoRA 1.5+SDXL]", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 262977, + "sizeKB": 223099.03515625, + "name": "ral-vlntxplzn-sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-06T16:55:49.777Z", + "hashes": 
{ + "AutoV1": "ACB8E302", + "AutoV2": "2D7D2726A8", + "SHA256": "2D7D2726A8C7FF139C8BCCFF775D70860FAA42E4F3021A426CEC1536F6E43730", + "CRC32": "D3B25771", + "BLAKE3": "5BE8E742A211619E4446772317D3D6DF8ACA8C29302D8605DCAA2918AE0B9D49", + "AutoV3": "5646707CB2D1C0CBA5F8AE74C64FD250F9A3CCA148129BC8747EB8BFA7BCA141" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/331736" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/25c6356a-1295-4dc6-b5c6-f31db757a780/width=450/6332239.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UACsdO_NO@rrtTIVIAbb56niwHI:9bNH-onO", + "type": "image", + "metadata": { + "hash": "UACsdO_NO@rrtTIVIAbb56niwHI:9bNH-onO", + "size": 1745251, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 3626180476, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "a dark wizzard casting a explosive spell at a castle made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dbea236e-6997-4cba-9c21-ce0df2d87517/width=450/6332246.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UFDS,ft,tlVs~p%gW;s:pI-;RPo~.8smtRSi", + "type": "image", + "metadata": { + "hash": "UFDS,ft,tlVs~p%gW;s:pI-;RPo~.8smtRSi", + "size": 1885004, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 1958999907, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "ral-vlntxplzn, car accident, explosion, flying though a paradox, cinematic scene, scenery, detailed background, masterpiece, best quality, high quality, highres, absurdres, very detailed, high resolution, sharp, sharp image, 8k, multicolored, colorful, vivid, award winning, stunning, amazing ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fd0d44b9-6f96-40b5-806b-a4ffe0c6e4f2/width=450/6332237.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U8CGfKNGAc4T4.IAQ,-;00xuZ~?w?bt7RQ9F", + "type": "image", + "metadata": { + "hash": "U8CGfKNGAc4T4.IAQ,-;00xuZ~?w?bt7RQ9F", + "size": 1776212, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 3204229782, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + 
"model": "c41f93afea" + }, + "prompt": "terminator, t1000, robot, (melting face:1.6), looking at viewer, made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9f521664-7ac0-4fa1-b803-e6cd7100f195/width=450/6332247.jpeg", + "nsfw": "X", + "width": 896, + "height": 1152, + "hash": "ULHxWq%3yD%L~pxu-;%M9]xunlt74oxuxus:", + "type": "image", + "metadata": { + "hash": "ULHxWq%3yD%L~pxu-;%M9]xunlt74oxuxus:", + "size": 1412982, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 3944534713, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "a short obese man with big glasses, sitting in his toilet, toilet made of ral-vlntxplzn, naked, looking stupid with crazy face expresion ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "penis", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/1b6c36c4-95a5-4aad-bead-c4852c605dbd/width=450/6332235.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UEDmH$E1XTx].98_IU?bkXI9Di-;_ND%RjxZ", + "type": "image", + "metadata": { + "hash": "UEDmH$E1XTx].98_IU?bkXI9Di-;_ND%RjxZ", + "size": 1781212, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 3007458476, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "Epic CG masterpiece, hdr,dtm, full ha,8K, ultra detailed graphic tension, dynamic poses, stunning colors, 3D rendering, surrealism, cinematic lighting effects, realism, 00 renderer, super realistic, full - body photos, super vista, super wide Angle, HD\u00ef\u00bc\u008c In a futuristic abandoned city, a girl with a huge mechanical arm is engaged in a fierce struggle with the enemy, Her name is Alita, a fighting angel from the future world, Her eyes are firm and resolute, showing the desire for victory, high-definition picture, real effect, hyper-realistic portraits, xiaofei yue, uniformly staged images, flat yet expressive, made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Pad conds": "True", + "resources": [ + { + "hash": "c41f93afea", + "name": 
"zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/073941d6-1a0c-42d5-aab3-1a936592d69e/width=450/6332236.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UAEe_W~q0f-:#6WC9uV@2wx]-UJA4Uae-:R*", + "type": "image", + "metadata": { + "hash": "UAEe_W~q0f-:#6WC9uV@2wx]-UJA4Uae-:R*", + "size": 1828301, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 1400097490, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "salat with tomatoes, splash water, flashing particles , depth of field, clean background, made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7c56ab51-235e-4a53-8ffb-52d862f94706/width=450/6332238.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U7AwPJ9ayE?G_NNeD%%20K%MDhNG00Z#-pN_", + "type": "image", + "metadata": { + "hash": "U7AwPJ9ayE?G_NNeD%%20K%MDhNG00Z#-pN_", + "size": 1901266, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 1007211346, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "star wars, death star, in space, made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw", + "ADetailer model": "face_yolov8n.pt", + "ADetailer version": "23.11.1", + "ADetailer mask blur": "4", + "ADetailer confidence": "0.3", + "ADetailer dilate erode": "4", + "ADetailer inpaint padding": "32", + "ADetailer denoising strength": "0.4", + "ADetailer inpaint only masked": "True" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a4efe334-8647-4179-a526-cc769f10df20/width=450/6332249.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U49tiiTbK+^j1G8wQl0z00x^?^^*8y%#tl$2", + "type": "image", + "metadata": { + "hash": "U49tiiTbK+^j1G8wQl0z00x^?^^*8y%#tl$2", + "size": 1596393, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 2152948794, + "Model": 
"zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "smoke, glowing aura, excessive energy, Conveyance, Electrostatic Tinting, Neon Lights, Racing Seats, Steel, Luminous, shiny metal coating, made of ral-flntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8aea16ff-cb14-4da6-890d-4276828222d1/width=450/6332233.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U8CsjjD%IU~qV@ofM{Rj00of-;D%_3xuay-p", + "type": "image", + "metadata": { + "hash": "U8CsjjD%IU~qV@ofM{Rj00of-;D%_3xuay-p", + "size": 1501497, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 1253194512, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea", + "lora:ral-vlntxplzn-sdxl": "2d7d2726a8" + }, + "prompt": "masterpiece, best quality, HDR, (monochrome:1.05) portrait of a woman, face shot, short hair, BREAK, (vivid polychromatic kaleidoscopic background),, made of ral-vlntxplzn ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "Pad conds": "True", + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn-sdxl": "5646707cb2d1\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/304cf236-1821-4c15-ab37-aa904552bfe6/width=450/6332244.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UAEVHJ^%CR$$T}4nwIxC0g4Tm,W:pbEMD*Rk", + "type": "image", + "metadata": { + "hash": "UAEVHJ^%CR$$T}4nwIxC0g4Tm,W:pbEMD*Rk", + "size": 1521587, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "896x1152", + "seed": 3235167548, + "Model": "zavychromaxl_v31", + "steps": 20, + "hashes": { + "vae": "63aeecb90f", + "model": "c41f93afea" + }, + "prompt": "Raw photo of huge Atomic explosion, crepuscular rays, cumulus nimbus clouds, volcanic lightning, 8k uhd, dslr, high quality, film grain, Fujifilm XT3,fireball, (nuclear weapon:1.2),Mushroom cloud,Distant city,(Shock Wave smoke:1.2),Radiation,sea Surface reflection, made of ral-vlntxplzn realistic multicolor", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "hash": "c41f93afea", + "name": "zavychromaxl_v31", + "type": "model" + } + ], + "Model hash": "c41f93afea", + "\"ral-vlntxplzn": "5646707cb2d1\"", + "negativePrompt": "nsfw" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/331736" +} \ No newline at end of file diff --git a/ral-vlntxplzn-sdxl.preview.png b/ral-vlntxplzn-sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..6b79457fe16bba2f5015e13623961162ac7bbe08 Binary files /dev/null and b/ral-vlntxplzn-sdxl.preview.png differ diff --git a/ral-vlntxplzn-sdxl.safetensors b/ral-vlntxplzn-sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..31e6e85954260234147430e461f7a6184efca7bd --- /dev/null +++ 
b/ral-vlntxplzn-sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d7d2726a8c7ff139c8bccff775d70860faa42e4f3021a426cec1536f6e43730 +size 228453412 diff --git a/sd_xl_dpo_lora_v1.civitai.info b/sd_xl_dpo_lora_v1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..7246d14e1977aafa73208939ad32bf547d384844 --- /dev/null +++ b/sd_xl_dpo_lora_v1.civitai.info @@ -0,0 +1,122 @@ +{ + "id": 273996, + "modelId": 242825, + "name": "SDXL - V1.0", + "createdAt": "2023-12-24T22:29:25.997Z", + "updatedAt": "2023-12-24T22:51:07.020Z", + "status": "Published", + "publishedAt": "2023-12-24T22:51:07.019Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 4345, + "ratingCount": 292, + "rating": 4.99, + "thumbsUpCount": 475 + }, + "model": { + "name": "DPO (Direct Preference Optimization) LoRA for XL and 1.5 - OpenRail++", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 214128, + "sizeKB": 768888.8671875, + "name": "sd_xl_dpo_lora_v1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-12-24T22:40:50.665Z", + "hashes": { + "AutoV1": "091E985B", + "AutoV2": "C100EC5708", + "SHA256": "C100EC5708865A649C68912CE0E541FC69CB1973FE6543310B9B81A42E15ADA3", + "CRC32": "08360901", + "BLAKE3": "36692FB885E7938399DA9D2465C7E769E6E7A345B7DC61D298D8DA4403565347", + "AutoV3": "077C31ED3CD5CF0DC004C08922337716ED83E2BF056E470AA11CC7D3F9E96074" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/273996" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/eaef6a90-c101-4183-a974-39f40c092f5c/width=450/4862211.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UCKTJH020f-T}6t,$2WY15I9?HS$^J%MKPjX", + "type": "image", + "metadata": { + "hash": "UCKTJH020f-T}6t,$2WY15I9?HS$^J%MKPjX", + "size": 1600460, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 231527354, + "steps": 25, + "prompt": "RAW photo, a close-up picture of a cat, orange eyes, reflection in it's eyes", + "sampler": "DPM2", + "cfgScale": 5 + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/def49d32-e454-4d9b-9021-12be0ca10c32/width=450/4862214.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UJI=0@024:%L-pt7RkM{S4Ri%NV@~qbdIps.", + "type": "image", + "metadata": { + "hash": "UJI=0@024:%L-pt7RkM{S4Ri%NV@~qbdIps.", + "size": 1386578, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 231527354, + "steps": 25, + "prompt": "RAW photo, a close-up picture of a dog, blue eyes, reflection in it's eyes", + "sampler": "DPM2", + "cfgScale": 5 + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c7a9c5cb-980f-4bb8-ab12-d56a1f20f529/width=450/4903274.jpeg", + "nsfw": "None", + "width": 1024, + "height": 3372, + "hash": "U6HUh5?bs:~VGaNHWqt8EMoJt7RP8wIVRjD%", + "type": "image", + "metadata": { + "hash": "U6HUh5?bs:~VGaNHWqt8EMoJt7RP8wIVRjD%", + "size": 4938280, + "width": 1024, + "height": 3372 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + 
} + ], + "downloadUrl": "https://civitai.com/api/download/models/273996" +} \ No newline at end of file diff --git a/sd_xl_dpo_lora_v1.preview.png b/sd_xl_dpo_lora_v1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..aa075592ec7531bde5212aeec1d923e4bd2969f7 Binary files /dev/null and b/sd_xl_dpo_lora_v1.preview.png differ diff --git a/sd_xl_dpo_lora_v1.safetensors b/sd_xl_dpo_lora_v1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bd7d42a6ea8872927bb8ba9e9f846387bfbf34c2 --- /dev/null +++ b/sd_xl_dpo_lora_v1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c100ec5708865a649c68912ce0e541fc69cb1973fe6543310b9b81a42e15ada3 +size 787342200 diff --git a/sdxl_photorealistic_slider_v1-0.civitai.info b/sdxl_photorealistic_slider_v1-0.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..129ef52719d8d1c8051c178a32975684512f8747 --- /dev/null +++ b/sdxl_photorealistic_slider_v1-0.civitai.info @@ -0,0 +1,259 @@ +{ + "id": 126807, + "modelId": 117060, + "name": "v1.0-SDXL", + "createdAt": "2023-07-27T02:10:42.229Z", + "updatedAt": "2023-07-27T03:05:34.021Z", + "status": "Published", + "publishedAt": "2023-07-27T02:14:37.294Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": "Standard", + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 9586, + "ratingCount": 614, + "rating": 5, + "thumbsUpCount": 827 + }, + "model": { + "name": "Photorealistic Slider SDXL - LoRA", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 91665, + "sizeKB": 23612.30859375, + "name": "sdxl_photorealistic_slider_v1-0.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-07-27T02:15:34.633Z", + "hashes": { + "AutoV1": "DD062636", + "AutoV2": "2764F1F25E", + "SHA256": "2764F1F25EE63DF2AD7685B73572FCE19593086B967A67C61E75FF6353746224", + "CRC32": "A57603DA", + "BLAKE3": "FC773E002484E44508E9202AB728618136F3ED380CE43EA188B2CFA1573D1871" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/126807" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/64f44ebd-ae26-4cd8-896c-bc9a2135e13e/width=450/1739210.jpeg", + "nsfw": "None", + "width": 960, + "height": 1440, + "hash": "UHHnZ@~Bt7W-g4NHR-bH_NNZjFxDD*oLWVX9", + "type": "image", + "metadata": { + "hash": "UHHnZ@~Bt7W-g4NHR-bH_NNZjFxDD*oLWVX9", + "width": 960, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 334979273, + "steps": 20, + "prompt": "a bear having a birthday party at a roller rink, about to blow out candles, balloons around", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "negativePrompt": "cartoon, cgi, render, illustration, painting, drawing, bad quality, grainy, low resolution" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/043a5b82-ce8d-419c-80eb-d8a62a35128d/width=450/1739267.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1728, + "hash": "UEFYJit6M{RP~oD$tRM_.79ZTKIo%z57Ne-o", + "type": "image", + "metadata": { + "hash": "UEFYJit6M{RP~oD$tRM_.79ZTKIo%z57Ne-o", + "width": 1152, + "height": 1728 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + 
"Size": "768x1152", + "seed": 2818936237, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "a bear sitting on a couch watching tv, drinking a slurpee ", + "Version": "v1.5.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "sdxl_photorealistic_slider_v1", + "type": "lora", + "weight": 2 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Hires upscale": "1.5", + "Hires upscaler": "Latent", + "negativePrompt": "bad hands, cartoon, cgi, render, illustration, painting, drawing, bad quality, grainy, low resolution", + "Denoising strength": "0.7", + "\"sdxl_photorealistic_slider_v1": "a48607dc7327\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f1e24f57-c660-488b-a563-3d5f60e7d3a7/width=450/1739266.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1728, + "hash": "UPIz#Kt6nPNH_MV?S$NH?aM{OsNH%g9uWCxZ", + "type": "image", + "metadata": { + "hash": "UPIz#Kt6nPNH_MV?S$NH?aM{OsNH%g9uWCxZ", + "width": 1152, + "height": 1728 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1152", + "seed": 2818936237, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "a bear sitting on a couch watching tv, drinking a slurpee ", + "Version": "v1.5.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "sdxl_photorealistic_slider_v1", + "type": "lora", + "weight": 0 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "Hires upscale": "1.5", + "Hires upscaler": "Latent", + "negativePrompt": "bad hands, cartoon, cgi, render, illustration, painting, drawing, bad quality, grainy, low resolution", + "Denoising strength": "0.7", + "\"sdxl_photorealistic_slider_v1": "a48607dc7327\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/91858e12-0972-4cda-849f-438f6d5115cd/width=450/1739235.jpeg", + "nsfw": "None", + "width": 960, + "height": 1440, + "hash": "UEK0.[pax^OZx[t5t5od_L-PM_xHz.xB#*rr", + "type": "image", + "metadata": { + "hash": "UEK0.[pax^OZx[t5t5od_L-PM_xHz.xB#*rr", + "width": 960, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "960x1440", + "seed": 334979273, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "a bear having a birthday party at a roller rink, about to blow out candles, balloons around", + "Version": "v1.5.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "sdxl_photorealistic_slider_v1", + "type": "lora", + "weight": 0 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "cartoon, cgi, render, illustration, painting, drawing, bad quality, grainy, low resolution", + "\"sdxl_photorealistic_slider_v1": "a48607dc7327\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b539431a-498d-42f9-a143-8c4f0758e472/width=450/1739247.jpeg", + "nsfw": "None", + "width": 960, + "height": 1440, + "hash": "UBF}yPTr-UTe%2os$lK5~B?XIT#QU[wJ=r+[", + "type": "image", + "metadata": { + "hash": "UBF}yPTr-UTe%2os$lK5~B?XIT#QU[wJ=r+[", + "width": 960, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "960x1440", + "seed": 334979273, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": 
{ + "model": "31e35c80fc" + }, + "prompt": "a bear having a birthday party at a roller rink, about to blow out candles, balloons around", + "Version": "v1.5.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "name": "sdxl_photorealistic_slider_v1", + "type": "lora", + "weight": 5 + }, + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "cartoon, cgi, render, illustration, painting, drawing, bad quality, grainy, low resolution", + "\"sdxl_photorealistic_slider_v1": "a48607dc7327\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/126807" +} \ No newline at end of file diff --git a/sdxl_photorealistic_slider_v1-0.preview.png b/sdxl_photorealistic_slider_v1-0.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..c5508098102a54465a5047025af935c6bf039417 --- /dev/null +++ b/sdxl_photorealistic_slider_v1-0.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2b4f8028e1433b1e6796b27e2c0adb8d9fcb0c8dd5948a8b441f30fa27e1de88 +size 2540870 diff --git a/sdxl_photorealistic_slider_v1-0.safetensors b/sdxl_photorealistic_slider_v1-0.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5dd39f53245c3cd3990938c87da94dd4e4b41de6 --- /dev/null +++ b/sdxl_photorealistic_slider_v1-0.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2764f1f25ee63df2ad7685b73572fce19593086b967a67c61e75ff6353746224 +size 24179004 diff --git a/sketch_it.civitai.info b/sketch_it.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..6d83aba9e6dbd25617ed2906ae3954921bf76b21 --- /dev/null +++ b/sketch_it.civitai.info @@ -0,0 +1,331 @@ +{ + "id": 340557, + "modelId": 303330, + "name": "v1.0", + "createdAt": "2024-02-12T18:42:21.576Z", + "updatedAt": "2024-02-12T18:53:54.438Z", + "status": "Published", + "publishedAt": "2024-02-12T18:53:54.437Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 3169, + "ratingCount": 265, + "rating": 5, + "thumbsUpCount": 526 + }, + "model": { + "name": "Sketchit", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 270607, + "sizeKB": 223119.89453125, + "name": "sketch_it.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-02-12T18:55:45.055Z", + "hashes": { + "AutoV1": "937FAC6B", + "AutoV2": "A0C0F3435C", + "SHA256": "A0C0F3435CDA03FEBB744CBF940CE1E82D177A5DF6C041659EF30404C1A32B56", + "CRC32": "67B1A24A", + "BLAKE3": "92C1A3BF088C1F150F805BDE08818E5C5D52C12DBC076989C21959A2314ACAF3", + "AutoV3": "5210587CA21DBBFFD310E4964D282F3BA12072FA0D3ACFF058D6E4FAB2DC24E5" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/340557" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/40baafc8-f403-4d31-99a5-d73fff0bfada/width=450/6562006.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7L;d9-;_2%M~pWBt7xt?aoeE1WB?at7RkM{", + "type": "image", + "metadata": { + "hash": "U7L;d9-;_2%M~pWBt7xt?aoeE1WB?at7RkM{", + "size": 1329044, + "width": 1024, + "height": 1024 
+ }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 284918455, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of a donald trump on a white paper ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a2f63b14-3f6c-48d9-85c0-de2e89315c5a/width=450/6562003.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U8LqRon$%2%2~pxtR*Rj?aRjIVofozM{xaxu", + "type": "image", + "metadata": { + "hash": "U8LqRon$%2%2~pxtR*Rj?aRjIVofozM{xaxu", + "size": 1278952, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3019521429, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of jerusalem\n on a white paper ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dd5c4205-6b98-42cd-b1b9-67956dd75c81/width=450/6562007.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U6Lq5,D%aK_2~pNGR*xa%2xuj?NG?a%2ofNG", + "type": "image", + "metadata": { + "hash": "U6Lq5,D%aK_2~pNGR*xa%2xuj?NG?a%2ofNG", + "size": 1264473, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1806336477, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "drawing of a bulldog ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5a5b531d-f775-487c-9545-3fc26791c7eb/width=450/6562004.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "UANdBpt7?b%M?aofofR%~pja9GWB%MM{Rjxu", + "type": "image", + "metadata": { + "hash": "UANdBpt7?b%M?aofofR%~pja9GWB%MM{Rjxu", + "size": 1309257, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3883928413, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of a slice of pizza on a white paper ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/3f7797f6-ab4e-476a-83e4-a84f687ba59f/width=450/6562005.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U9MaFZxu~Wxu?bofxuaf_2ah9Gj[_2f8IUof", + "type": "image", + "metadata": { + "hash": 
"U9MaFZxu~Wxu?bofxuaf_2ah9Gj[_2f8IUof", + "size": 1202156, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 3268047119, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of a gameboy\n on a white paper ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/934c7e41-542a-4ea2-a0cd-b779537b53fa/width=450/6562010.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U9MtHlM{RP?a?b%Lt6of?aj[j[WB~pR*RkNG", + "type": "image", + "metadata": { + "hash": "U9MtHlM{RP?a?b%Lt6of?aj[j[WB~pR*RkNG", + "size": 1615141, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 2239648016, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of a dystopian landscape ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/735605bb-941e-4a6f-a59b-e19e48fdf4ed/width=450/6562009.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1024, + "hash": "U7MjdUt6~U-p_2ayoLj?~Vof4:Rj^+j[IobH", + "type": "image", + "metadata": { + "hash": "U7MjdUt6~U-p_2ayoLj?~Vof4:Rj^+j[IobH", + "size": 1414771, + "width": 1024, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1362880529, + "Model": "sd_xl_base_1.0", + "steps": 20, + "hashes": { + "model": "31e35c80fc" + }, + "prompt": "black and white drawing of yoda ", + "Version": "v1.7.0", + "sampler": "DPM++ 2M Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "31e35c80fc", + "name": "sd_xl_base_1.0", + "type": "model" + } + ], + "Model hash": "31e35c80fc", + "negativePrompt": "color", + "\"my_sketching_style (1)": "5210587ca21d\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/340557" +} \ No newline at end of file diff --git a/sketch_it.preview.png b/sketch_it.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..5c221214252eabd44cbab7a58ccd91ceb6b0e3b6 Binary files /dev/null and b/sketch_it.preview.png differ diff --git a/sketch_it.safetensors b/sketch_it.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..df2dac6cebe0fc5664645c3d68b045095f18d6b5 --- /dev/null +++ b/sketch_it.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0c0f3435cda03febb744cbf940ce1e82d177a5df6c041659ef30404c1a32b56 +size 228474772 diff --git a/texta.civitai.info b/texta.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..c03b20d51bb695de3ac978f3461160171406d243 --- /dev/null +++ b/texta.civitai.info @@ -0,0 +1,423 @@ +{ + "id": 249521, + "modelId": 221240, + "name": "01", + "createdAt": "2023-12-04T09:05:57.207Z", + "updatedAt": "2023-12-04T10:12:27.390Z", + "status": "Published", + "publishedAt": 
"2023-12-04T09:40:52.111Z", + "trainedWords": [ + "logo", + "text", + "sign that says", + "the word" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 6075, + "ratingCount": 634, + "rating": 4.99, + "thumbsUpCount": 642 + }, + "model": { + "name": "Texta - Generate text with SDXL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 193263, + "sizeKB": 275893.2265625, + "name": "texta.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-12-04T09:11:00.695Z", + "hashes": { + "AutoV1": "1CB8C042", + "AutoV2": "B7026F2C57", + "SHA256": "B7026F2C5739F6F46A7B2EB85B40A26F6B7F6AC11504B0F04B4FDC9603ED4C95", + "CRC32": "2505254D", + "BLAKE3": "89EC5F40805E0A1AA8BD03EC1928587E1AA4C1A17528CD0E7776105E55394180", + "AutoV3": "11EDCE6EF2B4DA84126E0CEA9E228E5DE856A4D15CF4EE865D6EF1220B396264" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/249521" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/68bd3ccc-0310-4c62-8472-e7d67b95a2e7/width=450/4176635.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UAKTGf#QL}]|%N~P_$S,4TQ=PDOZ1Ry4%|Dl", + "type": "image", + "metadata": { + "hash": "UAKTGf#QL}]|%N~P_$S,4TQ=PDOZ1Ry4%|Dl", + "size": 1291602, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469281, + "Model": "sd_xl_base_1.0_0.9vae.safetensors", + "steps": 40, + "hashes": { + "model": "9B4D991F" + }, + "prompt": "texta logo, White tie_dye background", + "sampler": "dpmpp_2m_sde_gpu", + "cfgScale": 4, + "resources": [ + { + "hash": "9B4D991F", + "name": "sd_xl_base_1.0_0.9vae.safetensors", + "type": "model" + } + ], + "Model hash": "9B4D991F", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9e3f9e22-969d-4c7c-859f-d5ba5d140a2f/width=450/4177102.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "U5BL^h4:02_2~B}80z1j4;-oNF0zn4OYkq9v", + "type": "image", + "metadata": { + "hash": "U5BL^h4:02_2~B}80z1j4;-oNF0zn4OYkq9v", + "size": 1378489, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, + "hashes": { + "model": "F1C6917C" + }, + "prompt": "A fire symbol surrounding the word Planet.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/c7cda83a-d3f7-42a9-a3af-597ae52b0730/width=450/4176620.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "UCNmfyxuvx%Lt6t7V@ae}%oL9Hj[xZj[WBRj", + "type": "image", + "metadata": { + "hash": "UCNmfyxuvx%Lt6t7V@ae}%oL9Hj[xZj[WBRj", + "size": 1596422, + 
"width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469281, + "Model": "sd_xl_base_1.0_0.9vae.safetensors", + "steps": 40, + "hashes": { + "model": "9B4D991F" + }, + "prompt": "texta logo, Vanilla toile background", + "sampler": "dpmpp_2m_sde_gpu", + "cfgScale": 4, + "resources": [ + { + "hash": "9B4D991F", + "name": "sd_xl_base_1.0_0.9vae.safetensors", + "type": "model" + } + ], + "Model hash": "9B4D991F", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8cdb8892-bc85-440a-86da-ca7bddbc69fd/width=450/4176952.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "UDMaFh4nOZ%2~WNGIpj@IqWBxCkC%#f6i^R*", + "type": "image", + "metadata": { + "hash": "UDMaFh4nOZ%2~WNGIpj@IqWBxCkC%#f6i^R*", + "size": 698259, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, + "hashes": { + "model": "F1C6917C" + }, + "prompt": "A lightbulb symbol is adjacent to the word WINNER.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f5b98baa-23ff-4d15-84c7-4b87e239626c/width=450/4177390.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "U29j}i0cq:PK_4E0x[M_DIMxyLxH_LXMWBxu", + "type": "image", + "metadata": { + "hash": "U29j}i0cq:PK_4E0x[M_DIMxyLxH_LXMWBxu", + "size": 1634654, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, + "hashes": { + "model": "F1C6917C" + }, + "prompt": "wreath design with the words TEAM GREEN.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7b750adc-541b-4e3f-93f0-0235260c892e/width=450/4176424.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U29?,e~V01D*~pxuM{RQ00Io~TtR4pR6S%x]", + "type": "image", + "metadata": { + "hash": "U29?,e~V01D*~pxuM{RQ00Io~TtR4pR6S%x]", + "size": 847935, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "seed": 371, + "comfy": "{\"prompt\":{\"21\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 
portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"55\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":371,\"steps\":40,\"cfg\":5,\"sampler_name\":\"dpmpp_2m_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"positive\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader 
(Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"negative\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"latent_image\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style 
V3\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"VAEDecode\"},\"56\":{\"inputs\":{\"text\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"}},\"class_type\":\"ShowText|pysssss\"},\"57\":{\"inputs\":{\"text\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"}},\"class_type\":\"ShowText|pysssss\"},\"59\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"69\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard 
Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"negative_prompt\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"filename_prefix\":\"\",\"parameters\":\"\",\"images\":{\"inputs\":{\"samples\":{\"inputs\":{\"seed\":371,\"steps\":40,\"cfg\":5,\"sampler_name\":\"dpmpp_2m_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader 
(Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"positive\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"negative\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style 
V3\"},\"latent_image\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"}},\"class_type\":\"KSampler\"},\"vae\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"VAEDecode\"}},\"class_type\":\"Save Image With Prompt Data\"},\"84\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"85\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"89\":{\"inputs\":{\"seed\":371,\"steps\":40,\"cfg\":5,\"sampler_name\":\"dpmpp_2m_sde\",\"scheduler\":\"karras\",\"denoise\":1,\"model\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"positive\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ 
background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"negative\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"},\"latent_image\":{\"inputs\":{\"positive_prompt\":{\"inputs\":{\"prompt\":\"texta logo, __patterns__ background\",\"seed\":420523095515620},\"class_type\":\"Wildcard Processor\"},\"negative_prompt\":{\"inputs\":{\"prompt\":\"\",\"seed\":849355631634268},\"class_type\":\"Wildcard Processor\"},\"ratio_selected\":\"3:4 [896x1152 portrait]\",\"custom_size\":\"false\",\"fit_custom_size\":\"true\",\"custom_width\":1024,\"custom_height\":1024,\"batch_size\":1,\"seed\":581045236064915,\"target_mode\":\"4x\",\"base_model\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader 
(Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"},\"clip_base\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip_refiner\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"output_positive_prompt\":\"texta logo, medallion background\",\"output_negative_prompt\":\"\",\"output_latent_width\":896,\"output_latent_height\":1152,\"output_target_width\":3584,\"output_target_height\":4608,\"output_refiner_width\":3584,\"output_refiner_height\":4608,\"output_crop_w\":0,\"output_crop_h\":0},\"class_type\":\"Prompt With Style V3\"}},\"class_type\":\"KSampler\"},\"99\":{\"inputs\":{\"text\":\"\",\"seed\":256,\"model\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"},\"clip\":{\"inputs\":{\"ckpt_name\":\"PixelWave05.safetensors\"},\"class_type\":\"Checkpoint Loader (Simple)\"}},\"class_type\":\"WildcardAndLoraSyntaxProcessor\"}},\"workflow\":{\"last_node_id\":104,\"last_link_id\":348,\"nodes\":[{\"id\":55,\"type\":\"VAEDecode\",\"pos\":[1240,-120],\"size\":{\"0\":210,\"1\":46},\"flags\":{},\"order\":10,\"mode\":0,\"inputs\":[{\"name\":\"samples\",\"type\":\"LATENT\",\"link\":271},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":272}],\"outputs\":[{\"name\":\"IMAGE\",\"type\":\"IMAGE\",\"links\":[227],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"VAEDecode\"},\"color\":\"#2e571a\",\"bgcolor\":\"#426b2e\"},{\"id\":86,\"type\":\"PrimitiveNode\",\"pos\":[-320,350],\"size\":{\"0\":210,\"1\":80},\"flags\":{},\"order\":0,\"mode\":0,\"outputs\":[{\"name\":\"INT\",\"type\":\"INT\",\"links\":[255],\"widget\":{\"name\":\"seed\"},\"slot_index\":0}],\"properties\":{},\"widgets_values\":[581045236064915,\"increment\"],\"color\":\"#1a2e57\",\"bgcolor\":\"#2e426b\"},{\"id\":57,\"type\":\"ShowText|pysssss\",\"pos\":[2550,430],\"size\":{\"0\":310,\"1\":80},\"flags\":{},\"order\":9,\"mode\":0,\"inputs\":[{\"name\":\"text\",\"type\":\"STRING\",\"link\":137}],\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":6}],\"properties\":{\"Node name for S&R\":\"ShowText|pysssss\"},\"widgets_values\":[[\"\"]],\"color\":\"#224f25\",\"bgcolor\":\"#366339\"},{\"id\":56,\"type\":\"ShowText|pysssss\",\"pos\":[2200,430],\"size\":{\"0\":300,\"1\":80},\"flags\":{},\"order\":8,\"mode\":0,\"inputs\":[{\"name\":\"text\",\"type\":\"STRING\",\"link\":136}],\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":null,\"shape\":6}],\"properties\":{\"Node name for S&R\":\"ShowText|pysssss\"},\"widgets_values\":[[\"texta logo, medallion background\"]],\"color\":\"#2b4f22\",\"bgcolor\":\"#3f6336\"},{\"id\":69,\"type\":\"Save Image With Prompt Data\",\"pos\":[1750,-190],\"size\":{\"0\":670,\"1\":520},\"flags\":{},\"order\":12,\"mode\":0,\"inputs\":[{\"name\":\"images\",\"type\":\"IMAGE\",\"link\":296},{\"name\":\"positive_prompt\",\"type\":\"STRING\",\"link\":178,\"widget\":{\"name\":\"positive_prompt\"}},{\"name\":\"negative_prompt\",\"type\":\"STRING\",\"link\":179,\"widget\":{\"name\":\"negative_prompt\"}}],\"properties\":{\"Node name for S&R\":\"Save Image With Prompt Data\"},\"widgets_values\":[\"Positive Prompt\",\"Negative Prompt\",\"\",\"\"],\"color\":\"#1a5757\",\"bgcolor\":\"#2e6b6b\"},{\"id\":85,\"type\":\"Wildcard 
Processor\",\"pos\":[-330,110],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":1,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[254],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Wildcard Processor\"},\"widgets_values\":[\"\",849355631634268,\"randomize\"],\"color\":\"#224f42\",\"bgcolor\":\"#366356\"},{\"id\":91,\"type\":\"CheckpointSave\",\"pos\":[940,-280],\"size\":{\"0\":315,\"1\":98},\"flags\":{},\"order\":6,\"mode\":4,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":278},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":279},{\"name\":\"vae\",\"type\":\"VAE\",\"link\":280}],\"properties\":{\"Node name for S&R\":\"CheckpointSave\"},\"widgets_values\":[\"checkpoints/textextract\"]},{\"id\":84,\"type\":\"Wildcard Processor\",\"pos\":[-330,-140],\"size\":{\"0\":400,\"1\":200},\"flags\":{},\"order\":2,\"mode\":0,\"outputs\":[{\"name\":\"STRING\",\"type\":\"STRING\",\"links\":[253],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Wildcard Processor\"},\"widgets_values\":[\"texta logo, __patterns__ background\",420523095515620,\"increment\"],\"color\":\"#394f22\",\"bgcolor\":\"#4d6336\"},{\"id\":60,\"type\":\"Image Caption\",\"pos\":[1460,-120],\"size\":{\"0\":220,\"1\":80},\"flags\":{},\"order\":11,\"mode\":4,\"inputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"link\":227},{\"name\":\"caption\",\"type\":\"STRING\",\"link\":306,\"widget\":{\"name\":\"caption\"}}],\"outputs\":[{\"name\":\"image\",\"type\":\"IMAGE\",\"links\":[296],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for S&R\":\"Image Caption\"},\"widgets_values\":[\"ARIAL.TTF\",\"Caption\"],\"color\":\"#1a5757\",\"bgcolor\":\"#2e6b6b\"},{\"id\":99,\"type\":\"WildcardAndLoraSyntaxProcessor\",\"pos\":[160,140],\"size\":{\"0\":352.79998779296875,\"1\":300},\"flags\":{},\"order\":4,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":303},{\"name\":\"clip\",\"type\":\"CLIP\",\"link\":304}],\"outputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"links\":[348],\"shape\":3,\"slot_index\":0},{\"name\":\"clip\",\"type\":\"CLIP\",\"links\":null,\"shape\":3,\"slot_index\":1},{\"name\":\"text\",\"type\":\"STRING\",\"links\":null,\"shape\":3},{\"name\":\"unprocessed_text\",\"type\":\"STRING\",\"links\":[306],\"shape\":3,\"slot_index\":3}],\"properties\":{\"Node name for S&R\":\"WildcardAndLoraSyntaxProcessor\"},\"widgets_values\":[\"\",256,\"increment\"]},{\"id\":21,\"type\":\"Prompt With Style 
V3\",\"pos\":[580,-120],\"size\":{\"0\":320,\"1\":720},\"flags\":{\"pinned\":false},\"order\":5,\"mode\":0,\"inputs\":[{\"name\":\"base_model\",\"type\":\"MODEL\",\"link\":348},{\"name\":\"clip_base\",\"type\":\"CLIP\",\"link\":301},{\"name\":\"clip_refiner\",\"type\":\"CLIP\",\"link\":302},{\"name\":\"positive_prompt\",\"type\":\"STRING\",\"link\":253,\"widget\":{\"name\":\"positive_prompt\"},\"slot_index\":3},{\"name\":\"negative_prompt\",\"type\":\"STRING\",\"link\":254,\"widget\":{\"name\":\"negative_prompt\"}},{\"name\":\"seed\",\"type\":\"INT\",\"link\":255,\"widget\":{\"name\":\"seed\"},\"slot_index\":5}],\"outputs\":[{\"name\":\"base_model\",\"type\":\"MODEL\",\"links\":[267,278],\"shape\":3,\"slot_index\":0},{\"name\":\"samples\",\"type\":\"LATENT\",\"links\":[268],\"shape\":3,\"slot_index\":1},{\"name\":\"base_pos_cond\",\"type\":\"CONDITIONING\",\"links\":[269],\"shape\":3,\"slot_index\":2},{\"name\":\"base_neg_cond\",\"type\":\"CONDITIONING\",\"links\":[270],\"shape\":3,\"slot_index\":3},{\"name\":\"refiner_pos_cond\",\"type\":\"CONDITIONING\",\"links\":[],\"shape\":3,\"slot_index\":4},{\"name\":\"refiner_neg_cond\",\"type\":\"CONDITIONING\",\"links\":[],\"shape\":3,\"slot_index\":5},{\"name\":\"positive_prompt\",\"type\":\"STRING\",\"links\":[136,178],\"shape\":3,\"slot_index\":6},{\"name\":\"negative_prompt\",\"type\":\"STRING\",\"links\":[137,179],\"shape\":3,\"slot_index\":7}],\"properties\":{\"Node name for S&R\":\"Prompt With Style V3\"},\"widgets_values\":[\"(best_quality),(ultra_detailed), photo of beautiful age 18 girl, pastel hair, freckles sexy, beautiful, close up, young, dslr, 8k, 4k, ultrarealistic, realistic, natural skin, textured skin\",\"(worst quality:2), (low quality:2), (normal quality:2), border, frame, poorly drawn, childish, hands, hand, ((dof)), fingers, deformed, distorted, disfigured, limb, hands, anatomy, long neck, skin blemishes\",\"3:4 [896x1152 portrait]\",\"false\",\"true\",1024,1024,1,581045236064915,\"increment\",\"4x\"],\"color\":\"#1a4357\",\"bgcolor\":\"#2e576b\"},{\"id\":59,\"type\":\"Checkpoint Loader (Simple)\",\"pos\":[170,-40],\"size\":{\"0\":315,\"1\":118},\"flags\":{},\"order\":3,\"mode\":0,\"outputs\":[{\"name\":\"MODEL\",\"type\":\"MODEL\",\"links\":[303],\"shape\":3,\"slot_index\":0},{\"name\":\"CLIP\",\"type\":\"CLIP\",\"links\":[279,301,302,304],\"shape\":3,\"slot_index\":1},{\"name\":\"VAE\",\"type\":\"VAE\",\"links\":[272,280],\"shape\":3,\"slot_index\":2},{\"name\":\"NAME_STRING\",\"type\":\"STRING\",\"links\":[],\"shape\":3,\"slot_index\":3}],\"properties\":{\"Node name for S&R\":\"Checkpoint Loader (Simple)\"},\"widgets_values\":[\"PixelWave05.safetensors\"],\"color\":\"#571a1a\",\"bgcolor\":\"#6b2e2e\"},{\"id\":89,\"type\":\"KSampler\",\"pos\":[910,-120],\"size\":{\"0\":320,\"1\":470},\"flags\":{},\"order\":7,\"mode\":0,\"inputs\":[{\"name\":\"model\",\"type\":\"MODEL\",\"link\":267},{\"name\":\"positive\",\"type\":\"CONDITIONING\",\"link\":269},{\"name\":\"negative\",\"type\":\"CONDITIONING\",\"link\":270},{\"name\":\"latent_image\",\"type\":\"LATENT\",\"link\":268}],\"outputs\":[{\"name\":\"LATENT\",\"type\":\"LATENT\",\"links\":[271],\"shape\":3,\"slot_index\":0}],\"properties\":{\"Node name for 
S&R\":\"KSampler\"},\"widgets_values\":[371,\"increment\",40,5,\"dpmpp_2m_sde\",\"karras\",1],\"color\":\"#57571a\",\"bgcolor\":\"#6b6b2e\"}],\"links\":[[136,21,6,56,0,\"STRING\"],[137,21,7,57,0,\"STRING\"],[178,21,6,69,1,\"STRING\"],[179,21,7,69,2,\"STRING\"],[227,55,0,60,0,\"IMAGE\"],[253,84,0,21,3,\"STRING\"],[254,85,0,21,4,\"STRING\"],[255,86,0,21,5,\"INT\"],[267,21,0,89,0,\"MODEL\"],[268,21,1,89,3,\"LATENT\"],[269,21,2,89,1,\"CONDITIONING\"],[270,21,3,89,2,\"CONDITIONING\"],[271,89,0,55,0,\"LATENT\"],[272,59,2,55,1,\"VAE\"],[278,21,0,91,0,\"MODEL\"],[279,59,1,91,1,\"CLIP\"],[280,59,2,91,2,\"VAE\"],[296,60,0,69,0,\"IMAGE\"],[301,59,1,21,1,\"CLIP\"],[302,59,1,21,2,\"CLIP\"],[303,59,0,99,0,\"MODEL\"],[304,59,1,99,1,\"CLIP\"],[306,99,3,60,1,\"STRING\"],[348,99,0,21,0,\"MODEL\"]],\"groups\":[],\"config\":{},\"extra\":{},\"version\":0.4}}", + "steps": 40, + "prompt": "texta logo, medallion background", + "denoise": 1, + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 5, + "scheduler": "karras" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9ba56a7e-6049-4653-af93-f259f2de56eb/width=450/4176533.jpeg", + "nsfw": "None", + "width": 896, + "height": 1152, + "hash": "U26@vHM{03%L_2WBIoof05t6^hM|9boe%KR*", + "type": "image", + "metadata": { + "hash": "U26@vHM{03%L_2WBIoof05t6^hM|9boe%KR*", + "size": 1427268, + "width": 896, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469281, + "Model": "sd_xl_base_1.0_0.9vae.safetensors", + "steps": 40, + "hashes": { + "model": "9B4D991F" + }, + "prompt": "texta logo, medallion background", + "sampler": "dpmpp_2m_sde_gpu", + "cfgScale": 4, + "resources": [ + { + "hash": "9B4D991F", + "name": "sd_xl_base_1.0_0.9vae.safetensors", + "type": "model" + } + ], + "Model hash": "9B4D991F", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0bbffb4c-a5bb-4416-bc3a-403602dbdc87/width=450/4177068.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "U09~.i~Q04D;~7xW9yR-06IY~hB7=^a#0*M~", + "type": "image", + "metadata": { + "hash": "U09~.i~Q04D;~7xW9yR-06IY~hB7=^a#0*M~", + "size": 756299, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, + "hashes": { + "model": "F1C6917C" + }, + "prompt": "A fire symbol surrounding the word Oxygen.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/420d375f-7b23-457a-a668-caeb33c74918/width=450/4177023.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "U27-l:IT03M}_2RiRlay03Rk?at7xsWURVah", + "type": "image", + "metadata": { + "hash": "U27-l:IT03M}_2RiRlay03Rk?at7xsWURVah", + "size": 1345169, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, 
+ "hashes": { + "model": "F1C6917C" + }, + "prompt": "A wreath symbol surrounding the word Leaf.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bf1274fd-6492-4ce7-809a-5bd27ef174d2/width=450/4177278.jpeg", + "nsfw": "None", + "width": 1152, + "height": 896, + "hash": "UDE|uCQmH?Md.Sn3vKvzK,ic-Us9{wm+xvsD", + "type": "image", + "metadata": { + "hash": "UDE|uCQmH?Md.Sn3vKvzK,ic-Us9{wm+xvsD", + "size": 1202873, + "width": 1152, + "height": 896 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "%Prompt With Style V3.output_latent_height%x%Prompt With Style V3.output_latent_width%", + "seed": 401812297469295, + "Model": "PixelWaveTurbo.safetensors", + "steps": 6, + "hashes": { + "model": "F1C6917C" + }, + "prompt": "floral design with the word Island.", + "sampler": "dpmpp_sde_gpu", + "cfgScale": 2, + "resources": [ + { + "hash": "F1C6917C", + "name": "PixelWaveTurbo.safetensors", + "type": "model" + } + ], + "Model hash": "F1C6917C", + "negativePrompt": "blurry, low resolution, poorly drawn" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/249521" +} \ No newline at end of file diff --git a/texta.preview.png b/texta.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..cc37b297294aea9735a2caf21de1042e6d224d2e Binary files /dev/null and b/texta.preview.png differ diff --git a/texta.safetensors b/texta.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..220eedf0da57e16a3a4b1eb36f80dc235691413c --- /dev/null +++ b/texta.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7026f2c5739f6f46a7b2eb85b40a26f6b7f6ac11504b0f04b4fdc9603ed4c95 +size 282514664 diff --git a/vapor_graphic_sdxl.civitai.info b/vapor_graphic_sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..fc8781fbe5ee1a0710c85d1727b27ec7815d7c87 --- /dev/null +++ b/vapor_graphic_sdxl.civitai.info @@ -0,0 +1,504 @@ +{ + "id": 142491, + "modelId": 124587, + "name": "SDXL v1.0", + "createdAt": "2023-08-17T12:00:58.651Z", + "updatedAt": "2023-08-17T12:14:13.235Z", + "status": "Published", + "publishedAt": "2023-08-17T12:14:13.231Z", + "trainedWords": [ + "vapor_graphic" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1036, + "ratingCount": 144, + "rating": 5, + "thumbsUpCount": 229 + }, + "model": { + "name": "Vaporwave Graphic", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 104860, + "sizeKB": 166546.88671875, + "name": "vapor_graphic_sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-17T12:10:38.108Z", + "hashes": { + "AutoV1": "1B3EE044", + "AutoV2": "FA07E4BCE6", + "SHA256": "FA07E4BCE6A7EC4CBEFFD196D8308150846590940AB9EFE597104837A0877A25", + "CRC32": "724C8B27", + "BLAKE3": "EFBA6D618A66E7F5097BBE10CA29750B9DE9A616C76F8815913494C46084F005" + }, + "primary": true, + "downloadUrl": 
"https://civitai.com/api/download/models/142491" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b9cedc19-b815-4680-8783-81c90cd9895d/width=450/2071978.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UXMP0C+e{.Rk%0#9K}jb}YwJRj$j+5KNohXR", + "type": "image", + "metadata": { + "hash": "UXMP0C+e{.Rk%0#9K}jb}YwJRj$j+5KNohXR", + "size": 1537427, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 884792838, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a woman with a crown on her head and a pink background and a message that reads : 'VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"", + "\"__prompts_vaporwave__ and a message that reads": "'VAPOR-GRAPHIC'" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/7679d69d-b737-4150-860a-26a5feac6d36/width=450/2071954.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UcD.FAizQ.TfHOt6ROW8WCOWn-v*k=bFS}nT", + "type": "image", + "metadata": { + "hash": "UcD.FAizQ.TfHOt6ROW8WCOWn-v*k=bFS}nT", + "size": 2107267, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 1347239949, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a computer and a statue with palm trees in the background and a message that reads : VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 10, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/75cef469-fee5-4f26-882e-752850e90904/width=450/2071953.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UFG[dW]@_y$T17I.9XI-4TXSpRobRpkCX{n+", + "type": "image", + "metadata": { + "hash": "UFG[dW]@_y$T17I.9XI-4TXSpRobRpkCX{n+", + "size": 1877391, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 1347239952, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a computer and a statue with palm trees in the background and a message that reads : VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 10, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b02b0614-80f2-4305-8ce1-be960ee8452c/width=450/2071956.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UFGTESwQZyWZ3]XjIAr?4DOni*NFuK#F+Ib;", + "type": "image", + 
"metadata": { + "hash": "UFGTESwQZyWZ3]XjIAr?4DOni*NFuK#F+Ib;", + "size": 2084034, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 1347239951, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a computer and a statue with palm trees in the background and a message that reads : VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 10, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/b5b8a069-cd4c-4f30-a34a-3f5b5f429f14/width=450/2071970.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "URJ@%@K%}v%g{ArEq^+c-XXTEzsm+$NZ-WKN", + "type": "image", + "metadata": { + "hash": "URJ@%@K%}v%g{ArEq^+c-XXTEzsm+$NZ-WKN", + "size": 1708099, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 884792835, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a statue of a woman with a flower in her hair and a message that reads : 'VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"", + "\"__prompts_vaporwave__ and a message that reads": "'VAPOR-GRAPHIC'" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e222eda5-d56c-4af7-a517-bdf9cd58101a/width=450/2071968.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UQJad9X-7h+c;YR7ERTbRgOYK$wh-TkDt9My", + "type": "image", + "metadata": { + "hash": "UQJad9X-7h+c;YR7ERTbRgOYK$wh-TkDt9My", + "size": 1624696, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 884792834, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a woman with flowers in her hand and a bird flying above her and a message that reads : 'VAPOR-GRAPHIC', , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"", + "\"__prompts_vaporwave__ and a message that reads": "'VAPOR-GRAPHIC'" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d67a0835-84bc-489b-9255-6d8640c45c9c/width=450/2072009.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "UOJRn6{,#0=gpY:8t{t2+$qwJhrE}IvOidrv", + "type": "image", + "metadata": { + "hash": "UOJRn6{,#0=gpY:8t{t2+$qwJhrE}IvOidrv", + "size": 1325284, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": 
"1024x1280", + "seed": 2589625878, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a statue of a woman with a triangle on her head , , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"__prompts_vaporwave__", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/ea482283-a9c0-4371-9a20-3035c2637577/width=450/2071987.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UEGlb*-Y.$|6:c7KF+$QqS-Cl9zWxANPBzOG", + "type": "image", + "metadata": { + "hash": "UEGlb*-Y.$|6:c7KF+$QqS-Cl9zWxANPBzOG", + "size": 1820060, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 4198223854, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a statue of a man with a beard , , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"__prompts_vaporwave__", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/90792f5b-5475-410b-80bf-7eb3b5e2635b/width=450/2071993.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1440, + "hash": "UKK,a_^Pvp+{0FO?B-GY4f*}W9#-5tBQ{e#D", + "type": "image", + "metadata": { + "hash": "UKK,a_^Pvp+{0FO?B-GY4f*}W9#-5tBQ{e#D", + "size": 1360417, + "width": 1024, + "height": 1440 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1440", + "seed": 4198223855, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a cat is jumping over a pink and blue background , , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ + { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"__prompts_vaporwave__", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a6cde9ea-1d80-456c-bd26-68fec04e0d6c/width=450/2072002.jpeg", + "nsfw": "None", + "width": 1024, + "height": 1280, + "hash": "ULJib+}szpw5-tRowKxZsuniOYNw0%JAJUWm", + "type": "image", + "metadata": { + "hash": "ULJib+}szpw5-tRowKxZsuniOYNw0%JAJUWm", + "size": 1278525, + "width": 1024, + "height": 1280 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1280", + "seed": 2589625875, + "Model": "SDXL_HK_V0.6", + "steps": 20, + "hashes": { + "model": "b3a902c45c" + }, + "prompt": "a retro tv with a sunset scene on it , , vaporwave, vapor_graphic", + "Version": "v1.5.1", + "sampler": "DPM++ 2M SDE Karras", + "cfgScale": 11, + "resources": [ 
+ { + "name": "vapor_graphic_sdxl", + "type": "lora", + "weight": 0.6 + }, + { + "hash": "b3a902c45c", + "name": "SDXL_HK_V0.6", + "type": "model" + } + ], + "Model hash": "b3a902c45c", + "Wildcard prompt": "\"__prompts_vaporwave__", + "vapor_graphic_sdxl": "0.6> , vaporwave, vapor_graphic\"", + "\"vapor_graphic_sdxl": "ebdbc27a5804\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/142491" +} \ No newline at end of file diff --git a/vapor_graphic_sdxl.preview.png b/vapor_graphic_sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..ae3193646781e5cdc54be8131eb57f2f7bdf92fd Binary files /dev/null and b/vapor_graphic_sdxl.preview.png differ diff --git a/vapor_graphic_sdxl.safetensors b/vapor_graphic_sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..281864c19f215d062538fa9ca6e1e510deb99326 --- /dev/null +++ b/vapor_graphic_sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa07e4bce6a7ec4cbeffd196d8308150846590940ab9efe597104837a0877a25 +size 170544012 diff --git a/xl_more_art-full_v1.civitai.info b/xl_more_art-full_v1.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..45037fa0fca459b0d623de262659dacab5073c81 --- /dev/null +++ b/xl_more_art-full_v1.civitai.info @@ -0,0 +1,484 @@ +{ + "id": 152309, + "modelId": 124347, + "name": "xl_more_art-full-v1", + "createdAt": "2023-08-31T06:01:33.046Z", + "updatedAt": "2023-09-24T22:24:47.795Z", + "status": "Published", + "publishedAt": "2023-08-31T06:54:44.399Z", + "trainedWords": [], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

The V1 is OUT !!
Incredible good !!!!
", + "stats": { + "downloadCount": 51391, + "ratingCount": 1157, + "rating": 4.95, + "thumbsUpCount": 1445 + }, + "model": { + "name": "xl_more_art-full / xl_real / Enhancer", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 113467, + "sizeKB": 702168.71875, + "name": "xl_more_art-full_v1.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-08-31T06:21:13.973Z", + "hashes": { + "AutoV1": "C3149766", + "AutoV2": "15E31FE2B6", + "SHA256": "15E31FE2B6AE2E77EE47A3CCDF27BD14F7B54CE27C6A58502875FDAD26F34460", + "CRC32": "1B3B9470", + "BLAKE3": "3CA25696AB1B233B8DD613FAB04E9ED9026E50AE53D638191FD19BD538627EDF" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/152309" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/321764d7-8265-4050-bbcd-d6c0a5da556c/width=450/2287991.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1152, + "hash": "U9BNZDx^4T8|~nD-4WMyDPRjNz%e4Wx:yD.6", + "type": "image", + "metadata": { + "hash": "U9BNZDx^4T8|~nD-4WMyDPRjNz%e4Wx:yD.6", + "size": 2401172, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 4106511344, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Cute creature from Space. terraforming. Alien Flora, Miki Asai Macro photography, close-up, hyper detailed, trending on artstation, sharp focus, studio photo, intricate details, highly detailed, by greg rutkowski\ndetailed face, detailed skin\n", + "Version": "v1.6.0-RC", + "sampler": "Euler", + "cfgScale": 7.5, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "30", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo", + "Denoising strength": "0.45", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/31bc873b-6fd5-4645-a72c-e61c3c91ae9f/width=450/2443509.jpeg", + "nsfw": "None", + "width": 4800, + "height": 3709, + "hash": "UDDvZk{c:j+uT_k=kWS#vLS%OYSOtT+G#7v}", + "type": "image", + "metadata": { + "hash": "UDDvZk{c:j+uT_k=kWS#vLS%OYSOtT+G#7v}", + "size": 18965580, + "width": 4800, + "height": 3709 + }, + "availability": "Public", + "sizeKB": null, + "meta": null + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0dda8569-33b7-47cb-a405-eadfcc806bc2/width=450/2287990.jpeg", + "nsfw": "None", + "width": 1376, + "height": 1376, + "hash": "UGE|h|-900BqM{TKXnicDOOF~9wb#5wHO@Fz", + "type": "image", + "metadata": { + "hash": "UGE|h|-900BqM{TKXnicDOOF~9wb#5wHO@Fz", + "size": 1969804, + "width": 1376, + "height": 1376 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x1024", + "seed": 1785409721, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 20, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "cartoon style illustration, minimalist style,\nanimals made 
of cloud\nCat and Goldfish:\n( scene on the background, hyper detailed, artistic,:1.2)\n", + "Version": "v1.6.0-RC", + "sampler": "Euler", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "60", + "Hires upscale": "1.35", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "low quality, worst quality, monochrome, lowres, painting, crayon, sketch, graphite, impressionist, noisy, blurry, long neck, long torso, bad anatomy, Cropped, Fake,", + "Denoising strength": "0.35", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/df8952e5-e439-4949-888f-f53f97245295/width=450/2287986.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "URIg}.%MyEV[~qRjRQV@={V@E1oJ^+xu%Nfk", + "type": "image", + "metadata": { + "hash": "URIg}.%MyEV[~qRjRQV@={V@E1oJ^+xu%Nfk", + "size": 2923946, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 3719211272, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 50, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "photo of Hyperrealistic art Wonderful, intricate, (colorful), (a superb Art Brut Decollage art, Lycanthropic Phoenix), ethereal neural network organism, divine cyborg dragon:2 girl:0.3, ginger:0.5 biomorph, glass skeleton, skinless:3, anatomical face, biomechanical details, (white and iridescent colors:1.1) bright colors, alchemist, alt_style, cinematic, 35mm film, 35mm photography, film, photo realism, DSLR, 8k uhd, hdr, ultra-detailed, high quality, high contrast,\n", + "Version": "v1.6.0-RC", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "70", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "croped, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo, jewel, armor, plastic,3D, weapon", + "Denoising strength": "0.5", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5ce8a631-8a5b-4543-996b-020fbf2c7f94/width=450/2287988.jpeg", + "nsfw": "Soft", + "width": 1152, + "height": 1536, + "hash": "U6AmrE}j001SyYt7n2Mx015B~A~84To~%$IT", + "type": "image", + "metadata": { + "hash": "U6AmrE}j001SyYt7n2Mx015B~A~84To~%$IT", + "size": 1809458, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 2498513514, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 50, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "a photograph of a luminecent creature in style of Luminogram portrait with fiber optic light painting, Light field photography, Light painting, Light tracing, Long exposure photography, Holography\nethereal neural network organism, biomorph,\nalchemist, alt_style, cinematic, 35mm film, 35mm photography, film, photo realism, DSLR, 8k uhd, hdr, ultra-detailed, high quality, high contrast, dress\n", + "Version": "v1.6.0-RC", + "sampler": "Euler a", + "cfgScale": 7, + 
"resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "70", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "croped, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo, jewel, armor, plastic,3D, weapon,", + "Denoising strength": "0.5", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/58a1b7d0-151f-426b-b9ab-80eaa94115ee/width=450/2287981.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1152, + "hash": "U6DcN~0000M{00~Wt8xaXl+Z~CR-0f9w-VV?", + "type": "image", + "metadata": { + "hash": "U6DcN~0000M{00~Wt8xaXl+Z~CR-0f9w-VV?", + "size": 3326887, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 3627197887, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "big stylized light grey shape of a fish made of rocks, landart\nAerial, rocks, beach, water, river, road, sea, island\n", + "Version": "v1.6.0-RC", + "sampler": "Euler", + "cfgScale": 7.5, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "60", + "Hires prompt": "\"Aerial, rocks, beach, water, river, road, sea", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo, jewel, armor, plastic,3D, hair, fur", + "Denoising strength": "0.3", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "xl_more_art-full-beta3_1_0.5": "1>\"", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/480ef8ae-b55c-4933-9521-807f822464f6/width=450/2288027.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1152, + "hash": "UKHL0UEL15Iq00~qbbRkGI9F%0tR56kY-VR%", + "type": "image", + "metadata": { + "hash": "UKHL0UEL15Iq00~qbbRkGI9F%0tR56kY-VR%", + "size": 3152777, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "1024x768", + "seed": 350967300, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 30, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Aerial, fields, forest, rocks, water, river, snow, sand, grass, road, lake,\nbig stylized shape of a bear, landart\n", + "Version": "v1.6.0-RC", + "sampler": "Euler", + "cfgScale": 7.5, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "120", + "Hires prompt": "\"Aerial, fields, forest, rocks, water, river, snow, sand, grass, road, lake", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo, jewel, armor, plastic,3D, hair, fur", + "Denoising strength": "0.3", + "Token 
merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "xl_more_art-full-beta3_1_0.5": "1>\"", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d14dc7dd-ff0a-4f3c-bb29-1746378e2338/width=450/2287978.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "UbG^OdyDuPRkl9S6M}t6o~nMM|bbXnahahoI", + "type": "image", + "metadata": { + "hash": "UbG^OdyDuPRkl9S6M}t6o~nMM|bbXnahahoI", + "size": 2155794, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 3038067512, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 40, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "Kawaii Chameleon Painter changing its body colors to match its vibrant painting. Render this in an anime style, focusing on the chameleon's cute, wide eyes and intricate patterns on its body.\n", + "Version": "v1.6.0-RC", + "sampler": "DPM++ 2S a Karras", + "cfgScale": 8, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "35", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "blurry, blurred detail, poorly hands, poorly face, enhanced hands, missing fingers, mutated hands, fused fingers, deformed, malformed limbs, disfigured, watermarked, text, extremely grainy, very chromatic aberration, oversaturated", + "Denoising strength": "0.55", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/5ed4b819-3d5e-4ae3-b983-e4f11f295a78/width=450/2288132.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "U9B}Qv8~,8_H00bc4_Sc0N?SvoTJ?uE5v~D,", + "type": "image", + "metadata": { + "hash": "U9B}Qv8~,8_H00bc4_Sc0N?SvoTJ?uE5v~D,", + "size": 2988274, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 1396618076, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 70, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "anime artwork Kool aide man chuthulu yelling \"Oh yeah!\" and crashing through the abyss, eldritch abomination, the old god, green, hyper detailed, refreshing drink, blue drink, lemon slice . 
anime style, key visual, vibrant, studio anime, highly detailed\n", + "Version": "v1.6.0-RC", + "sampler": "DPM++ SDE Karras", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "45", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "blurry, blurred detail, poorly hands, poorly face, enhanced hands, missing fingers, mutated hands, fused fingers, deformed, malformed limbs, disfigured, watermarked, text, extremely grainy, very chromatic aberration, oversaturated, asian", + "Denoising strength": "0.5", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/a725027c-c05e-4628-abb2-72380158aaa6/width=450/2287980.jpeg", + "nsfw": "None", + "width": 1152, + "height": 1536, + "hash": "UDHT?0tQ03-S~Us:M|WC0gjZN_R+VsoJS#j]", + "type": "image", + "metadata": { + "hash": "UDHT?0tQ03-S~Us:M|WC0gjZN_R+VsoJS#j]", + "size": 2355656, + "width": 1152, + "height": 1536 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "768x1024", + "seed": 1135730645, + "Model": "sd_xl_base_1.0_0.9vae", + "steps": 50, + "hashes": { + "model": "e6bb9ea85b" + }, + "prompt": "paper cut light box, zombie with twisted limbs, masterpiece, 8k, high resolution, shallow depth of field, sharp focus\n", + "Version": "v1.6.0-RC", + "sampler": "Euler a", + "cfgScale": 7, + "resources": [ + { + "hash": "e6bb9ea85b", + "name": "sd_xl_base_1.0_0.9vae", + "type": "model" + } + ], + "Model hash": "e6bb9ea85b", + "Hires steps": "70", + "Hires upscale": "1.5", + "Hires upscaler": "4x-UltraSharp", + "negativePrompt": "croped, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, text, logo, jewel, armor, plastic,3D, weapon", + "Denoising strength": "0.5", + "Token merging ratio": "0.9", + "Token merging ratio hr": "0.9", + "\"xl_more_art-full-beta3_1_0.5": "fe3b4816be83\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/152309" +} \ No newline at end of file diff --git a/xl_more_art-full_v1.preview.png b/xl_more_art-full_v1.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..0fac3f999d3ac80684055beda5f2bdc23d7b1efd Binary files /dev/null and b/xl_more_art-full_v1.preview.png differ diff --git a/xl_more_art-full_v1.safetensors b/xl_more_art-full_v1.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4c7c25722f36f420e94119b727c68ba278c95791 --- /dev/null +++ b/xl_more_art-full_v1.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15e31fe2b6ae2e77ee47a3ccdf27bd14f7b54ce27c6a58502875fdad26f34460 +size 719020768 diff --git a/zavy-ctsmtrc-sdxl.civitai.info b/zavy-ctsmtrc-sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..eb6fef4692ed04fc77c8e105c1f995496f749d4d --- /dev/null +++ b/zavy-ctsmtrc-sdxl.civitai.info @@ -0,0 +1,372 @@ +{ + "id": 381373, + "modelId": 340599, + "name": "v1.0", + "createdAt": "2024-03-08T17:52:14.220Z", + "updatedAt": "2024-03-08T17:54:46.032Z", + "status": "Published", + "publishedAt": "2024-03-08T17:54:46.031Z", + "trainedWords": [ + "zavy-ctsmtrc", + "isometric" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": 
"SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

First upload, see description.
", + "stats": { + "downloadCount": 796, + "ratingCount": 0, + "rating": 0, + "thumbsUpCount": 159 + }, + "model": { + "name": "Zavy's Cute Isometric Tiles - SDXL", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 305890, + "sizeKB": 223084.92578125, + "name": "zavy-ctsmtrc-sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2024-03-08T17:56:04.319Z", + "hashes": { + "AutoV1": "55BC0E33", + "AutoV2": "38F5217998", + "SHA256": "38F52179982B9135AB4AEB0553D3C2F8702382B73CD45C2B12CB59554355F363", + "CRC32": "9F9A34E4", + "BLAKE3": "DEB41F7258FC469AAEE339C87658CAB36D38D985A67F4DB00A513A4CCBB05747", + "AutoV3": "AF81782FB640D64AC7D941C0E78EFC57A696816EC6DE99FD048F3AB7C080DE36" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/381373" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/06f32013-d524-4ce2-b7da-bddd89c29491/width=450/7637825.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "ULE;Dq.7lCRk%gWDkYb0pfM{RPf,RNs+Mws,", + "type": "image", + "metadata": { + "hash": "ULE;Dq.7lCRk%gWDkYb0pfM{RPf,RNs+Mws,", + "size": 2690401, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 735517551787264, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "waterfall, glowing, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4ad4d1a3-9555-4f65-bbb9-ea636cc9e260/width=450/7637808.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U9Eo}h-:000L]zi_58o}00bI?Gt7.TozIUMw", + "type": "image", + "metadata": { + "hash": "U9Eo}h-:000L]zi_58o}00bI?Gt7.TozIUMw", + "size": 2375728, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 638289126074573, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "ghost rapunzel, glowing blue eyes, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e2f434d9-8dac-4872-9dbd-e29d4e66c64d/width=450/7637802.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "UACs1~~S030O94IXK0xZ4=Iq-U-mX%xs-5oI", + "type": "image", + "metadata": { + "hash": "UACs1~~S030O94IXK0xZ4=Iq-U-mX%xs-5oI", + "size": 2735654, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 294575332058161, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "rapunzel witch tower, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": 
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/cd40b217-2f0d-4618-8f94-677205e8ae9a/width=450/7637803.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "UEAn[x%#02IpuPW=Iqf+0LRj%0t6g$oy-nt7", + "type": "image", + "metadata": { + "hash": "UEAn[x%#02IpuPW=Iqf+0LRj%0t6g$oy-nt7", + "size": 2596130, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 822019609100513, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "a tiny boat in a violent ocean, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6d20ae48-45ea-45f4-a8df-4c626faa8d67/width=450/7637807.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U7Ci,bD%00pI-5Rix]S48^WAKPV@~Vba4nt6", + "type": "image", + "metadata": { + "hash": "U7Ci,bD%00pI-5Rix]S48^WAKPV@~Vba4nt6", + "size": 2843008, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 624898088184751, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "a wormhole portal in space, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/82e60b0e-407e-44fd-a466-8a4e98c6e91a/width=450/7637824.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "UKJbdS.8xvof?vWBRjog.TROozWrI9t7RjjY", + "type": "image", + "metadata": { + "hash": "UKJbdS.8xvof?vWBRjog.TROozWrI9t7RjjY", + "size": 2638819, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 520428230050573, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "the great wave off kanagawa, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6bf1971f-af14-41f8-b2ed-85cdc6cff68b/width=450/7637815.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U66viRyC00IBH=WFr9i_U[a{X=jIvcngXqbv", + "type": "image", + "metadata": { + "hash": "U66viRyC00IBH=WFr9i_U[a{X=jIvcngXqbv", + "size": 2298789, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 1023722821444831, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "glowing blue ghost, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/53907b12-43c9-4b8d-8570-0899b5231e49/width=450/7637810.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U59*ZA.Q00H@vgIVDixF00M{%hx]QlxC*0X.", + "type": "image", + "metadata": { + "hash": 
"U59*ZA.Q00H@vgIVDixF00M{%hx]QlxC*0X.", + "size": 2520954, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 573762909040651, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "ghost metroid prime, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/07345b99-6fd7-4716-b310-c9c6514562f2/width=450/7637809.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U37Lx}oe00RiD9a#%9oc03j?%gkD}{V?JIog", + "type": "image", + "metadata": { + "hash": "U37Lx}oe00RiD9a#%9oc03j?%gkD}{V?JIog", + "size": 2689033, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 138360638175553, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "amazing blue ghostly flower glowing in a deathly ashen swamp, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/30af09ea-8997-487a-9e41-ad609925606d/width=450/7637816.jpeg", + "nsfw": "None", + "width": 2152, + "height": 2768, + "hash": "U48Xa[~n050-H@IWSv%200E3+s-T+WxBnNn$", + "type": "image", + "metadata": { + "hash": "U48Xa[~n050-H@IWSv%200E3+s-T+WxBnNn$", + "size": 2860493, + "width": 2152, + "height": 2768 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "Size": "896x1152", + "seed": 753881219091706, + "steps": 40, + "hashes": { + "model": "780124a413" + }, + "prompt": "old wooden shack, spooky graveyard, graves, glowing blue lights, isometric, zavy-ctsmtrc, art,", + "sampler": "dpmpp_3m_sde_exponential", + "cfgScale": 6, + "resources": [], + "Model hash": "780124a413", + "negativePrompt": "photography, cropped, crop,", + "zavychromaxl_v50 Version": "ComfyUI" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/381373" +} \ No newline at end of file diff --git a/zavy-ctsmtrc-sdxl.preview.png b/zavy-ctsmtrc-sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..5ce7c762394c14ffdff49f9c991207d9b8290228 --- /dev/null +++ b/zavy-ctsmtrc-sdxl.preview.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7305a6fb91b1f5104b89c20f29d92543b3d7e693146cdc5ce04f13e2dc52213 +size 2846030 diff --git a/zavy-ctsmtrc-sdxl.safetensors b/zavy-ctsmtrc-sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c5aced1b1bdc398cb473c6ca48547986489ea237 --- /dev/null +++ b/zavy-ctsmtrc-sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:38f52179982b9135ab4aeb0553d3c2f8702382b73cd45c2b12cb59554355f363 +size 228438964 diff --git a/zhibi-sdxl.civitai.info b/zhibi-sdxl.civitai.info new file mode 100644 index 0000000000000000000000000000000000000000..ed236935f067721e0141852b86786143a523f383 --- /dev/null +++ b/zhibi-sdxl.civitai.info @@ -0,0 +1,510 @@ +{ + "id": 209649, + "modelId": 186716, + "name": "SDXL", + "createdAt": "2023-11-02T17:32:06.592Z", + "updatedAt": "2023-11-27T16:33:04.879Z", + "status": "Published", + "publishedAt": 
"2023-11-02T17:34:16.420Z", + "trainedWords": [ + "zhibi" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": "

SDXL:
Trigger word: zhibi
Sampling Method: DPM++ 2M SDE Karras

", + "stats": { + "downloadCount": 3933, + "ratingCount": 143, + "rating": 4.98, + "thumbsUpCount": 229 + }, + "model": { + "name": "Smol Animals [LoRA 1.5+SDXL]", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 160957, + "sizeKB": 223098.50390625, + "name": "zhibi-sdxl.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-11-02T17:35:48.974Z", + "hashes": { + "AutoV1": "F7E8627C", + "AutoV2": "16D11D894A", + "SHA256": "16D11D894AEAE97F89FF122148D47E92EC8F9919D39D55D422E2BD7EDBB12122", + "CRC32": "B9BFCCB4", + "BLAKE3": "82C9733BF96FA094AE8760C785AD36286716943A3AC3C8B6384DC050593308EB" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/209649" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/86508320-b91c-47a1-a3e9-b5a7bad8b280/width=450/3339633.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UHD]C^~B9_IpS%NGIVM{EjXT$2%2D*jb-o%2", + "type": "image", + "metadata": { + "hash": "UHD]C^~B9_IpS%NGIVM{EjXT$2%2D*jb-o%2", + "size": 1140196, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 3926580180, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, animal spider, small spider, chibi spider, cute happy spider, sitting in a spiderweb ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/f1cec93a-670d-4c16-9c17-6a4fdf44a129/width=450/3339632.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UTKnbw~V?wIpY8NGNGbbpJ%1Mx%2-=R+xaae", + "type": "image", + "metadata": { + "hash": "UTKnbw~V?wIpY8NGNGbbpJ%1Mx%2-=R+xaae", + "size": 907416, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 537872991, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, seal, small seal, chibi seal, cute, on the beach ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/071cf188-3b92-488b-9ae9-3cb5f0b8e5f6/width=450/3339618.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UXH2WMW=IU%L?dIqV@fR%LIpt7Rk^%oLxts.", + "type": "image", + "metadata": { + "hash": "UXH2WMW=IU%L?dIqV@fR%LIpt7Rk^%oLxts.", + "size": 1053031, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 731624032, + "Model": "starlightXLAnimated_v3", + 
"steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, cow, small cow, chibi cow, cute, on a farm, eating grass, in the alps, sun going down on the horizont ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/8bd51d5f-3443-4880-ada4-b1e76f4fe7a6/width=450/3339619.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UWGlFlyDV?E2?dtmniWBNzxvV@xZtkt7xWxY", + "type": "image", + "metadata": { + "hash": "UWGlFlyDV?E2?dtmniWBNzxvV@xZtkt7xWxY", + "size": 1106416, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 731624026, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, cow, small cow, chibi cow, cute, on a farm, eating grass, in the alps, sun going down on the horizont ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6269a52e-f145-4af5-9d2a-b30f63ad0dcf/width=450/3339623.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UGDv=OS2EMx@.j-:IVRQ-;buslD*9@%2w|aw", + "type": "image", + "metadata": { + "hash": "UGDv=OS2EMx@.j-:IVRQ-;buslD*9@%2w|aw", + "size": 1052133, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2705379300, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, parrot, small parrot, chibi parrot, cute, sitting on a finger, wild rain forrest background ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/860c17cb-69f7-4839-b6d7-5f78b300fca0/width=450/3339627.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UGE3Y9R6ABXR.kIVE1kW?aaerrep6xNG$+r^", + "type": "image", + "metadata": { + "hash": "UGE3Y9R6ABXR.kIVE1kW?aaerrep6xNG$+r^", + "size": 1028881, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2705379299, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, parrot, small parrot, chibi parrot, cute, sitting on a finger, wild rain forrest background ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": 
"lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/61c0a7ed-b9cd-4a37-b475-23006f50ee09/width=450/3339626.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "ULIg=kI:yC~W%1x]nNEL%LE1xuE1n#%MM{Rj", + "type": "image", + "metadata": { + "hash": "ULIg=kI:yC~W%1x]nNEL%LE1xuE1n#%MM{Rj", + "size": 1001886, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 3259973915, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, cat, small cat, chibi cat, cute, with red ball of wool ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/dd0f31d6-1ff3-4b9c-933d-176d26959424/width=450/3339622.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UHKc%|56?a~Ciw-o9FXRMdRjWYD%-:-:Iot7", + "type": "image", + "metadata": { + "hash": "UHKc%|56?a~Ciw-o9FXRMdRjWYD%-:-:Iot7", + "size": 965649, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 3013067113, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, cat, small cat, chibi cat, cute, with red big ball of wool ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/9c755586-3319-4348-bba3-ce0e3963c64a/width=450/3339621.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UJHKqV4:xC~U0NxWIUR-Mxt7ogM{r:W?%2jX", + "type": "image", + "metadata": { + "hash": "UJHKqV4:xC~U0NxWIUR-Mxt7ogM{r:W?%2jX", + "size": 921546, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2164613300, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, mouse, small mouse, chibi mouse, cute, munching on a way too big cheese ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d0dc5676-b555-4bb4-90b1-f497ca57fcd2/width=450/3339625.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UCI4eE~q~9IU009a4nxZ-T-o~Vx]-l4:Rk-T", + "type": "image", + "metadata": { + "hash": "UCI4eE~q~9IU009a4nxZ-T-o~Vx]-l4:Rk-T", + 
"size": 942252, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2164613306, + "Model": "starlightXLAnimated_v3", + "steps": 20, + "\"zhibi": "d8aa9e4e7926\"", + "hashes": { + "model": "00a14cdeaa" + }, + "prompt": "zhibi, chibi, mouse, small mouse, chibi mouse, cute, munching on a way too big cheese ", + "Version": "1.6.0", + "sampler": "DPM++ 2M Karras", + "VAE hash": "63aeecb90f", + "cfgScale": 7, + "resources": [ + { + "name": "zhibi", + "type": "lora", + "weight": 1 + }, + { + "hash": "00a14cdeaa", + "name": "starlightXLAnimated_v3", + "type": "model" + } + ], + "Model hash": "00a14cdeaa" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/209649" +} \ No newline at end of file diff --git a/zhibi-sdxl.preview.png b/zhibi-sdxl.preview.png new file mode 100644 index 0000000000000000000000000000000000000000..bd09b859e50c516070bbcf393eac957d36ea0ba9 Binary files /dev/null and b/zhibi-sdxl.preview.png differ diff --git a/zhibi-sdxl.safetensors b/zhibi-sdxl.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4eeca088c60f1741964d7e99296e46ed8bf0b82d --- /dev/null +++ b/zhibi-sdxl.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16d11d894aeae97f89ff122148d47e92ec8f9919d39d55d422e2bd7edbb12122 +size 228452868 diff --git "a/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.civitai.info" "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.civitai.info" new file mode 100644 index 0000000000000000000000000000000000000000..547b08b5910c4f6a4925fd67c0427c29215b8473 --- /dev/null +++ "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.civitai.info" @@ -0,0 +1,233 @@ +{ + "id": 266666, + "modelId": 185469, + "name": "Anime-style landscape", + "createdAt": "2023-12-18T12:33:14.247Z", + "updatedAt": "2023-12-18T12:37:13.569Z", + "status": "Published", + "publishedAt": "2023-12-18T12:37:13.568Z", + "trainedWords": [ + "(\\ka tong feng jing ming xin pian\\)" + ], + "trainingStatus": null, + "trainingDetails": null, + "baseModel": "SDXL 1.0", + "baseModelType": null, + "earlyAccessTimeFrame": 0, + "description": null, + "stats": { + "downloadCount": 1417, + "ratingCount": 247, + "rating": 5, + "thumbsUpCount": 251 + }, + "model": { + "name": "XL\u63d2\u753b\u7cfb\u5217 XL Illustration Series", + "type": "LORA", + "nsfw": false, + "poi": false + }, + "files": [ + { + "id": 207793, + "sizeKB": 28237.30859375, + "name": "\u5361\u901a\u98ce\u666f\u660e\u4fe1\u7247.safetensors", + "type": "Model", + "metadata": { + "fp": null, + "size": null, + "format": "SafeTensor" + }, + "pickleScanResult": "Success", + "pickleScanMessage": "No Pickle imports", + "virusScanResult": "Success", + "virusScanMessage": null, + "scannedAt": "2023-12-18T12:40:47.786Z", + "hashes": { + "AutoV1": "600DAAD9", + "AutoV2": "FF19F7571B", + "SHA256": "FF19F7571B4E35149B3FAE423FDE7011FF055B37E52B6F000C6D74857768077E", + "CRC32": "178368FB", + "BLAKE3": "7F46E24C1DB69CE54BFFD9B42DDC00D1FA4507137FF6D1EB0D08622648CBF44C", + "AutoV3": "A2F9D0397AF7E6F9FF419A686CB14471BBD70ECED825FBD41D71793E5D4697DD" + }, + "primary": true, + "downloadUrl": "https://civitai.com/api/download/models/266666" + } + ], + "images": [ + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2143411c-a275-4464-af2a-1b72c9618b62/width=450/4684262.jpeg", + "nsfw": 
"None", + "width": 1536, + "height": 1152, + "hash": "UBH.5G9g02~T4_%1?HIs00$_^bE3W,IqEOxV", + "type": "image", + "metadata": { + "hash": "UBH.5G9g02~T4_%1?HIs00$_^bE3W,IqEOxV", + "size": 2149287, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x768", + "seed": 235035072, + "Model": "animexlXuebimix_v60LCM", + "steps": 25, + "hashes": { + "model": "c75ac9af77" + }, + "prompt": ",(\\ka tong feng jing ming xin pian\\),cloud, no humans, scenery, moon, bird, outdoors, reflection, starry sky, water, horizon, cloudy sky, crescent moon, minimalism", + "Version": "v1.6.0", + "sampler": "Euler a", + "VAE hash": "235745af8d", + "cfgScale": 4, + "resources": [ + { + "hash": "c75ac9af77", + "name": "animexlXuebimix_v60LCM", + "type": "model" + } + ], + "Model hash": "c75ac9af77", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "ESRGAN_4x", + "negativePrompt": "nsfw,lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],mutated hands,poorly drawn hands,{{{missing toes}}},extra fingers,fused fingers,{{{extra toes}}},{{{fused toes}}},{{{mutated toes}}},{{{bad foot}}},{{{mutated foot}}},lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,username,scan,[abstract],lowres,{bad},fewer,extra,{{{{worst quality}}}},", + "Denoising strength": "0.5", + "\"\u00e5\u008d\u00a1\u00e9\u0080\u009a\u00e9\u00a3\u008e\u00e6\u0099\u00af\u00e6\u0098\u008e\u00e4\u00bf\u00a1\u00e7\u0089\u0087": "a2f9d0397af7\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/4ef9f075-e361-4ec7-b0d8-3e09cfe87bf5/width=450/4684263.jpeg", + "nsfw": "None", + "width": 1536, + "height": 1152, + "hash": "UBI;*qtR00IcEXIp~WxV009v^x?D4-xo9bD+", + "type": "image", + "metadata": { + "hash": "UBI;*qtR00IcEXIp~WxV009v^x?D4-xo9bD+", + "size": 2291926, + "width": 1536, + "height": 1152 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "1024x768", + "seed": 3534472493, + "Model": "animexlXuebimix_v60LCM", + "steps": 25, + "hashes": { + "model": "c75ac9af77" + }, + "prompt": ",(\\ka tong feng jing ming xin pian\\),cloud, no humans, scenery, outdoors, reflection, sunset, grass, cloudy sky, water, starry sky, horizon, minimalism", + "Version": "v1.6.0", + "sampler": "Euler a", + "VAE hash": "235745af8d", + "cfgScale": 4, + "resources": [ + { + "hash": "c75ac9af77", + "name": "animexlXuebimix_v60LCM", + "type": "model" + } + ], + "Model hash": "c75ac9af77", + "Hires steps": "5", + "Hires upscale": "1.5", + "Hires upscaler": "ESRGAN_4x", + "negativePrompt": "nsfw,lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],mutated hands,poorly drawn hands,{{{missing toes}}},extra fingers,fused fingers,{{{extra toes}}},{{{fused toes}}},{{{mutated toes}}},{{{bad foot}}},{{{mutated foot}}},lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad 
quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,username,scan,[abstract],lowres,{bad},fewer,extra,{{{{worst quality}}}},", + "Denoising strength": "0.5", + "\"\u00e5\u008d\u00a1\u00e9\u0080\u009a\u00e9\u00a3\u008e\u00e6\u0099\u00af\u00e6\u0098\u008e\u00e4\u00bf\u00a1\u00e7\u0089\u0087": "a2f9d0397af7\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/e3756c42-e4bf-4638-ba60-9f0ebc6a34af/width=450/4684286.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UFLDN40q0M?YBabb=|NL05-m=pE48|NGT1s:", + "type": "image", + "metadata": { + "hash": "UFLDN40q0M?YBabb=|NL05-m=pE48|NGT1s:", + "size": 1072997, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2024041609, + "Model": "animexlXuebimix_v60LCM", + "steps": 25, + "hashes": { + "model": "c75ac9af77" + }, + "prompt": ",(\\ka tong feng jing ming xin pian\\),cloud, no humans, scenery, outdoors, reflection, bird, sunset, grass, lake, cloudy sky, water, mountain, minimalism", + "Version": "v1.6.0", + "sampler": "Euler a", + "VAE hash": "235745af8d", + "cfgScale": 4, + "resources": [ + { + "hash": "c75ac9af77", + "name": "animexlXuebimix_v60LCM", + "type": "model" + } + ], + "Model hash": "c75ac9af77", + "negativePrompt": "nsfw,lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],mutated hands,poorly drawn hands,{{{missing toes}}},extra fingers,fused fingers,{{{extra toes}}},{{{fused toes}}},{{{mutated toes}}},{{{bad foot}}},{{{mutated foot}}},lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,username,scan,[abstract],lowres,{bad},fewer,extra,{{{{worst quality}}}},", + "\"\u00e5\u008d\u00a1\u00e9\u0080\u009a\u00e9\u00a3\u008e\u00e6\u0099\u00af\u00e6\u0098\u008e\u00e4\u00bf\u00a1\u00e7\u0089\u0087": "a2f9d0397af7\"" + } + }, + { + "url": "https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/85fd5ceb-0961-4f23-9d2e-caac5f49a8db/width=450/4684290.jpeg", + "nsfw": "None", + "width": 768, + "height": 1024, + "hash": "UIKm*MXB02%0pMt7%MR-0EX5?Eso0NWEEMSg", + "type": "image", + "metadata": { + "hash": "UIKm*MXB02%0pMt7%MR-0EX5?Eso0NWEEMSg", + "size": 1000158, + "width": 768, + "height": 1024 + }, + "availability": "Public", + "sizeKB": null, + "meta": { + "VAE": "sdxl_vae.safetensors", + "Size": "768x1024", + "seed": 2641521797, + "Model": "animexlXuebimix_v60LCM", + "steps": 25, + "hashes": { + "model": "c75ac9af77" + }, + "prompt": ",(\\ka tong feng jing ming xin pian\\),no humans, cloud, reflection, scenery, outdoors, bird, sunset, lake, water, cloudy sky, mountain, tree, nature, reflective water, minimalism", + "Version": "v1.6.0", + "sampler": "Euler a", + "VAE hash": "235745af8d", + "cfgScale": 4, + "resources": [ + { + "hash": "c75ac9af77", + "name": "animexlXuebimix_v60LCM", + "type": "model" + } + ], + "Model hash": "c75ac9af77", + 
"negativePrompt": "nsfw,lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],mutated hands,poorly drawn hands,{{{missing toes}}},extra fingers,fused fingers,{{{extra toes}}},{{{fused toes}}},{{{mutated toes}}},{{{bad foot}}},{{{mutated foot}}},lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,artistic error,username,scan,[abstract],lowres,{bad},error,fewer,extra,missing,worst quality,jpeg artifacts,bad quality,watermark,unfinished,displeasing,chromatic aberration,signature,extra digits,username,scan,[abstract],lowres,{bad},fewer,extra,{{{{worst quality}}}},", + "\"\u00e5\u008d\u00a1\u00e9\u0080\u009a\u00e9\u00a3\u008e\u00e6\u0099\u00af\u00e6\u0098\u008e\u00e4\u00bf\u00a1\u00e7\u0089\u0087": "a2f9d0397af7\"" + } + } + ], + "downloadUrl": "https://civitai.com/api/download/models/266666" +} \ No newline at end of file diff --git "a/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.preview.png" "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.preview.png" new file mode 100644 index 0000000000000000000000000000000000000000..2fe6938534e92c50269aafe250b44c01d5e2b3f9 Binary files /dev/null and "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.preview.png" differ diff --git "a/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.safetensors" "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.safetensors" new file mode 100644 index 0000000000000000000000000000000000000000..a794149dd8de30af58098c2764cfb9d87928e475 --- /dev/null +++ "b/\345\215\241\351\200\232\351\243\216\346\231\257\346\230\216\344\277\241\347\211\207.safetensors" @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff19f7571b4e35149b3fae423fde7011ff055b37e52b6f000c6d74857768077e +size 28915004