NoNothing1 committed
Commit 12cc71b · verified · 1 Parent(s): 91c9410

Update app.py

Files changed (1)
  1. app.py +3 -4
app.py CHANGED
@@ -7,8 +7,7 @@ from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_repo_id = "John6666/unstable-illusion-sdxxxl-sdxl" # Replace to the model you would like to use
-#model_repo_id = "nonothing1/m4"
+model_repo_id = "nonothing1/m5"
 
 if torch.cuda.is_available():
     torch_dtype = torch.float16
@@ -70,7 +69,7 @@ with gr.Blocks(css=css) as demo:
                 max_lines=1,
                 placeholder="Enter your prompt",
                 container=False,
-                value="[explicit default prompt removed]"
+                value=""
             )
 
             run_button = gr.Button("Run", scale=0, variant="primary")
@@ -82,7 +81,7 @@ with gr.Blocks(css=css) as demo:
                 label="Negative prompt",
                 max_lines=7,
                 placeholder="Enter a negative prompt",
-                value="[explicit default negative prompt removed]"
+                value=""
            )
 
            seed = gr.Slider(
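The diff only touches the model repo id and the two default prompt values; it does not show where model_repo_id is consumed further down in app.py. A minimal sketch of how these variables are typically used, assuming the file follows the standard diffusers/Gradio text-to-image template (the pipeline loading and inference calls below are assumptions, not part of this commit):

    # Sketch only: assumes the rest of app.py matches the standard template.
    import torch
    from diffusers import DiffusionPipeline

    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
    model_repo_id = "nonothing1/m5"  # value introduced by this commit

    # Load the pipeline from the Hub and move it to the selected device.
    pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
    pipe = pipe.to(device)

    # The Gradio UI passes the Textbox contents (now defaulting to "") into
    # these arguments at inference time.
    image = pipe(
        prompt="a photo of an astronaut riding a horse",  # example prompt, not from the commit
        negative_prompt="",
        num_inference_steps=25,
    ).images[0]
    image.save("output.png")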