anzorq committed
Commit 10f783d • Parent: cc79574

Update template/app_advanced.py

Files changed (1)
  1. template/app_advanced.py +4 -3
template/app_advanced.py CHANGED
@@ -37,7 +37,7 @@ def error_str(error, title="Error"):
     return f"""#### {title}
                 {error}""" if error else ""
 
-def inference(prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt="", auto_prefix=True):
+def inference(prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt="", auto_prefix=False):
 
     generator = torch.Generator('cuda').manual_seed(seed) if seed != 0 else None
     prompt = f"{prefix} {prompt}" if auto_prefix else prompt
@@ -97,7 +97,8 @@ with gr.Blocks(css=css) as demo:
                 <h1>$title</h1>
               </div>
               <p>
-                $description
+                $description<br>
+                {"Add the following tokens to your prompts for the model to work properly: <b>prefix</b>" if prefix else ""}
               </p>
               Running on {"<b>GPU 🔥</b>" if torch.cuda.is_available() else f"<b>CPU 🥶</b>. For faster inference it is recommended to <b>upgrade to GPU in <a href='https://huggingface.co/spaces/$space_id/settings'>Settings</a></b>"}<br><br>
               <a style="display:inline-block" href="https://huggingface.co/spaces/$space_id?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a>
@@ -119,7 +120,7 @@ with gr.Blocks(css=css) as demo:
         with gr.Tab("Options"):
             with gr.Group():
                 neg_prompt = gr.Textbox(label="Negative prompt", placeholder="What to exclude from the image")
-                auto_prefix = gr.Checkbox(label="Prefix styling tokens automatically ($prefix)", value=True)
+                auto_prefix = gr.Checkbox(label="Prefix styling tokens automatically ($prefix)", value=prefix, visible=prefix)
 
         with gr.Row():
             guidance = gr.Slider(label="Guidance scale", value=7.5, maximum=15)
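
For reference, a minimal standalone sketch (not part of the commit) of the behavior these defaults aim for: with an empty prefix the checkbox is hidden and unticked, and prompts pass through unchanged. The prefix value and the apply_prefix helper below are hypothetical.

```python
import gradio as gr

prefix = ""  # hypothetical: empty when the model needs no styling tokens

def apply_prefix(prompt: str, auto_prefix: bool) -> str:
    # Mirrors the template's inference() logic: prepend the prefix only
    # when the "auto prefix" option is enabled.
    return f"{prefix} {prompt}" if auto_prefix else prompt

with gr.Blocks() as demo:
    # With an empty prefix the checkbox is hidden and defaults to unchecked,
    # analogous to the value=prefix, visible=prefix pattern in the commit.
    auto_prefix = gr.Checkbox(
        label=f"Prefix styling tokens automatically ({prefix})",
        value=bool(prefix),
        visible=bool(prefix),
    )

print(apply_prefix("a castle at sunset", bool(prefix)))  # -> "a castle at sunset"
```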