Borcherding committed on
Commit
67ecd0e
·
verified ·
1 Parent(s): 7c20e4b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +192 -56
app.py CHANGED
@@ -10,11 +10,12 @@ from image_gen_aux import DepthPreprocessor
10
  MAX_SEED = np.iinfo(np.int32).max
11
  MAX_IMAGE_SIZE = 2048
12
 
13
- # Initialize models without moving to CUDA yet
14
  pipe = FluxControlPipeline.from_pretrained(
15
  "black-forest-labs/FLUX.1-Depth-dev",
16
  torch_dtype=torch.bfloat16
17
  )
 
18
  processor = DepthPreprocessor.from_pretrained("LiheYoung/depth-anything-large-hf")
19
 
20
  @spaces.GPU
@@ -75,92 +76,227 @@ def infer(control_image, prompt, seed=42, randomize_seed=False, width=1024, heig
75
  except Exception as e:
76
  return None, f"Error during inference: {str(e)}"
77
 
78
- css="""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
79
  #col-container {
80
  margin: 0 auto;
81
- max-width: 520px;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82
  }
83
  """
84
 
85
  with gr.Blocks(css=css) as demo:
86
-
87
  with gr.Column(elem_id="col-container"):
88
- gr.Markdown(f"""# FLUX.1 Depth [dev] with LoRA Support
89
  12B param rectified flow transformer structural conditioning tuned, guidance-distilled from [FLUX.1 [pro]](https://blackforestlabs.ai/)
90
  [[non-commercial license](https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/LICENSE.md)] [[blog](https://blackforestlabs.ai/announcing-black-forest-labs/)] [[model](https://huggingface.co/black-forest-labs/FLUX.1-dev)]
91
  """)
92
 
93
- # LoRA controls
94
  with gr.Row():
95
  lora_path = gr.Textbox(
96
  label="HuggingFace LoRA Path",
97
- placeholder="e.g., Borcherding/FLUX.1-dev-LoRA-AutumnSpringTrees"
 
98
  )
99
- load_lora_btn = gr.Button("Load LoRA")
100
- unload_lora_btn = gr.Button("Unload LoRA")
101
 
102
  lora_status = gr.Textbox(label="LoRA Status", interactive=False)
103
 
104
- control_image = gr.Image(label="Upload the image for control", type="pil")
105
  with gr.Row():
106
  prompt = gr.Text(
107
  label="Prompt",
108
- show_label=False,
109
  max_lines=1,
110
  placeholder="Enter your prompt",
111
- container=False,
112
  )
113
  run_button = gr.Button("Run", scale=0)
114
-
115
- result = gr.Image(label="Result", show_label=False)
116
- error_message = gr.Textbox(label="Error", visible=False)
 
 
 
 
 
 
 
 
 
 
117
 
118
  with gr.Accordion("Advanced Settings", open=False):
119
- seed = gr.Slider(
120
- label="Seed",
121
- minimum=0,
122
- maximum=MAX_SEED,
123
- step=1,
124
- value=0,
125
- )
126
-
127
- randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
128
-
129
  with gr.Row():
130
- width = gr.Slider(
131
- label="Width",
132
- minimum=256,
133
- maximum=MAX_IMAGE_SIZE,
134
- step=32,
135
- value=1024,
136
- )
137
-
138
- height = gr.Slider(
139
- label="Height",
140
- minimum=256,
141
- maximum=MAX_IMAGE_SIZE,
142
- step=32,
143
- value=1024,
144
- )
145
 
146
  with gr.Row():
147
- guidance_scale = gr.Slider(
148
- label="Guidance Scale",
149
- minimum=1,
150
- maximum=30,
151
- step=0.5,
152
- value=10,
153
- )
154
-
155
- num_inference_steps = gr.Slider(
156
- label="Number of inference steps",
157
- minimum=1,
158
- maximum=50,
159
- step=1,
160
- value=28,
161
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
162
 
163
- # Event handlers
164
  load_lora_btn.click(
165
  fn=load_lora,
166
  inputs=[lora_path],
 
10
  MAX_SEED = np.iinfo(np.int32).max
11
  MAX_IMAGE_SIZE = 2048
12
 
13
+ # Initialize models without moving to CUDA yet - following working version
14
  pipe = FluxControlPipeline.from_pretrained(
15
  "black-forest-labs/FLUX.1-Depth-dev",
16
  torch_dtype=torch.bfloat16
17
  )
18
+ pipe.enable_attention_slicing() # Keep this as it's helpful
19
  processor = DepthPreprocessor.from_pretrained("LiheYoung/depth-anything-large-hf")
20
 
21
  @spaces.GPU
 
76
  except Exception as e:
77
  return None, f"Error during inference: {str(e)}"
78
 
79
+ css = """
80
+ @keyframes gradientMove {
81
+ 0% { background-position: 0% 50%; }
82
+ 50% { background-position: 100% 50%; }
83
+ 100% { background-position: 0% 50%; }
84
+ }
85
+ body {
86
+ background: black !important;
87
+ margin: 0;
88
+ min-height: 100vh;
89
+ }
90
+ body::before {
91
+ content: '';
92
+ position: fixed;
93
+ top: 0;
94
+ left: 0;
95
+ right: 0;
96
+ bottom: 0;
97
+ z-index: -1;
98
+ background:
99
+ linear-gradient(125deg, rgba(255,105,180,0.3), rgba(0,0,0,0.5)),
100
+ url('data:image/svg+xml,<svg viewBox="0 0 200 200" xmlns="http://www.w3.org/2000/svg"><filter id="noise"><feTurbulence type="fractalNoise" baseFrequency="0.005" numOctaves="3" /><feColorMatrix type="saturate" values="0"/></filter><rect width="100%" height="100%" filter="url(%23noise)"/></svg>');
101
+ filter: blur(70px);
102
+ animation: gradientMove 15s ease infinite;
103
+ background-size: 400% 400%;
104
+ opacity: 0.8;
105
+ }
106
+ :root {
107
+ --hot-pink: #FF69B4;
108
+ --light-pink: #FFB6C6;
109
+ --dark-pink: #FF1493;
110
+ }
111
  #col-container {
112
  margin: 0 auto;
113
+ max-width: 1200px;
114
+ padding: 2rem;
115
+ background: rgba(0, 0, 0, 0.85);
116
+ border-radius: 15px;
117
+ box-shadow: 0 0 20px rgba(255, 105, 180, 0.3);
118
+ border: 2px solid var(--hot-pink);
119
+ position: relative;
120
+ z-index: 1;
121
+ }
122
+ .gr-box {
123
+ background: var(--hot-pink) !important;
124
+ border: 2px solid black !important;
125
+ border-radius: 8px !important;
126
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.2) !important;
127
+ transition: all 0.3s ease !important;
128
+ }
129
+ .gr-box:hover {
130
+ box-shadow: 0 0 15px rgba(255, 255, 255, 0.3) !important;
131
+ }
132
+ .gr-button {
133
+ background: var(--hot-pink) !important;
134
+ border: 2px solid black !important;
135
+ color: black !important;
136
+ font-weight: 600 !important;
137
+ transition: all 0.3s ease !important;
138
+ }
139
+ .gr-button:hover {
140
+ background: var(--dark-pink) !important;
141
+ box-shadow: 0 0 15px rgba(255, 255, 255, 0.5);
142
+ transform: translateY(-2px);
143
+ }
144
+ .gr-input, .gr-input-label {
145
+ background: var(--hot-pink) !important;
146
+ border: 2px solid black !important;
147
+ border-radius: 8px !important;
148
+ color: black !important;
149
+ transition: all 0.3s ease !important;
150
+ }
151
+ .gr-input::placeholder {
152
+ color: rgba(0, 0, 0, 0.6) !important;
153
+ }
154
+ .gr-input:focus {
155
+ box-shadow: 0 0 15px rgba(255, 255, 255, 0.3) !important;
156
+ }
157
+ .gr-form {
158
+ gap: 1.5rem !important;
159
+ }
160
+ .gr-slider {
161
+ accent-color: var(--hot-pink) !important;
162
+ }
163
+ .gr-slider-value {
164
+ color: white !important;
165
+ }
166
+ .gr-checkbox {
167
+ accent-color: var(--hot-pink) !important;
168
+ }
169
+ .gr-panel {
170
+ background: var(--hot-pink) !important;
171
+ border: 2px solid black !important;
172
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.2) !important;
173
+ }
174
+ .gr-accordion {
175
+ border: 2px solid black !important;
176
+ background: var(--hot-pink) !important;
177
+ border-radius: 10px !important;
178
+ margin-top: 1.5rem !important;
179
+ }
180
+ label, .gr-box label, .gr-accordion-title {
181
+ color: black !important;
182
+ font-weight: 600 !important;
183
+ }
184
+ .markdown {
185
+ color: white !important;
186
+ }
187
+ .markdown a {
188
+ color: var(--hot-pink) !important;
189
+ text-decoration: none !important;
190
+ transition: color 0.3s ease !important;
191
+ }
192
+ .markdown a:hover {
193
+ color: var(--light-pink) !important;
194
+ }
195
+ .upload-box {
196
+ border: 2px dashed var(--hot-pink) !important;
197
+ background: rgba(0, 0, 0, 0.3) !important;
198
+ transition: all 0.3s ease !important;
199
+ }
200
+ .upload-box:hover {
201
+ border-color: var(--light-pink) !important;
202
+ box-shadow: 0 0 15px rgba(255, 105, 180, 0.2) !important;
203
+ }
204
+ .generating {
205
+ box-shadow: 0 0 20px rgba(255, 255, 255, 0.8) !important;
206
+ }
207
+ .progress-bar {
208
+ background: var(--hot-pink) !important;
209
  }
210
  """
211
 
212
  with gr.Blocks(css=css) as demo:
 
213
  with gr.Column(elem_id="col-container"):
214
+ gr.Markdown("""# FLUX.1 Depth [dev] with LoRA Support
215
  12B param rectified flow transformer structural conditioning tuned, guidance-distilled from [FLUX.1 [pro]](https://blackforestlabs.ai/)
216
  [[non-commercial license](https://huggingface.co/black-forest-labs/FLUX.1-dev/blob/main/LICENSE.md)] [[blog](https://blackforestlabs.ai/announcing-black-forest-labs/)] [[model](https://huggingface.co/black-forest-labs/FLUX.1-dev)]
217
  """)
218
 
 
219
  with gr.Row():
220
  lora_path = gr.Textbox(
221
  label="HuggingFace LoRA Path",
222
+ placeholder="e.g., Borcherding/FLUX.1-dev-LoRA-AutumnSpringTrees",
223
+ scale=3
224
  )
225
+ load_lora_btn = gr.Button("Load LoRA", scale=1)
226
+ unload_lora_btn = gr.Button("Unload LoRA", scale=1)
227
 
228
  lora_status = gr.Textbox(label="LoRA Status", interactive=False)
229
 
 
230
  with gr.Row():
231
  prompt = gr.Text(
232
  label="Prompt",
233
+ show_label=True,
234
  max_lines=1,
235
  placeholder="Enter your prompt",
236
+ container=True,
237
  )
238
  run_button = gr.Button("Run", scale=0)
239
+
240
+ with gr.Row(equal_height=True):
241
+ with gr.Column(scale=1):
242
+ control_image = gr.Image(
243
+ label="Control Image",
244
+ type="pil",
245
+ elem_id="image-upload"
246
+ )
247
+ with gr.Column(scale=1):
248
+ result = gr.Image(
249
+ label="Generated Result",
250
+ elem_id="result-image"
251
+ )
252
 
253
  with gr.Accordion("Advanced Settings", open=False):
 
 
 
 
 
 
 
 
 
 
254
  with gr.Row():
255
+ with gr.Column(scale=1):
256
+ seed = gr.Slider(
257
+ label="Seed",
258
+ minimum=0,
259
+ maximum=MAX_SEED,
260
+ step=1,
261
+ value=0,
262
+ )
263
+ randomize_seed = gr.Checkbox(
264
+ label="Randomize seed",
265
+ value=True
266
+ )
 
 
 
267
 
268
  with gr.Row():
269
+ with gr.Column(scale=1):
270
+ width = gr.Slider(
271
+ label="Width",
272
+ minimum=256,
273
+ maximum=MAX_IMAGE_SIZE,
274
+ step=32,
275
+ value=1024,
276
+ )
277
+ height = gr.Slider(
278
+ label="Height",
279
+ minimum=256,
280
+ maximum=MAX_IMAGE_SIZE,
281
+ step=32,
282
+ value=1024,
283
+ )
284
+ with gr.Column(scale=1):
285
+ guidance_scale = gr.Slider(
286
+ label="Guidance Scale",
287
+ minimum=1,
288
+ maximum=30,
289
+ step=0.5,
290
+ value=10,
291
+ )
292
+ num_inference_steps = gr.Slider(
293
+ label="Number of inference steps",
294
+ minimum=1,
295
+ maximum=50,
296
+ step=1,
297
+ value=28,
298
+ )
299
 
 
300
  load_lora_btn.click(
301
  fn=load_lora,
302
  inputs=[lora_path],