Fabrice-TIERCELIN committed on
Commit c5303c2
1 Parent(s): f2ad942

model.ae_dtype = convert_dtype(ae_dtype)

Files changed (1)
  1. app.py +15 -21
app.py CHANGED
@@ -82,7 +82,12 @@ def check(input_image):
         raise gr.Error("Please provide an image to restore.")
 
 @spaces.GPU(duration=420)
-def stage1_process(input_image, gamma_correction):
+def stage1_process(
+    input_image,
+    gamma_correction,
+    diff_dtype,
+    ae_dtype
+):
     print('stage1_process ==>>')
     if torch.cuda.device_count() == 0:
         gr.Warning('Set this space to GPU config to make it work.')
@@ -93,6 +98,10 @@ def stage1_process(input_image, gamma_correction):
     # stage1
     LQ = np.array(LQ) / 255 * 2 - 1
     LQ = torch.tensor(LQ, dtype=torch.float32).permute(2, 0, 1).unsqueeze(0).to(SUPIR_device)[:, :3, :, :]
+
+    model.ae_dtype = convert_dtype(ae_dtype)
+    model.model.dtype = convert_dtype(diff_dtype)
+
     LQ = model.batchify_denoise(LQ, is_stage1=True)
     LQ = (LQ[0].permute(1, 2, 0) * 127.5 + 127.5).cpu().numpy().round().clip(0, 255).astype(np.uint8)
     # gamma correction
@@ -318,14 +327,6 @@ def restore(
         gr.Warning('Set this space to GPU config to make it work.')
         return [input_image] * 2, [input_image], None, None
     torch.cuda.set_device(SUPIR_device)
-    event_id = str(time.time_ns())
-    event_dict = {'event_id': event_id, 'localtime': time.ctime(), 'prompt': prompt, 'a_prompt': a_prompt,
-                  'n_prompt': n_prompt, 'num_samples': num_samples, 'upscale': upscale, 'edm_steps': edm_steps,
-                  's_stage1': s_stage1, 's_stage2': s_stage2, 's_cfg': s_cfg, 'seed': seed, 's_churn': s_churn,
-                  's_noise': s_noise, 'color_fix_type': color_fix_type, 'diff_dtype': diff_dtype, 'ae_dtype': ae_dtype,
-                  'gamma_correction': gamma_correction, 'linear_CFG': linear_CFG, 'linear_s_stage2': linear_s_stage2,
-                  'spt_linear_CFG': spt_linear_CFG, 'spt_linear_s_stage2': spt_linear_s_stage2,
-                  'model_select': model_select}
 
     if model_select != model.current_model:
         print('load ' + model_select)
@@ -362,15 +363,6 @@ def restore(
         0, 255).astype(np.uint8)
     results = [x_samples[i] for i in range(num_samples)]
 
-    if args.log_history:
-        os.makedirs(f'./history/{event_id[:5]}/{event_id[5:]}', exist_ok=True)
-        with open(f'./history/{event_id[:5]}/{event_id[5:]}/logs.txt', 'w') as f:
-            f.write(str(event_dict))
-            f.close()
-        Image.fromarray(input_image).save(f'./history/{event_id[:5]}/{event_id[5:]}/LQ.png')
-        for i, result in enumerate(results):
-            Image.fromarray(result).save(f'./history/{event_id[:5]}/{event_id[5:]}/HQ_{i}.png')
-
     # All the results have the same size
     result_height, result_width, result_channel = np.array(results[0]).shape
 
@@ -452,7 +444,7 @@ title_html = """
 The aim of SUPIR is beauty and illustration.
 Most of the processes only last a few minutes.
 This demo can handle huge images, but the process will be aborted if it lasts more than 8 min.
-Please <a href="https://huggingface.co/spaces/Fabrice-TIERCELIN/SUPIR/discussions/new">leave a message in discussion</a> if you encounter issues.
+Please leave a <a href="https://huggingface.co/spaces/Fabrice-TIERCELIN/SUPIR/discussions/new">message in discussion</a> if you encounter issues.
 
 <p><center><a href="https://arxiv.org/abs/2401.13627">Paper</a> &emsp; <a href="http://supir.xpixel.group/">Project Page</a> &emsp; <a href="https://huggingface.co/blog/MonsterMMORPG/supir-sota-image-upscale-better-than-magnific-ai">Local Install Guide</a></center></p>
 """
@@ -477,7 +469,7 @@ with gr.Blocks() as interface:
         gr.HTML("""
         <p style="background-color: red;"><big><big><big><b>⚠️To use SUPIR, <a href="https://huggingface.co/spaces/Fabrice-TIERCELIN/SUPIR?duplicate=true">duplicate this space</a> and set a GPU with 30 GB VRAM.</b>
 
-        You can't use SUPIR directly here because this space runs on a CPU, which is not enough for SUPIR. This is a template space. Please provide feedback if you have issues.
+        You can't use SUPIR directly here because this space runs on a CPU, which is not enough for SUPIR. Please provide <a href="https://huggingface.co/spaces/Fabrice-TIERCELIN/SUPIR/discussions/new">feedback</a> if you have issues.
         </big></big></big></p>
         """)
     gr.HTML(title_html)
@@ -677,7 +669,9 @@ with gr.Blocks() as interface:
         input_image
     ], outputs = [], queue = False, show_progress = False).success(fn = stage1_process, inputs = [
         input_image,
-        gamma_correction
+        gamma_correction,
+        diff_dtype,
+        ae_dtype
    ], outputs=[
        denoise_image,
        denoise_information
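
For context on the new arguments: `diff_dtype` and `ae_dtype` arrive as the string values of the demo's precision radio controls, and `convert_dtype` turns them into `torch` dtypes before `batchify_denoise` runs, so the stage-1 denoise honors the precision chosen in the UI. Below is a minimal sketch of the kind of mapping `convert_dtype` is expected to perform, assuming the usual 'fp32' / 'fp16' / 'bf16' choices; the real helper ships with SUPIR, and the exact accepted strings and error handling here are assumptions.

# Hedged sketch: mapping the demo's precision strings to torch dtypes,
# mirroring what SUPIR's convert_dtype helper is assumed to do.
import torch

def convert_dtype_sketch(dtype_str: str) -> torch.dtype:
    mapping = {
        'fp32': torch.float32,   # full precision
        'fp16': torch.float16,   # half precision
        'bf16': torch.bfloat16,  # bfloat16
    }
    if dtype_str not in mapping:
        raise ValueError(f"Unsupported dtype string: {dtype_str}")
    return mapping[dtype_str]

# With this commit, stage1_process applies the UI choices itself:
#     model.ae_dtype = convert_dtype(ae_dtype)        # autoencoder precision
#     model.model.dtype = convert_dtype(diff_dtype)   # diffusion model precision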