hysts (HF staff) committed
Commit 46438b0 (1 parent: 0aff08c)
Files changed (5)
  1. .pre-commit-config.yaml +3 -12
  2. README.md +4 -1
  3. app.py +73 -104
  4. model.py +5 -8
  5. requirements.txt +5 -5
.pre-commit-config.yaml CHANGED
@@ -20,26 +20,17 @@ repos:
   - id: docformatter
     args: ['--in-place']
 - repo: https://github.com/pycqa/isort
-  rev: 5.10.1
+  rev: 5.12.0
   hooks:
   - id: isort
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v0.812
+  rev: v0.991
   hooks:
   - id: mypy
     args: ['--ignore-missing-imports']
+    additional_dependencies: ['types-python-slugify']
 - repo: https://github.com/google/yapf
   rev: v0.32.0
   hooks:
   - id: yapf
     args: ['--parallel', '--in-place']
-- repo: https://github.com/kynan/nbstripout
-  rev: 0.5.0
-  hooks:
-  - id: nbstripout
-    args: ['--extra-keys', 'metadata.interpreter metadata.kernelspec cell.metadata.pycharm']
-- repo: https://github.com/nbQA-dev/nbQA
-  rev: 1.3.1
-  hooks:
-  - id: nbqa-isort
-  - id: nbqa-yapf
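A note on the new additional_dependencies entry: the mirrors-mypy hook runs in its own isolated environment, so any PEP 561 stub packages mypy needs must be listed there explicitly, and types-python-slugify provides the stubs for the python-slugify package. The snippet below is a hypothetical illustration of the kind of call those stubs let mypy check; the Space's actual slugify usage is not part of this diff, and output_filename is an invented name.

# Hypothetical example: mypy can only type-check this call once the
# 'types-python-slugify' stubs are installed in its hook environment.
from slugify import slugify


def output_filename(title: str, seed: int) -> str:
    # e.g. slugify('StyleGAN Human') -> 'stylegan-human'
    return f'{slugify(title)}-{seed:04d}.png'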
README.md CHANGED
@@ -4,9 +4,12 @@ emoji: 🌍
 colorFrom: pink
 colorTo: red
 sdk: gradio
-sdk_version: 3.0.17
+sdk_version: 3.35.2
 app_file: app.py
 pinned: false
+suggested_hardware: t4-small
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
+
+https://arxiv.org/abs/2204.11823
app.py CHANGED
@@ -2,111 +2,80 @@
 
 from __future__ import annotations
 
-import argparse
-
 import gradio as gr
 
 from model import Model
 
-DESCRIPTION = '''# StyleGAN-Human
-
-This is an unofficial demo for [https://github.com/stylegan-human/StyleGAN-Human](https://github.com/stylegan-human/StyleGAN-Human).
-'''
-FOOTER = '<img id="visitor-badge" alt="visitor badge" src="https://visitor-badge.glitch.me/badge?page_id=gradio-blocks.stylegan-human" />'
-
-
-def parse_args() -> argparse.Namespace:
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--device', type=str, default='cpu')
-    parser.add_argument('--theme', type=str)
-    parser.add_argument('--share', action='store_true')
-    parser.add_argument('--port', type=int)
-    parser.add_argument('--disable-queue',
-                        dest='enable_queue',
-                        action='store_false')
-    return parser.parse_args()
-
-
-def main():
-    args = parse_args()
-    model = Model(device=args.device)
-
-    with gr.Blocks(theme=args.theme, css='style.css') as demo:
-        gr.Markdown(DESCRIPTION)
-
-        with gr.Row():
-            with gr.Column():
-                with gr.Row():
-                    seed1 = gr.Number(value=6876, label='Seed 1')
-                    psi1 = gr.Slider(0,
-                                     2,
-                                     value=0.7,
-                                     step=0.05,
-                                     label='Truncation psi 1')
-                with gr.Row():
-                    generate_button1 = gr.Button('Generate')
-                with gr.Row():
-                    generated_image1 = gr.Image(type='numpy',
-                                                label='Generated Image 1')
-
-            with gr.Column():
-                with gr.Row():
-                    seed2 = gr.Number(value=6886, label='Seed 2')
-                    psi2 = gr.Slider(0,
-                                     2,
-                                     value=0.7,
-                                     step=0.05,
-                                     label='Truncation psi 2')
-                with gr.Row():
-                    generate_button2 = gr.Button('Generate')
-                with gr.Row():
-                    generated_image2 = gr.Image(type='numpy',
-                                                label='Generated Image 2')
-
-        with gr.Row():
-            with gr.Column():
-                with gr.Row():
-                    num_frames = gr.Slider(
-                        0,
-                        41,
-                        value=7,
-                        step=1,
-                        label='Number of Intermediate Frames')
-                with gr.Row():
-                    interpolate_button = gr.Button('Interpolate')
-                with gr.Row():
-                    interpolated_images = gr.Gallery(label='Output Images')
-
-        gr.Markdown(FOOTER)
-
-        generate_button1.click(model.generate_single_image,
-                               inputs=[
-                                   seed1,
-                                   psi1,
-                               ],
-                               outputs=generated_image1)
-        generate_button2.click(model.generate_single_image,
-                               inputs=[
-                                   seed2,
-                                   psi2,
-                               ],
-                               outputs=generated_image2)
-        interpolate_button.click(model.generate_interpolated_images,
-                                 inputs=[
-                                     seed1,
-                                     psi1,
-                                     seed2,
-                                     psi2,
-                                     num_frames,
-                                 ],
-                                 outputs=interpolated_images)
-
-    demo.launch(
-        enable_queue=args.enable_queue,
-        server_port=args.port,
-        share=args.share,
-    )
-
-
-if __name__ == '__main__':
-    main()
+DESCRIPTION = '# [StyleGAN-Human](https://github.com/stylegan-human/StyleGAN-Human)'
+
+model = Model()
+
+with gr.Blocks(css='style.css') as demo:
+    gr.Markdown(DESCRIPTION)
+    with gr.Row():
+        with gr.Column():
+            with gr.Row():
+                seed1 = gr.Number(label='Seed 1', value=6876)
+                psi1 = gr.Slider(label='Truncation psi 1',
+                                 minimum=0,
+                                 maximum=2,
+                                 step=0.05,
+                                 value=0.7)
+            with gr.Row():
+                generate_button1 = gr.Button('Generate')
+            with gr.Row():
+                generated_image1 = gr.Image(label='Generated Image 1',
+                                            type='numpy',
+                                            height=600)
+
+        with gr.Column():
+            with gr.Row():
+                seed2 = gr.Number(label='Seed 2', value=6886)
+                psi2 = gr.Slider(label='Truncation psi 2',
+                                 minimum=0,
+                                 maximum=2,
+                                 step=0.05,
+                                 value=0.7)
+            with gr.Row():
+                generate_button2 = gr.Button('Generate')
+            with gr.Row():
+                generated_image2 = gr.Image(label='Generated Image 2',
+                                            type='numpy',
+                                            height=600)
+
+    with gr.Row():
+        with gr.Column():
+            with gr.Row():
+                num_frames = gr.Slider(label='Number of Intermediate Frames',
+                                       minimum=0,
+                                       maximum=41,
+                                       step=1,
+                                       value=7)
+            with gr.Row():
+                interpolate_button = gr.Button('Interpolate')
+            with gr.Row():
+                interpolated_images = gr.Gallery(label='Output Images',
+                                                 object_fit='scale-down')
+
+    generate_button1.click(model.generate_single_image,
+                           inputs=[
+                               seed1,
+                               psi1,
+                           ],
+                           outputs=generated_image1)
+    generate_button2.click(model.generate_single_image,
+                           inputs=[
+                               seed2,
+                               psi2,
+                           ],
+                           outputs=generated_image2)
+    interpolate_button.click(model.generate_interpolated_images,
+                             inputs=[
+                                 seed1,
+                                 psi1,
+                                 seed2,
+                                 psi2,
+                                 num_frames,
+                             ],
+                             outputs=interpolated_images)
+demo.queue(max_size=10).launch()
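In short, the rewrite drops the argparse-driven main() and builds the Blocks UI at module level: port, sharing, and theme now come from Gradio's defaults, and the removed --disable-queue flag is replaced by an explicit bounded queue. Below is a minimal sketch of that pattern, reduced to a single generate button with the layout rows and columns omitted; only Model.generate_single_image comes from this repo, the rest is illustrative.

import gradio as gr

from model import Model

model = Model()

with gr.Blocks(css='style.css') as demo:
    seed = gr.Number(label='Seed', value=6876)
    psi = gr.Slider(label='Truncation psi',
                    minimum=0,
                    maximum=2,
                    step=0.05,
                    value=0.7)
    button = gr.Button('Generate')
    image = gr.Image(label='Generated Image', type='numpy')
    button.click(model.generate_single_image,
                 inputs=[seed, psi],
                 outputs=image)

# queue(max_size=10) caps the number of pending requests; launch() uses
# Gradio's defaults in place of the removed --port/--share/--theme flags.
demo.queue(max_size=10).launch()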
model.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import os
 import pathlib
 import pickle
 import sys
@@ -14,18 +13,16 @@ app_dir = pathlib.Path(__file__).parent
 submodule_dir = app_dir / 'StyleGAN-Human'
 sys.path.insert(0, submodule_dir.as_posix())
 
-HF_TOKEN = os.environ['HF_TOKEN']
-
 
 class Model:
-    def __init__(self, device: str | torch.device):
-        self.device = torch.device(device)
+    def __init__(self):
+        self.device = torch.device(
+            'cuda:0' if torch.cuda.is_available() else 'cpu')
         self.model = self.load_model('stylegan_human_v2_1024.pkl')
 
     def load_model(self, file_name: str) -> nn.Module:
-        path = hf_hub_download('hysts/StyleGAN-Human',
-                               f'models/{file_name}',
-                               use_auth_token=HF_TOKEN)
+        path = hf_hub_download('public-data/StyleGAN-Human',
+                               f'models/{file_name}')
         with open(path, 'rb') as f:
             model = pickle.load(f)['G_ema']
             model.eval()
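The model.py change removes the hard requirement on an HF_TOKEN secret: the checkpoint now comes from the public public-data/StyleGAN-Human repo, so hf_hub_download needs no auth token, and the device is picked automatically. Below is a standalone sketch of the new loading path, assuming the same models/stylegan_human_v2_1024.pkl file and G_ema pickle key shown above; as in model.py, the StyleGAN-Human submodule must be importable so pickle can resolve the generator's classes.

import pickle
import sys

import torch
from huggingface_hub import hf_hub_download

# As in model.py, put the StyleGAN-Human checkout on sys.path so that
# unpickling can find the generator's class definitions.
sys.path.insert(0, 'StyleGAN-Human')

# Public repo, so no use_auth_token / HF_TOKEN is needed anymore.
path = hf_hub_download('public-data/StyleGAN-Human',
                       'models/stylegan_human_v2_1024.pkl')

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
with open(path, 'rb') as f:
    generator = pickle.load(f)['G_ema']
generator.eval()
generator.to(device)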
requirements.txt CHANGED
@@ -1,5 +1,5 @@
-numpy==1.22.3
-Pillow==9.1.0
-scipy==1.8.0
-torch==1.11.0
-torchvision==0.12.0
+numpy==1.23.5
+Pillow==10.0.0
+scipy==1.10.1
+torch==2.0.1
+torchvision==0.15.2
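The pins move the Space from the torch 1.11 / Pillow 9 stack to torch 2.0.1, torchvision 0.15.2, and Pillow 10, alongside the Gradio 3.35.2 bump in README.md. A quick, hypothetical sanity check one might run in the Space's environment to confirm the pinned stack imports together and can see the GPU:

import numpy
import PIL
import scipy
import torch
import torchvision

# Should print the versions pinned above; CUDA availability depends on the
# hardware the Space runs on (suggested_hardware: t4-small).
print('numpy', numpy.__version__)
print('Pillow', PIL.__version__)
print('scipy', scipy.__version__)
print('torch', torch.__version__, '| cuda:', torch.cuda.is_available())
print('torchvision', torchvision.__version__)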