hysts HF staff committed on
Commit
1af5cc8
β€’
1 Parent(s): 3b6fea8
Files changed (4) hide show
  1. .pre-commit-config.yaml +2 -12
  2. README.md +1 -1
  3. app.py +11 -42
  4. model.py +4 -3
.pre-commit-config.yaml CHANGED
@@ -21,11 +21,11 @@ repos:
21
  - id: docformatter
22
  args: ['--in-place']
23
  - repo: https://github.com/pycqa/isort
24
- rev: 5.10.1
25
  hooks:
26
  - id: isort
27
  - repo: https://github.com/pre-commit/mirrors-mypy
28
- rev: v0.812
29
  hooks:
30
  - id: mypy
31
  args: ['--ignore-missing-imports']
@@ -34,13 +34,3 @@ repos:
34
  hooks:
35
  - id: yapf
36
  args: ['--parallel', '--in-place']
37
- - repo: https://github.com/kynan/nbstripout
38
- rev: 0.5.0
39
- hooks:
40
- - id: nbstripout
41
- args: ['--extra-keys', 'metadata.interpreter metadata.kernelspec cell.metadata.pycharm']
42
- - repo: https://github.com/nbQA-dev/nbQA
43
- rev: 1.3.1
44
- hooks:
45
- - id: nbqa-isort
46
- - id: nbqa-yapf
 
21
  - id: docformatter
22
  args: ['--in-place']
23
  - repo: https://github.com/pycqa/isort
24
+ rev: 5.12.0
25
  hooks:
26
  - id: isort
27
  - repo: https://github.com/pre-commit/mirrors-mypy
28
+ rev: v0.991
29
  hooks:
30
  - id: mypy
31
  args: ['--ignore-missing-imports']
 
34
  hooks:
35
  - id: yapf
36
  args: ['--parallel', '--in-place']
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: πŸ“ˆ
4
  colorFrom: green
5
  colorTo: indigo
6
  sdk: gradio
7
- sdk_version: 3.0.11
8
  app_file: app.py
9
  pinned: false
10
  ---
 
4
  colorFrom: green
5
  colorTo: indigo
6
  sdk: gradio
7
+ sdk_version: 3.19.1
8
  app_file: app.py
9
  pinned: false
10
  ---
app.py CHANGED
@@ -2,8 +2,6 @@
2
 
3
  from __future__ import annotations
4
 
5
- import argparse
6
-
7
  import gradio as gr
8
 
9
  from model import Model
@@ -12,48 +10,19 @@ DESCRIPTION = '''# MangaLineExtraction_PyTorch
12
 
13
  This is an unofficial demo for [https://github.com/ljsabc/MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch).
14
  '''
15
- FOOTER = '<img id="visitor-badge" alt="visitor badge" src="https://visitor-badge.glitch.me/badge?page_id=hysts.mangalineextraction_pytorch" />'
16
-
17
-
18
- def parse_args() -> argparse.Namespace:
19
- parser = argparse.ArgumentParser()
20
- parser.add_argument('--device', type=str, default='cpu')
21
- parser.add_argument('--theme', type=str)
22
- parser.add_argument('--share', action='store_true')
23
- parser.add_argument('--port', type=int)
24
- parser.add_argument('--disable-queue',
25
- dest='enable_queue',
26
- action='store_false')
27
- return parser.parse_args()
28
-
29
-
30
- def main():
31
- args = parse_args()
32
- model = Model(device=args.device)
33
-
34
- with gr.Blocks(theme=args.theme, css='style.css') as demo:
35
- gr.Markdown(DESCRIPTION)
36
-
37
- with gr.Row():
38
- with gr.Column():
39
- with gr.Group():
40
- input_image = gr.Image(label='Input', type='numpy')
41
- run_button = gr.Button(value='Run')
42
- with gr.Column():
43
- result = gr.Image(label='Result',
44
- type='numpy',
45
- elem_id='result')
46
 
47
- gr.Markdown(FOOTER)
48
 
49
- run_button.click(fn=model.predict, inputs=input_image, outputs=result)
 
50
 
51
- demo.launch(
52
- enable_queue=args.enable_queue,
53
- server_port=args.port,
54
- share=args.share,
55
- )
 
56
 
 
57
 
58
- if __name__ == '__main__':
59
- main()
 
2
 
3
  from __future__ import annotations
4
 
 
 
5
  import gradio as gr
6
 
7
  from model import Model
 
10
 
11
  This is an unofficial demo for [https://github.com/ljsabc/MangaLineExtraction_PyTorch](https://github.com/ljsabc/MangaLineExtraction_PyTorch).
12
  '''
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
 
14
+ model = Model()
15
 
16
+ with gr.Blocks(css='style.css') as demo:
17
+ gr.Markdown(DESCRIPTION)
18
 
19
+ with gr.Row():
20
+ with gr.Column():
21
+ input_image = gr.Image(label='Input', type='numpy')
22
+ run_button = gr.Button(value='Run')
23
+ with gr.Column():
24
+ result = gr.Image(label='Result', type='numpy', elem_id='result')
25
 
26
+ run_button.click(fn=model.predict, inputs=input_image, outputs=result)
27
 
28
+ demo.queue().launch(show_api=False)
 
model.py CHANGED
@@ -16,14 +16,15 @@ sys.path.insert(0, submodule_dir.as_posix())
16
 
17
  from model_torch import res_skip
18
 
19
- HF_TOKEN = os.environ['HF_TOKEN']
20
 
21
  MAX_SIZE = 1000
22
 
23
 
24
  class Model:
25
- def __init__(self, device: str | torch.device):
26
- self.device = torch.device(device)
 
27
  self.model = self._load_model()
28
 
29
  def _load_model(self) -> nn.Module:
 
16
 
17
  from model_torch import res_skip
18
 
19
+ HF_TOKEN = os.getenv('HF_TOKEN')
20
 
21
  MAX_SIZE = 1000
22
 
23
 
24
  class Model:
25
+ def __init__(self):
26
+ self.device = torch.device(
27
+ 'cuda:0' if torch.cuda.is_available() else 'cpu')
28
  self.model = self._load_model()
29
 
30
  def _load_model(self) -> nn.Module: