Commit f031659 by hysts (HF staff)
Parent: 61d97ef
.gitattributes CHANGED
@@ -1,3 +1,5 @@
+*.jpg filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
 *.7z filter=lfs diff=lfs merge=lfs -text
 *.arrow filter=lfs diff=lfs merge=lfs -text
 *.bin filter=lfs diff=lfs merge=lfs -text
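The two new patterns route JPEG images and tarballs through Git LFS, which is why the example photos and `configs.tar` added below are stored as LFS pointers. As a hedged sketch (not part of this commit), entries like these are typically generated with the `git lfs track` command; the snippet drives it from Python in the same spirit as the subprocess calls in `app.py`:

import subprocess

# Register LFS patterns; each call adds a matching line to .gitattributes
# unless the pattern is already tracked.
for pattern in ['*.jpg', '*.tar']:
    subprocess.run(['git', 'lfs', 'track', pattern], check=True)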
.gitignore ADDED
@@ -0,0 +1 @@
+mmdet_configs/configs
.gitmodules ADDED
@@ -0,0 +1,3 @@
+[submodule "ViTPose"]
+	path = ViTPose
+	url = https://github.com/ViTAE-Transformer/ViTPose
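ViTPose is vendored as a git submodule pinned to commit 86eced957665e62fa728eea059d5fffb9b94d653 (see the `ViTPose` entry further down), so a fresh checkout needs the submodule populated before `app.py` can import from `ViTPose/`. A minimal sketch, assuming the repository has already been cloned and `git` is on PATH:

import subprocess

# Fetch the ViTPose submodule at the commit pinned by this repository.
subprocess.run(['git', 'submodule', 'update', '--init', '--recursive'],
               check=True)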
.pre-commit-config.yaml ADDED
@@ -0,0 +1,46 @@
+exclude: ^(ViTPose/|mmdet_configs/configs/)
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.2.0
+  hooks:
+  - id: check-executables-have-shebangs
+  - id: check-json
+  - id: check-merge-conflict
+  - id: check-shebang-scripts-are-executable
+  - id: check-toml
+  - id: check-yaml
+  - id: double-quote-string-fixer
+  - id: end-of-file-fixer
+  - id: mixed-line-ending
+    args: ['--fix=lf']
+  - id: requirements-txt-fixer
+  - id: trailing-whitespace
+- repo: https://github.com/myint/docformatter
+  rev: v1.4
+  hooks:
+  - id: docformatter
+    args: ['--in-place']
+- repo: https://github.com/pycqa/isort
+  rev: 5.10.1
+  hooks:
+  - id: isort
+- repo: https://github.com/pre-commit/mirrors-mypy
+  rev: v0.812
+  hooks:
+  - id: mypy
+    args: ['--ignore-missing-imports']
+- repo: https://github.com/google/yapf
+  rev: v0.32.0
+  hooks:
+  - id: yapf
+    args: ['--parallel', '--in-place']
+- repo: https://github.com/kynan/nbstripout
+  rev: 0.5.0
+  hooks:
+  - id: nbstripout
+    args: ['--extra-keys', 'metadata.interpreter metadata.kernelspec cell.metadata.pycharm']
+- repo: https://github.com/nbQA-dev/nbQA
+  rev: 1.3.1
+  hooks:
+  - id: nbqa-isort
+  - id: nbqa-yapf
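These hooks (formatting with yapf/isort/docformatter, type checking with mypy, notebook cleanup with nbstripout/nbQA, plus the standard pre-commit checks) skip the `ViTPose/` submodule and the extracted `mmdet_configs/configs/`. A minimal sketch of running them locally, assuming the `pre-commit` package is installed:

import subprocess

# Install the git hook once, then run all configured hooks over the repo.
subprocess.run(['pre-commit', 'install'], check=True)
# Exits non-zero if any hook reformats a file, so no check=True here.
subprocess.run(['pre-commit', 'run', '--all-files'])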
.style.yapf ADDED
@@ -0,0 +1,5 @@
+[style]
+based_on_style = pep8
+blank_line_before_nested_class_or_def = false
+spaces_before_comment = 2
+split_before_logical_operator = true
ViTPose ADDED
@@ -0,0 +1 @@
+Subproject commit 86eced957665e62fa728eea059d5fffb9b94d653
app.py ADDED
@@ -0,0 +1,386 @@
+#!/usr/bin/env python
+
+from __future__ import annotations
+
+import argparse
+import os
+import pathlib
+import subprocess
+import sys
+import tarfile
+
+if os.getenv('SYSTEM') == 'spaces':
+    import mim
+
+    mim.uninstall('mmcv-full', confirm_yes=True)
+    mim.install('mmcv-full==1.5.0', is_yes=True)
+
+    subprocess.call('pip uninstall -y opencv-python'.split())
+    subprocess.call('pip uninstall -y opencv-python-headless'.split())
+    subprocess.call('pip install opencv-python-headless==4.5.5.64'.split())
+
+import gradio as gr
+import huggingface_hub
+import numpy as np
+import torch
+import torch.nn as nn
+
+sys.path.insert(0, 'ViTPose/')
+
+from mmdet.apis import inference_detector, init_detector
+from mmpose.apis import (inference_top_down_pose_model, init_pose_model,
+                         process_mmdet_results, vis_pose_result)
+
+TOKEN = os.environ['TOKEN']
+
+
+def parse_args() -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--device', type=str, default='cpu')
+    parser.add_argument('--theme', type=str)
+    parser.add_argument('--share', action='store_true')
+    parser.add_argument('--port', type=int)
+    parser.add_argument('--disable-queue',
+                        dest='enable_queue',
+                        action='store_false')
+    return parser.parse_args()
+
+
+class DetModel:
+    def __init__(self, device: str | torch.device):
+        self.device = torch.device(device)
+        self.models = self._load_models()
+        self.model_name = 'YOLOX-l'
+
+    def _load_models(self) -> dict[str, nn.Module]:
+        model_dict = {
+            'YOLOX-tiny': {
+                'config':
+                'mmdet_configs/configs/yolox/yolox_tiny_8x8_300e_coco.py',
+                'model':
+                'https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_tiny_8x8_300e_coco/yolox_tiny_8x8_300e_coco_20211124_171234-b4047906.pth',
+            },
+            'YOLOX-s': {
+                'config':
+                'mmdet_configs/configs/yolox/yolox_s_8x8_300e_coco.py',
+                'model':
+                'https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_s_8x8_300e_coco/yolox_s_8x8_300e_coco_20211121_095711-4592a793.pth',
+            },
+            'YOLOX-l': {
+                'config':
+                'mmdet_configs/configs/yolox/yolox_l_8x8_300e_coco.py',
+                'model':
+                'https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_l_8x8_300e_coco/yolox_l_8x8_300e_coco_20211126_140236-d3bd2b23.pth',
+            },
+            'YOLOX-x': {
+                'config':
+                'mmdet_configs/configs/yolox/yolox_x_8x8_300e_coco.py',
+                'model':
+                'https://download.openmmlab.com/mmdetection/v2.0/yolox/yolox_x_8x8_300e_coco/yolox_x_8x8_300e_coco_20211126_140254-1ef88d67.pth',
+            },
+        }
+        models = {
+            key: init_detector(dic['config'], dic['model'], device=self.device)
+            for key, dic in model_dict.items()
+        }
+        return models
+
+    def set_model_name(self, name: str) -> None:
+        self.model_name = name
+
+    def detect_and_visualize(
+            self, image: np.ndarray,
+            score_threshold: float) -> tuple[list[np.ndarray], np.ndarray]:
+        out = self.detect(image)
+        vis = self.visualize_detection_results(image, out, score_threshold)
+        return out, vis
+
+    def detect(self, image: np.ndarray) -> list[np.ndarray]:
+        image = image[:, :, ::-1]  # RGB -> BGR
+        model = self.models[self.model_name]
+        out = inference_detector(model, image)
+        return out
+
+    def visualize_detection_results(
+            self,
+            image: np.ndarray,
+            detection_results: list[np.ndarray],
+            score_threshold: float = 0.3) -> np.ndarray:
+        person_det = [detection_results[0]] + [np.array([]).reshape(0, 5)]
+
+        image = image[:, :, ::-1]  # RGB -> BGR
+        model = self.models[self.model_name]
+        vis = model.show_result(image,
+                                person_det,
+                                score_thr=score_threshold,
+                                bbox_color=None,
+                                text_color=(200, 200, 200),
+                                mask_color=None)
+        return vis[:, :, ::-1]  # BGR -> RGB
+
+
+class PoseModel:
+    def __init__(self, device: str | torch.device):
+        self.device = torch.device(device)
+        self.models = self._load_models()
+        self.model_name = 'ViTPose-B (multi-task train, COCO)'
+
+    def _load_models(self) -> dict[str, nn.Module]:
+        model_dict = {
+            'ViTPose-B (single-task train)': {
+                'config':
+                'ViTPose/configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/ViTPose_base_coco_256x192.py',
+                'model': 'models/vitpose-b.pth',
+            },
+            'ViTPose-L (single-task train)': {
+                'config':
+                'ViTPose/configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/ViTPose_large_coco_256x192.py',
+                'model': 'models/vitpose-l.pth',
+            },
+            'ViTPose-B (multi-task train, COCO)': {
+                'config':
+                'ViTPose/configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/ViTPose_base_coco_256x192.py',
+                'model': 'models/vitpose-b-multi-coco.pth',
+            },
+            'ViTPose-L (multi-task train, COCO)': {
+                'config':
+                'ViTPose/configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/ViTPose_large_coco_256x192.py',
+                'model': 'models/vitpose-l-multi-coco.pth',
+            },
+        }
+        models = dict()
+        for key, dic in model_dict.items():
+            ckpt_path = huggingface_hub.hf_hub_download('hysts/ViTPose',
+                                                        dic['model'],
+                                                        use_auth_token=TOKEN)
+            model = init_pose_model(dic['config'],
+                                    ckpt_path,
+                                    device=self.device)
+            models[key] = model
+        return models
+
+    def set_model_name(self, name: str) -> None:
+        self.model_name = name
+
+    def predict_pose_and_visualize(
+        self,
+        image: np.ndarray,
+        det_results: list[np.ndarray],
+        box_score_threshold: float,
+        kpt_score_threshold: float,
+        vis_dot_radius: int,
+        vis_line_thickness: int,
+    ) -> tuple[list[dict[str, np.ndarray]], np.ndarray]:
+        out = self.predict_pose(image, det_results, box_score_threshold)
+        vis = self.visualize_pose_results(image, out, kpt_score_threshold,
+                                          vis_dot_radius, vis_line_thickness)
+        return out, vis
+
+    def predict_pose(
+            self,
+            image: np.ndarray,
+            det_results: list[np.ndarray],
+            box_score_threshold: float = 0.5) -> list[dict[str, np.ndarray]]:
+        image = image[:, :, ::-1]  # RGB -> BGR
+        model = self.models[self.model_name]
+        person_results = process_mmdet_results(det_results, 1)
+        out, _ = inference_top_down_pose_model(model,
+                                               image,
+                                               person_results=person_results,
+                                               bbox_thr=box_score_threshold,
+                                               format='xyxy')
+        return out
+
+    def visualize_pose_results(self,
+                               image: np.ndarray,
+                               pose_results: list[np.ndarray],
+                               kpt_score_threshold: float = 0.3,
+                               vis_dot_radius: int = 4,
+                               vis_line_thickness: int = 1) -> np.ndarray:
+        image = image[:, :, ::-1]  # RGB -> BGR
+        model = self.models[self.model_name]
+        vis = vis_pose_result(model,
+                              image,
+                              pose_results,
+                              kpt_score_thr=kpt_score_threshold,
+                              radius=vis_dot_radius,
+                              thickness=vis_line_thickness)
+        return vis[:, :, ::-1]  # BGR -> RGB
+
+
+def set_example_image(example: list) -> dict:
+    return gr.Image.update(value=example[0])
+
+
+def extract_tar() -> None:
+    if pathlib.Path('mmdet_configs/configs').exists():
+        return
+    with tarfile.open('mmdet_configs/configs.tar') as f:
+        f.extractall('mmdet_configs')
+
+
+def main():
+    args = parse_args()
+
+    extract_tar()
+
+    det_model = DetModel(device=args.device)
+    pose_model = PoseModel(device=args.device)
+
+    css = '''
+h1#title {
+  text-align: center;
+}
+'''
+
+    with gr.Blocks(theme=args.theme, css=css) as demo:
+        gr.Markdown('''<h1 id="title">ViTPose</h1>
+
+This is an unofficial demo for [https://github.com/ViTAE-Transformer/ViTPose](https://github.com/ViTAE-Transformer/ViTPose).'''
+                    )
+
+        with gr.Box():
+            gr.Markdown('## Step 1')
+            with gr.Row():
+                with gr.Column():
+                    with gr.Row():
+                        input_image = gr.Image(label='Input Image',
+                                               type='numpy')
+                    with gr.Row():
+                        detector_name = gr.Dropdown(list(
+                            det_model.models.keys()),
+                                                    value=det_model.model_name,
+                                                    label='Detector')
+                    with gr.Row():
+                        detect_button = gr.Button(value='Detect')
+                        det_preds = gr.Variable()
+                with gr.Column():
+                    detection_visualization = gr.Image(
+                        label='Detection Result', type='numpy')
+                    vis_det_score_threshold = gr.Slider(
+                        0,
+                        1,
+                        step=0.05,
+                        value=0.5,
+                        label='Visualization Score Threshold')
+                    redraw_det_button = gr.Button(value='Redraw')
+
+            with gr.Row():
+                paths = sorted(pathlib.Path('images').rglob('*.jpg'))
+                example_images = gr.Dataset(components=[input_image],
+                                            samples=[[path.as_posix()]
+                                                     for path in paths])
+
+        with gr.Box():
+            gr.Markdown('## Step 2')
+            with gr.Row():
+                with gr.Column():
+                    with gr.Row():
+                        pose_model_name = gr.Dropdown(
+                            list(pose_model.models.keys()),
+                            value=pose_model.model_name,
+                            label='Pose Model')
+                        det_score_threshold = gr.Slider(
+                            0,
+                            1,
+                            step=0.05,
+                            value=0.5,
+                            label='Box Score Threshold')
+                    with gr.Row():
+                        predict_button = gr.Button(value='Predict')
+                        pose_preds = gr.Variable()
+                with gr.Column():
+                    pose_visualization = gr.Image(label='Result', type='numpy')
+                    vis_kpt_score_threshold = gr.Slider(
+                        0,
+                        1,
+                        step=0.05,
+                        value=0.3,
+                        label='Visualization Score Threshold')
+                    vis_dot_radius = gr.Slider(1,
+                                               10,
+                                               step=1,
+                                               value=4,
+                                               label='Dot Radius')
+                    vis_line_thickness = gr.Slider(1,
+                                                   10,
+                                                   step=1,
+                                                   value=2,
+                                                   label='Line Thickness')
+                    redraw_pose_button = gr.Button(value='Redraw')
+
+        gr.Markdown(
+            '<center><img src="https://visitor-badge.glitch.me/badge?page_id=hysts.vitpose" alt="visitor badge"/></center>'
+        )
+
+        detector_name.change(fn=det_model.set_model_name,
+                             inputs=[
+                                 detector_name,
+                             ],
+                             outputs=None)
+        detect_button.click(fn=det_model.detect_and_visualize,
+                            inputs=[
+                                input_image,
+                                vis_det_score_threshold,
+                            ],
+                            outputs=[
+                                det_preds,
+                                detection_visualization,
+                            ])
+        redraw_det_button.click(fn=det_model.visualize_detection_results,
+                                inputs=[
+                                    input_image,
+                                    det_preds,
+                                    vis_det_score_threshold,
+                                ],
+                                outputs=[
+                                    detection_visualization,
+                                ])
+
+        pose_model_name.change(fn=pose_model.set_model_name,
+                               inputs=[
+                                   pose_model_name,
+                               ],
+                               outputs=None)
+        predict_button.click(fn=pose_model.predict_pose_and_visualize,
+                             inputs=[
+                                 input_image,
+                                 det_preds,
+                                 det_score_threshold,
+                                 vis_kpt_score_threshold,
+                                 vis_dot_radius,
+                                 vis_line_thickness,
+                             ],
+                             outputs=[
+                                 pose_preds,
+                                 pose_visualization,
+                             ])
+        redraw_pose_button.click(fn=pose_model.visualize_pose_results,
+                                 inputs=[
+                                     input_image,
+                                     pose_preds,
+                                     vis_kpt_score_threshold,
+                                     vis_dot_radius,
+                                     vis_line_thickness,
+                                 ],
+                                 outputs=[
+                                     pose_visualization,
+                                 ])
+
+        example_images.click(fn=set_example_image,
+                             inputs=[
+                                 example_images,
+                             ],
+                             outputs=[
+                                 input_image,
+                             ])
+
+    demo.launch(
+        enable_queue=args.enable_queue,
+        server_port=args.port,
+        share=args.share,
+    )
+
+
+if __name__ == '__main__':
+    main()
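Note that `app.py` expects a Hugging Face access token in the `TOKEN` environment variable (it is passed to `hf_hub_download` to fetch the ViTPose checkpoints) and accepts the CLI flags defined in `parse_args`. A minimal local-launch sketch, assuming the submodule, extracted configs, and requirements are already in place; the token value is a placeholder:

import os
import subprocess

env = dict(os.environ, TOKEN='<your-hf-access-token>')  # placeholder token
# '--device cpu' matches the default; add '--share' for a public Gradio link.
subprocess.run(['python', 'app.py', '--device', 'cpu'], env=env, check=True)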
images/README.md ADDED
@@ -0,0 +1,9 @@
+These images are from the following public-domain sources:
+
+- https://www.pexels.com/photo/women-in-active-wear-balancing-their-body-while-leaning-by-the-doorway-5770445/
+- https://www.pexels.com/photo/woman-balancing-her-body-on-a-handstand-using-one-hand-5770708/
+- https://www.pexels.com/photo/persons-in-black-shirt-and-pants-690598/
+- https://www.pexels.com/photo/photo-of-woman-doing-a-ballet-dance-1164975/
+- https://www.pexels.com/photo/beautiful-woman-in-a-red-dress-wearing-red-lipstick-7909580/
+- https://www.pexels.com/photo/girl-in-red-jacket-riding-bicycle-5792907/
+- https://www.pexels.com/photo/woman-wearing-a-white-gown-walking-on-grass-field-8574605/
images/pexels-cottonbro-5770445.jpg ADDED

Git LFS Details

  • SHA256: b4548cd4a16238f559a149670c6ad2606b3b2147c92e5a2a380dd12fd922f276
  • Pointer size: 131 Bytes
  • Size of remote file: 379 kB
images/pexels-cottonbro-5770708.jpg ADDED

Git LFS Details

  • SHA256: 951720e6bb6053756ef555e5fcae4b54927582c4974e5908ea1984a9f14d7843
  • Pointer size: 131 Bytes
  • Size of remote file: 478 kB
images/pexels-haste-leart-v-690598.jpg ADDED

Git LFS Details

  • SHA256: 90009cbaceb3c3802d0df460862434e446e5cfad7892986444146ce73a02f61c
  • Pointer size: 131 Bytes
  • Size of remote file: 329 kB
images/pexels-luis-gallegos-alvarez-1164975.jpg ADDED

Git LFS Details

  • SHA256: 05cb7605dbac48915eee1b6ef0de3aba386abb7ab06ef27d58c092df2c76a176
  • Pointer size: 131 Bytes
  • Size of remote file: 553 kB
images/pexels-victoria-borodinova-7909580.jpg ADDED

Git LFS Details

  • SHA256: c05ceaf9c468dd21d24977f2c50e3f3b9b1ba83474d93180f66496635216b573
  • Pointer size: 131 Bytes
  • Size of remote file: 279 kB
images/pexels-yan-krukov-5792907.jpg ADDED

Git LFS Details

  • SHA256: 0500121b9044cb1d4c7913e48ebe5e2374848d57d6a2905f3b7c9469f959f2fe
  • Pointer size: 131 Bytes
  • Size of remote file: 648 kB
images/pexels-лиза-медведева-8574605.jpg ADDED

Git LFS Details

  • SHA256: 85cf4db499f0c5b11397af648e66178a4e40e6d478f1e6b31ade35e225ff6ceb
  • Pointer size: 131 Bytes
  • Size of remote file: 816 kB
mmdet_configs/LICENSE ADDED
@@ -0,0 +1,203 @@
+Copyright 2018-2023 OpenMMLab. All rights reserved.
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2018-2023 OpenMMLab.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
mmdet_configs/README.md ADDED
@@ -0,0 +1,2 @@
+`configs.tar` is a tarball of https://github.com/open-mmlab/mmdetection/tree/v2.24.1/configs.
+The license file of mmdetection is also included in this directory.
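`app.py` unpacks this tarball at startup via `extract_tar()`, which is also why the extracted `mmdet_configs/configs` directory is listed in `.gitignore`. The same step can be run ahead of time; a minimal sketch mirroring the logic in `app.py`:

import pathlib
import tarfile

# Extract configs.tar into mmdet_configs/ unless the configs are already there.
if not pathlib.Path('mmdet_configs/configs').exists():
    with tarfile.open('mmdet_configs/configs.tar') as f:
        f.extractall('mmdet_configs')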
mmdet_configs/configs.tar ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d2091e07da6b74a6cd694e895b653485f7ce9d5d17738a415ca77a56940b989
+size 3389440
requirements.txt ADDED
@@ -0,0 +1,9 @@
+mmcv-full==1.5.0
+mmdet==2.24.1
+mmpose==0.25.1
+numpy==1.22.4
+opencv-python-headless==4.5.5.64
+openmim==0.1.5
+timm==0.5.4
+torch==1.11.0
+torchvision==0.12.0
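These pins match the versions `app.py` enforces when running on Spaces (mmcv-full 1.5.0 via mim, opencv-python-headless 4.5.5.64). A minimal install sketch for a local environment, assuming pip is available and following the same subprocess pattern used in `app.py`:

import subprocess

# Install the pinned dependencies; on Spaces, app.py re-installs mmcv-full via mim.
subprocess.run('pip install -r requirements.txt'.split(), check=True)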