Spaces:
Sleeping
Sleeping
ttxskk
committed on
Commit
•
d7e58f0
0
Parent(s):
update
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +44 -0
- .gitignore +4 -0
- README.md +13 -0
- app.py +126 -0
- assets/01.mp4 +3 -0
- assets/02.mp4 +3 -0
- assets/03.mp4 +3 -0
- assets/04.mp4 +3 -0
- assets/05.mp4 +3 -0
- assets/06.mp4 +3 -0
- assets/07.mp4 +3 -0
- assets/08.mp4 +3 -0
- assets/09.mp4 +3 -0
- config/__init__.py +0 -0
- config/aios_smplx.py +259 -0
- config/aios_smplx_agora_val.py +265 -0
- config/aios_smplx_bedlam.py +265 -0
- config/aios_smplx_demo.py +259 -0
- config/aios_smplx_inference.py +265 -0
- config/aios_smplx_pretrain.py +264 -0
- config/config.py +91 -0
- data/body_models/J_regressor_extra.npy +3 -0
- data/body_models/J_regressor_h36m.npy +3 -0
- data/body_models/J_regressor_mano_LEFT.txt +1902 -0
- data/body_models/J_regressor_mano_RIGHT.txt +1902 -0
- data/body_models/SMPLX_to_J14.pkl +3 -0
- data/body_models/SMPL_NEUTRAL.pkl +3 -0
- data/body_models/all_means.pkl +3 -0
- data/body_models/downsample_mat_smplx.pkl +3 -0
- data/body_models/joints_regressor_cmr.npy +3 -0
- data/body_models/smpl/SMPL_FEMALE.pkl +3 -0
- data/body_models/smpl/SMPL_MALE.pkl +3 -0
- data/body_models/smpl/SMPL_NEUTRAL.pkl +3 -0
- data/body_models/smpl/index.html +17 -0
- data/body_models/smpl_mean_params.npz +3 -0
- data/body_models/smplx/MANO_SMPLX_vertex_ids.pkl +3 -0
- data/body_models/smplx/SMPL-X__FLAME_vertex_ids.npy +3 -0
- data/body_models/smplx/SMPLX_FEMALE.npz +3 -0
- data/body_models/smplx/SMPLX_FEMALE.pkl +3 -0
- data/body_models/smplx/SMPLX_MALE.npz +3 -0
- data/body_models/smplx/SMPLX_MALE.pkl +3 -0
- data/body_models/smplx/SMPLX_NEUTRAL.npz +3 -0
- data/body_models/smplx/SMPLX_NEUTRAL.pkl +3 -0
- data/body_models/smplx/SMPLX_to_J14.npy +3 -0
- data/body_models/smplx/SMPLX_to_J14.pkl +3 -0
- data/body_models/smplx/smplx_kid_template.npy +3 -0
- data/body_models/smplx2smpl.pkl +3 -0
- datasets/AGORA_MM.py +974 -0
- datasets/ARCTIC.py +215 -0
- datasets/BEDLAM.py +32 -0
.gitattributes
ADDED
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
2 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
3 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
4 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
5 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
6 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
7 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
8 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
9 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
10 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
11 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
12 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
13 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
14 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
15 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
16 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
17 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
18 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
19 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
20 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
21 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
22 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
23 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
24 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
25 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
26 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
27 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
28 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
29 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
30 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
31 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
32 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
33 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
assets/01.mp4 filter=lfs diff=lfs merge=lfs -text
|
37 |
+
assets/02.mp4 filter=lfs diff=lfs merge=lfs -text
|
38 |
+
assets/03.mp4 filter=lfs diff=lfs merge=lfs -text
|
39 |
+
assets/04.mp4 filter=lfs diff=lfs merge=lfs -text
|
40 |
+
assets/05.mp4 filter=lfs diff=lfs merge=lfs -text
|
41 |
+
assets/06.mp4 filter=lfs diff=lfs merge=lfs -text
|
42 |
+
assets/07.mp4 filter=lfs diff=lfs merge=lfs -text
|
43 |
+
assets/08.mp4 filter=lfs diff=lfs merge=lfs -text
|
44 |
+
assets/09.mp4 filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
data_ssc/
|
2 |
+
demo_out/
|
3 |
+
pretrained_models/*
|
4 |
+
.vscode/
|
README.md
ADDED
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
title: AiOS
|
3 |
+
emoji: ⚡
|
4 |
+
colorFrom: blue
|
5 |
+
colorTo: indigo
|
6 |
+
sdk: gradio
|
7 |
+
python_version: 3.9
|
8 |
+
sdk_version: 4.38.1
|
9 |
+
app_file: app.py
|
10 |
+
pinned: false
|
11 |
+
---
|
12 |
+
|
13 |
+
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
app.py
ADDED
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
import subprocess
|
4 |
+
import pkg_resources
|
5 |
+
|
6 |
+
def is_package_installed(package_name):
    """Return True if *package_name* is installed as a distribution.

    Uses the stdlib ``importlib.metadata`` (Python 3.8+) rather than the
    deprecated ``pkg_resources`` API, which is slow to import and slated
    for removal from setuptools.

    Args:
        package_name: Distribution name as it appears on PyPI (e.g. "mmcv").

    Returns:
        bool: True if the distribution is installed, False otherwise.
    """
    from importlib.metadata import PackageNotFoundError, version

    try:
        version(package_name)
        return True
    except PackageNotFoundError:
        return False
|
12 |
+
|
13 |
+
# --- Build-from-source fallbacks -------------------------------------------
# The Space image may lack prebuilt wheels for these CUDA/C++ extensions, so
# each is compiled from its vendored source tree on first boot.
# `sys.executable -m pip` (not bare `pip`) guarantees the packages land in
# the interpreter that is actually running this script.
_PIP = [sys.executable, "-m", "pip"]

if is_package_installed("mmcv"):
    print("MMCV is installed.")
else:
    print("MMCV is not installed. Build it from the source.")
    os.environ["MMCV_WITH_OPS"] = "1"  # compile mmcv's C++/CUDA ops
    os.environ["FORCE_MLU"] = "1"
    subprocess.run([*_PIP, "install", "-e", "./mmcv"], check=True)
    # Log the resulting environment once, to ease debugging failed builds.
    subprocess.run([*_PIP, "list"], check=True)

if is_package_installed("pytorch3d"):
    print("pytorch3d is installed.")
else:
    print("pytorch3d is not installed. Build it from the source.")
    subprocess.run([*_PIP, "install", "-e", "./pytorch3d"], check=True)

if is_package_installed("MultiScaleDeformableAttention"):
    print("MultiScaleDeformableAttention is installed.")
else:
    print("MultiScaleDeformableAttention is not installed. Build it from the source.")
    subprocess.run([*_PIP, "install", "-e", "./models/aios/ops"], check=True)
|
33 |
+
|
34 |
+
import os.path as osp
|
35 |
+
from pathlib import Path
|
36 |
+
import cv2
|
37 |
+
import gradio as gr
|
38 |
+
import torch
|
39 |
+
import math
|
40 |
+
import spaces
|
41 |
+
from huggingface_hub import hf_hub_download
|
42 |
+
|
43 |
+
# Fetch the AiOS checkpoint from the Hub into the Space's model directory.
hf_hub_download(
    repo_id="ttxskk/AiOS",
    filename="aios_checkpoint.pth",
    local_dir="/home/user/app/pretrained_models",
)

# Output directory for demo renderings; (re)created at startup.
OUT_FOLDER = '/home/user/app/demo_out'
os.makedirs(OUT_FOLDER, exist_ok=True)

# Demo inference config and the checkpoint downloaded above.
DEMO_CONFIG = '/home/user/app/config/aios_smplx_demo.py'
MODEL_PATH = '/home/user/app/pretrained_models/aios_checkpoint.pth'
|
50 |
+
@spaces.GPU(enable_queue=True, duration=300)
def infer(video_input, batch_size, threshold=0.5, num_person=1):
    """Run AiOS inference on an uploaded video and yield the results.

    Args:
        video_input: Filesystem path of the uploaded video (from gr.Video).
        batch_size: Inference batch size — arrives as a string from the UI
            Textbox and is passed through to torchrun's --options verbatim.
        threshold: Detection score threshold in [0, 1].
        num_person: Maximum number of people to reconstruct per frame.

    Yields:
        Tuple of (rendered demo video path, zip archive of exported meshes).
    """
    import shutil  # function-scope: keeps the module's import block untouched

    # Reset the output folder. Previously `os.system(f'rm -rf {OUT_FOLDER}/*')`;
    # building shell commands from interpolated strings is unsafe (the uploaded
    # file path is user-controlled) and breaks on paths containing spaces.
    shutil.rmtree(OUT_FOLDER, ignore_errors=True)
    os.makedirs(OUT_FOLDER, exist_ok=True)

    # Launch inference as an argument list — no shell, no quoting pitfalls.
    # check=True surfaces a failed run as an error instead of silently
    # yielding empty outputs.
    subprocess.run(
        [
            "torchrun", "--nproc_per_node", "1",
            "main.py",
            "-c", DEMO_CONFIG,
            "--options",
            f"batch_size={batch_size}",
            "backbone=resnet50",  # the shell previously stripped the quotes around "resnet50"
            f"num_person={num_person}",
            f"threshold={threshold}",
            "--resume", MODEL_PATH,
            "--eval",
            "--inference",
            "--inference_input", str(video_input),
            "--to_vid",
            "--output_dir", OUT_FOLDER,
        ],
        check=True,
    )

    video_path = os.path.join(OUT_FOLDER, 'demo_vid.mp4')
    save_path_mesh = os.path.join(OUT_FOLDER, 'mesh')
    save_mesh_file = os.path.join(OUT_FOLDER, 'mesh.zip')
    # Replaces the `zip -r` shell call; make_archive takes the archive name
    # without the .zip suffix. Guard against a run that produced no meshes.
    if os.path.isdir(save_path_mesh):
        shutil.make_archive(save_path_mesh, 'zip', root_dir=OUT_FOLDER, base_dir='mesh')
    yield video_path, save_mesh_file
|
70 |
+
|
71 |
+
TITLE = """
<div style="display: flex; justify-content: center; align-items: center; text-align: center;">
    <div>
        <h1 align="center">AiOS: All-in-One-Stage Expressive Human Pose and Shape Estimation</h1>
    </div>
</div>

<div style="display: flex; justify-content: center; align-items: center; text-align: center;">
    <div style="display:flex; gap: 0.25rem;" align="center">
        <a href="https://ttxskk.github.io/AiOS/" target="_blank"><img src='https://img.shields.io/badge/Project-Page-Green'></a>
        <a href="https://github.com/ttxskk/AiOS" target="_blank"><img src='https://img.shields.io/badge/Github-Code-blue'></a>
        <a href="https://ttxskk.github.io/AiOS/assets/aios_cvpr24.pdf" target="_blank"><img src='https://img.shields.io/badge/Paper-Arxiv-red'></a>
    </div>
</div>
<div style="font-size: 1.1rem; color: #555; max-width: 800px; margin: 1rem auto; line-height: 1.5; justify-content: center; align-items: center; text-align: center;">
    <div>
        <p>Recover multiple expressive human pose and shape recovery from an RGB image without any additional requirements, such as an off-the-shelf detection model.</p>
    </div>
</div>
"""
# NOTE: the description paragraph previously ended with a stray </h1> closing
# a <p> tag; fixed to well-formed HTML.

with gr.Blocks(title="AiOS", theme=gr.themes.Soft(primary_hue="blue", secondary_hue="gray")) as demo:
    gr.Markdown(TITLE)

    with gr.Row():
        with gr.Column(scale=2):
            video_input = gr.Video(label="Input video", elem_classes="video")
        with gr.Column(scale=1):
            # Textbox values arrive as strings; infer() forwards them to the
            # torchrun command line unchanged.
            batch_size = gr.Textbox(label="Batch Size", type="text", value=8)
            num_person = gr.Textbox(label="Number of Person", type="text", value=1)
            threshold = gr.Slider(0, 1.0, value=0.5, label='Score Threshold')
            send_button = gr.Button("Infer")
    gr.HTML("""<br/>""")

    with gr.Row():
        with gr.Column():
            video_output = gr.Video(elem_classes="video")
        with gr.Column():
            meshes_output = gr.File(label="3D meshes")

    # Argument order must match infer(video_input, batch_size, threshold, num_person).
    send_button.click(
        fn=infer,
        inputs=[video_input, batch_size, threshold, num_person],
        outputs=[video_output, meshes_output],
    )

demo.queue().launch(debug=True)
|
assets/01.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2ba560996c248d78be6556f1727ae6ced81cd62a002715c3ffd542f6202b204b
|
3 |
+
size 2751935
|
assets/02.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:00702a08c978b27b3ddf6ddfd48c5a057753664c8e80d83f4b4e04dff45b8a71
|
3 |
+
size 2827267
|
assets/03.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bfcc1ce90a0921ffa5550a04f743470081ff4599c265cf491e636a8ea70233d4
|
3 |
+
size 4033767
|
assets/04.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:28531c3c0ad9cbcc097a00f8553aafcdc0513a881f0fa6d1a7937248f46fce0c
|
3 |
+
size 2639842
|
assets/05.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:1cf7f1b65d87f0a77c1d9456771e4f88228aa836426b4ad0cbad672e80d07e36
|
3 |
+
size 3584040
|
assets/06.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:fcb4139d4863c5ec92224f7cb452ec4631be0613eb4c3f82ee7fbb6f89510fe2
|
3 |
+
size 19797950
|
assets/07.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4c71c5ed8573cb727c515d733e51c5da4654c58ab096cbca4bdf9b072e8284c7
|
3 |
+
size 3274979
|
assets/08.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d14f03e984a0ebefd9e8429c8e0d3ecdb0ffc9126ad91a489b57dc0f5d12695b
|
3 |
+
size 6825913
|
assets/09.mp4
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:30b5b6f75f024647a9e430f02b33caa1ccec327b487ba5bb451e2859e1e45142
|
3 |
+
size 6336699
|
config/__init__.py
ADDED
File without changes
|
config/aios_smplx.py
ADDED
@@ -0,0 +1,259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
# AiOS SMPL-X demo configuration (inference on user-supplied videos).

# --- Optimizer / schedule ---------------------------------------------------
num_classes = 2
lr = 0.0001*1.414/10
param_dict_type = 'default'
lr_backbone = 1e-05*1.414/10
lr_backbone_names = ['backbone.0']
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
lr_linear_proj_mult = 0.1
ddetr_lr_param = False
batch_size = 2
weight_decay = 0.0001
epochs = 200
lr_drop = 11
save_checkpoint_interval = 1
clip_max_norm = 0.1
onecyclelr = False
multi_step_lr = True
lr_drop_list = [30, 60]

# --- Model / backbone -------------------------------------------------------
modelname = 'aios_smplx'
frozen_weights = None
backbone = 'resnet50'
use_checkpoint = False

dilation = False
position_embedding = 'sine'
pe_temperatureH = 20
pe_temperatureW = 20
return_interm_indices = [1, 2, 3]
backbone_freeze_keywords = None

# --- Transformer ------------------------------------------------------------
enc_layers = 6
dec_layers = 6
pre_norm = False
dim_feedforward = 2048
hidden_dim = 256
dropout = 0.0
nheads = 8
num_queries = 900
query_dim = 4
num_patterns = 0
random_refpoints_xy = False
fix_refpoints_hw = -1
dec_layer_number = None
num_feature_levels = 4
enc_n_points = 4
dec_n_points = 4
dln_xy_noise = 0.2
dln_hw_noise = 0.2
two_stage_type = 'standard'
two_stage_bbox_embed_share = False
two_stage_class_embed_share = False
two_stage_learn_wh = False
two_stage_default_hw = 0.05
two_stage_keep_all_tokens = False
rm_detach = None
num_select = 50
transformer_activation = 'relu'
batch_norm_type = 'FrozenBatchNorm2d'

# --- Losses and matcher costs ----------------------------------------------
masks = False
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
          "smpl_kp2d", "smpl_kp3d", "smpl_kp3d_ra", 'labels', 'boxes', "keypoints"]
aux_loss = True
set_cost_class = 2.0
set_cost_bbox = 5.0
set_cost_giou = 2.0
set_cost_keypoints = 10.0
set_cost_kpvis = 0.0
set_cost_oks = 4.0
cls_loss_coef = 2.0

# SMPL-X parameter losses
smpl_pose_loss_root_coef = 10 * 0.1
smpl_pose_loss_body_coef = 1 * 0.1
smpl_pose_loss_lhand_coef = 1 * 0.1
smpl_pose_loss_rhand_coef = 1 * 0.1
smpl_pose_loss_jaw_coef = 1 * 0.1
smpl_beta_loss_coef = 0.01
smpl_expr_loss_coef = 0.01

# 3D keypoint losses
smpl_body_kp3d_loss_coef = 10.0 * 0.1
smpl_face_kp3d_loss_coef = 1.0 * 0.1
smpl_lhand_kp3d_loss_coef = 1 * 0.1
smpl_rhand_kp3d_loss_coef = 1 * 0.1

# Root-aligned 3D keypoint losses
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1

# 2D keypoint losses
smpl_body_kp2d_loss_coef = 10.0 * 0.1
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
smpl_face_kp2d_loss_coef = 1.0 * 0.1

# Bbox-aligned 2D keypoint losses (disabled: coefficient 0)
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1

# Box regression losses
bbox_loss_coef = 5.0
body_bbox_loss_coef = 5.0
lhand_bbox_loss_coef = 5.0
rhand_bbox_loss_coef = 5.0
face_bbox_loss_coef = 5.0

giou_loss_coef = 2.0
body_giou_loss_coef = 2.0
rhand_giou_loss_coef = 2.0
lhand_giou_loss_coef = 2.0
face_giou_loss_coef = 2.0

keypoints_loss_coef = 10.0
rhand_keypoints_loss_coef = 10.0
lhand_keypoints_loss_coef = 10.0
face_keypoints_loss_coef = 10.0

oks_loss_coef = 4.0
rhand_oks_loss_coef = 0.5
lhand_oks_loss_coef = 0.5
face_oks_loss_coef = 4.0

enc_loss_coef = 1.0
interm_loss_coef = 1.0
no_interm_box_loss = False
focal_alpha = 0.25
rm_self_attn_layers = None
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]

# --- Decoder / matcher ------------------------------------------------------
decoder_sa_type = 'sa'
matcher_type = 'HungarianMatcher'
decoder_module_seq = ['sa', 'ca', 'ffn']
nms_iou_threshold = -1

dec_pred_bbox_embed_share = False
dec_pred_class_embed_share = False
dec_pred_pose_embed_share = False
body_only = True

# --- Denoising (DN) training ------------------------------------------------
use_dn = True
dn_number = 100
dn_box_noise_scale = 0.4
dn_label_noise_ratio = 0.5
embed_init_tgt = False
dn_label_coef = 0.3
dn_bbox_coef = 0.5
dn_batch_gt_fuse = False
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
dn_labelbook_size = 100

match_unstable_error = False

# --- EMA --------------------------------------------------------------------
use_ema = True
ema_decay = 0.9997
ema_epoch = 0

cls_no_bias = False
num_body_points = 17  # for coco
num_hand_points = 6   # for coco
num_face_points = 6   # for coco
num_group = 100
num_box_decoder_layers = 2
num_hand_face_decoder_layers = 4
no_mmpose_keypoint_evaluator = True
strong_aug = False

# --- SMPL-X body models -----------------------------------------------------
body_model_test = dict(
    type='smplx',
    keypoint_src='smplx',
    num_expression_coeffs=10,
    num_betas=10,
    keypoint_dst='smplx_137',
    model_path='data/body_models/smplx',
    use_pca=False,
    use_face_contour=True)

body_model_train = dict(
    type='smplx',
    keypoint_src='smplx',
    num_expression_coeffs=10,
    num_betas=10,
    keypoint_dst='smplx_137',
    model_path='data/body_models/smplx',
    use_pca=False,
    use_face_contour=True)

# will be updated in exp
exp_name = 'output/exp52/dataset_debug'

end_epoch = 150
train_batch_size = 32

scheduler = 'step'
step_size = 20
gamma = 0.1

# continue
continue_train = True
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'

# --- Dataset setting --------------------------------------------------------
dataset_list = ['INFERENCE_demo']
trainset_3d = []
trainset_2d = []
trainset_partition = {}
trainset_humandata = []
testset = 'INFERENCE_demo'
train_sizes = [480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
train_max_size = 1333
test_sizes = [800]
test_max_size = 1333
no_aug = False
# model
use_cache = True

# UBody setting
train_sample_interval = 10
test_sample_interval = 100
make_same_len = False

# --- Input / output sizes ---------------------------------------------------
input_body_shape = (256, 192)
output_hm_shape = (16, 16, 12)
input_hand_shape = (256, 256)
output_hand_hm_shape = (16, 16, 16)
output_face_hm_shape = (8, 8, 8)
input_face_shape = (192, 192)
focal = (5000, 5000)  # virtual focal lengths
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2)  # virtual principal point
body_3d_size = 2
hand_3d_size = 0.3
face_3d_size = 0.3
camera_3d_size = 2.5

bbox_ratio = 1.2

# Directories, filled in at runtime by the experiment setup.
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None

agora_benchmark = 'na'  # 'agora_model', 'test_only'

# Strategy: 'balance' needs total_data_len to be defined.
data_strategy = 'balance'
total_data_len = 'auto'
|
config/aios_smplx_agora_val.py
ADDED
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
# AiOS SMPL-X configuration for AGORA validation.

# --- Optimizer / schedule ---------------------------------------------------
num_classes = 2
lr = 1e-04
param_dict_type = 'default'
lr_backbone = 1e-05
lr_backbone_names = ['backbone.0']
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
lr_linear_proj_mult = 0.1
ddetr_lr_param = False
batch_size = 2
weight_decay = 0.0001
epochs = 200
lr_drop = 11
save_checkpoint_interval = 1
clip_max_norm = 0.1
onecyclelr = False
multi_step_lr = True
lr_drop_list = [30, 60]

# --- Model / backbone -------------------------------------------------------
modelname = 'aios_smplx'
frozen_weights = None
backbone = 'resnet50'
use_checkpoint = False

dilation = False
position_embedding = 'sine'
pe_temperatureH = 20
pe_temperatureW = 20
return_interm_indices = [1, 2, 3]
backbone_freeze_keywords = None

# --- Transformer ------------------------------------------------------------
enc_layers = 6
dec_layers = 6
pre_norm = False
dim_feedforward = 2048
hidden_dim = 256
dropout = 0.0
nheads = 8
num_queries = 900
query_dim = 4
num_patterns = 0
random_refpoints_xy = False
fix_refpoints_hw = -1
dec_layer_number = None
num_feature_levels = 4
enc_n_points = 4
dec_n_points = 4
dln_xy_noise = 0.2
dln_hw_noise = 0.2
two_stage_type = 'standard'
two_stage_bbox_embed_share = False
two_stage_class_embed_share = False
two_stage_learn_wh = False
two_stage_default_hw = 0.05
two_stage_keep_all_tokens = False
rm_detach = None
num_select = 50
transformer_activation = 'relu'
batch_norm_type = 'FrozenBatchNorm2d'

# --- Losses and matcher costs ----------------------------------------------
masks = False
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
          "smpl_kp2d", "smpl_kp3d", "smpl_kp3d_ra", 'labels', 'boxes', "keypoints"]
aux_loss = True
set_cost_class = 2.0
set_cost_bbox = 5.0
set_cost_giou = 2.0
set_cost_keypoints = 10.0
set_cost_kpvis = 0.0
set_cost_oks = 4.0
cls_loss_coef = 2.0

# SMPL-X parameter losses
smpl_pose_loss_root_coef = 10 * 0.1
smpl_pose_loss_body_coef = 1 * 0.1
smpl_pose_loss_lhand_coef = 1 * 0.1
smpl_pose_loss_rhand_coef = 1 * 0.1
smpl_pose_loss_jaw_coef = 1 * 0.1
smpl_beta_loss_coef = 0.01
smpl_expr_loss_coef = 0.01

# 3D keypoint losses
smpl_body_kp3d_loss_coef = 10.0 * 0.1
smpl_face_kp3d_loss_coef = 1.0 * 0.1
smpl_lhand_kp3d_loss_coef = 1 * 0.1
smpl_rhand_kp3d_loss_coef = 1 * 0.1

# Root-aligned 3D keypoint losses
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1

# 2D keypoint losses
smpl_body_kp2d_loss_coef = 10.0 * 0.1
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
smpl_face_kp2d_loss_coef = 1.0 * 0.1

# Bbox-aligned 2D keypoint losses (disabled: coefficient 0)
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1

# Box regression losses
bbox_loss_coef = 5.0
body_bbox_loss_coef = 5.0
lhand_bbox_loss_coef = 5.0
rhand_bbox_loss_coef = 5.0
face_bbox_loss_coef = 5.0

giou_loss_coef = 2.0
body_giou_loss_coef = 2.0
rhand_giou_loss_coef = 2.0
lhand_giou_loss_coef = 2.0
face_giou_loss_coef = 2.0

keypoints_loss_coef = 10.0
rhand_keypoints_loss_coef = 10.0
lhand_keypoints_loss_coef = 10.0
face_keypoints_loss_coef = 10.0

oks_loss_coef = 4.0
rhand_oks_loss_coef = 0.5
lhand_oks_loss_coef = 0.5
face_oks_loss_coef = 4.0

enc_loss_coef = 1.0
interm_loss_coef = 1.0
no_interm_box_loss = False
focal_alpha = 0.25
rm_self_attn_layers = None
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]

# --- Decoder / matcher ------------------------------------------------------
decoder_sa_type = 'sa'
matcher_type = 'HungarianMatcher'
decoder_module_seq = ['sa', 'ca', 'ffn']
nms_iou_threshold = -1

dec_pred_bbox_embed_share = False
dec_pred_class_embed_share = False
dec_pred_pose_embed_share = False
body_only = True

# --- Denoising (DN) training ------------------------------------------------
use_dn = True
dn_number = 100
dn_box_noise_scale = 0.4
dn_label_noise_ratio = 0.5
embed_init_tgt = False
dn_label_coef = 0.3
dn_bbox_coef = 0.5
dn_batch_gt_fuse = False
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
dn_labelbook_size = 100

match_unstable_error = False

# --- EMA --------------------------------------------------------------------
use_ema = True
ema_decay = 0.9997
ema_epoch = 0

cls_no_bias = False
num_body_points = 17  # for coco
num_hand_points = 6   # for coco
num_face_points = 6   # for coco
num_group = 100
num_box_decoder_layers = 2
num_hand_face_decoder_layers = 4
no_mmpose_keypoint_evaluator = True
strong_aug = False

# --- SMPL-X body models -----------------------------------------------------
body_model_test = dict(
    type='smplx',
    keypoint_src='smplx',
    num_expression_coeffs=10,
    num_betas=10,
    keypoint_dst='smplx_137',
    model_path='data/body_models/smplx',
    use_pca=False,
    use_face_contour=True)

body_model_train = dict(
    type='smplx',
    keypoint_src='smplx',
    num_expression_coeffs=10,
    num_betas=10,
    keypoint_dst='smplx_137',
    model_path='data/body_models/smplx',
    use_pca=False,
    use_face_contour=True)

# will be updated in exp
exp_name = 'output/exp52/dataset_debug'

end_epoch = 150
train_batch_size = 32

scheduler = 'step'
step_size = 20
gamma = 0.1

# continue
continue_train = True
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'

# --- Dataset setting --------------------------------------------------------
dataset_list = ['AGORA_MM', 'BEDLAM', 'COCO_NA']
trainset_3d = ['AGORA_MM', 'BEDLAM', 'COCO_NA']
trainset_2d = []
trainset_partition = {
    'AGORA_MM': 0.4,
    'BEDLAM': 0.7,
    'COCO_NA': 1,
    # 'EgoBody_Egocentric': 1,
    # 'EgoBody_Kinect': 1.0,
}
trainset_humandata = []
testset = 'INFERENCE_AGORA'
train_sizes = [480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
train_max_size = 1333
test_sizes = [800]
test_max_size = 1333
no_aug = False
# model
use_cache = True

# UBody setting
train_sample_interval = 10
test_sample_interval = 100
make_same_len = False

# --- Input / output sizes ---------------------------------------------------
input_body_shape = (256, 192)
output_hm_shape = (16, 16, 12)
input_hand_shape = (256, 256)
output_hand_hm_shape = (16, 16, 16)
output_face_hm_shape = (8, 8, 8)
input_face_shape = (192, 192)
focal = (5000, 5000)  # virtual focal lengths
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2)  # virtual principal point
body_3d_size = 2
hand_3d_size = 0.3
face_3d_size = 0.3
camera_3d_size = 2.5

bbox_ratio = 1.2

# Directories, filled in at runtime by the experiment setup.
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None

agora_benchmark = 'na'  # 'agora_model', 'test_only'

# Strategy: 'balance' needs total_data_len to be defined.
data_strategy = 'balance'
total_data_len = 'auto'
|
config/aios_smplx_bedlam.py
ADDED
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
num_classes = 2
|
3 |
+
lr = 0.0001*1.414/10
|
4 |
+
param_dict_type = 'default'
|
5 |
+
lr_backbone = 1e-05*1.414/10
|
6 |
+
lr_backbone_names = ['backbone.0']
|
7 |
+
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
|
8 |
+
lr_linear_proj_mult = 0.1
|
9 |
+
ddetr_lr_param = False
|
10 |
+
batch_size = 2
|
11 |
+
weight_decay = 0.0001
|
12 |
+
epochs = 200
|
13 |
+
lr_drop = 11
|
14 |
+
save_checkpoint_interval = 1
|
15 |
+
clip_max_norm = 0.1
|
16 |
+
onecyclelr = False
|
17 |
+
multi_step_lr = True
|
18 |
+
lr_drop_list = [30, 60]
|
19 |
+
|
20 |
+
modelname = 'aios_smplx'
|
21 |
+
frozen_weights = None
|
22 |
+
backbone = 'resnet50'
|
23 |
+
use_checkpoint = False
|
24 |
+
|
25 |
+
dilation = False
|
26 |
+
position_embedding = 'sine'
|
27 |
+
pe_temperatureH = 20
|
28 |
+
pe_temperatureW = 20
|
29 |
+
return_interm_indices = [1, 2, 3]
|
30 |
+
backbone_freeze_keywords = None
|
31 |
+
enc_layers = 6
|
32 |
+
dec_layers = 6
|
33 |
+
pre_norm = False
|
34 |
+
dim_feedforward = 2048
|
35 |
+
hidden_dim = 256
|
36 |
+
dropout = 0.0
|
37 |
+
nheads = 8
|
38 |
+
num_queries = 900
|
39 |
+
query_dim = 4
|
40 |
+
num_patterns = 0
|
41 |
+
random_refpoints_xy = False
|
42 |
+
fix_refpoints_hw = -1
|
43 |
+
dec_layer_number = None
|
44 |
+
num_feature_levels = 4
|
45 |
+
enc_n_points = 4
|
46 |
+
dec_n_points = 4
|
47 |
+
dln_xy_noise = 0.2
|
48 |
+
dln_hw_noise = 0.2
|
49 |
+
two_stage_type = 'standard'
|
50 |
+
two_stage_bbox_embed_share = False
|
51 |
+
two_stage_class_embed_share = False
|
52 |
+
two_stage_learn_wh = False
|
53 |
+
two_stage_default_hw = 0.05
|
54 |
+
two_stage_keep_all_tokens = False
|
55 |
+
rm_detach = None
|
56 |
+
num_select = 50
|
57 |
+
transformer_activation = 'relu'
|
58 |
+
batch_norm_type = 'FrozenBatchNorm2d'
|
59 |
+
|
60 |
+
masks = False
|
61 |
+
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
|
62 |
+
"smpl_kp2d","smpl_kp3d","smpl_kp3d_ra",'labels', 'boxes', "keypoints"]
|
63 |
+
# losses = ['labels', 'boxes', "keypoints"]
|
64 |
+
aux_loss = True
|
65 |
+
set_cost_class = 2.0
|
66 |
+
set_cost_bbox = 5.0
|
67 |
+
set_cost_giou = 2.0
|
68 |
+
set_cost_keypoints = 10.0
|
69 |
+
set_cost_kpvis = 0.0
|
70 |
+
set_cost_oks = 4.0
|
71 |
+
cls_loss_coef = 2.0
|
72 |
+
# keypoints_loss_coef = 10.0
|
73 |
+
|
74 |
+
smpl_pose_loss_root_coef = 10 * 0.1
|
75 |
+
smpl_pose_loss_body_coef = 1 * 0.1
|
76 |
+
smpl_pose_loss_lhand_coef = 1 * 0.1
|
77 |
+
smpl_pose_loss_rhand_coef = 1 * 0.1
|
78 |
+
smpl_pose_loss_jaw_coef = 1 * 0.1
|
79 |
+
smpl_beta_loss_coef = 0.01
|
80 |
+
smpl_expr_loss_coef = 0.01
|
81 |
+
|
82 |
+
# smpl_kp3d_loss_coef = 10
|
83 |
+
smpl_body_kp3d_loss_coef = 10.0 * 0.1
|
84 |
+
smpl_face_kp3d_loss_coef = 1.0 * 0.1
|
85 |
+
smpl_lhand_kp3d_loss_coef = 1 * 0.1
|
86 |
+
smpl_rhand_kp3d_loss_coef = 1 * 0.1
|
87 |
+
|
88 |
+
# kp3d ra
|
89 |
+
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
|
90 |
+
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
|
91 |
+
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
|
92 |
+
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1
|
93 |
+
|
94 |
+
|
95 |
+
# smpl_kp2d_ba_loss_coef = 1.0
|
96 |
+
smpl_body_kp2d_loss_coef = 10.0 * 0.1
|
97 |
+
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
|
98 |
+
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
|
99 |
+
smpl_face_kp2d_loss_coef = 1.0 * 0.1
|
100 |
+
|
101 |
+
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
|
102 |
+
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
|
103 |
+
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
|
104 |
+
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1
|
105 |
+
|
106 |
+
bbox_loss_coef = 5.0
|
107 |
+
body_bbox_loss_coef = 5.0
|
108 |
+
lhand_bbox_loss_coef = 5.0
|
109 |
+
rhand_bbox_loss_coef = 5.0
|
110 |
+
face_bbox_loss_coef = 5.0
|
111 |
+
|
112 |
+
giou_loss_coef = 2.0
|
113 |
+
body_giou_loss_coef = 2.0
|
114 |
+
rhand_giou_loss_coef = 2.0
|
115 |
+
lhand_giou_loss_coef = 2.0
|
116 |
+
face_giou_loss_coef = 2.0
|
117 |
+
|
118 |
+
keypoints_loss_coef = 10.0
|
119 |
+
rhand_keypoints_loss_coef = 10.0
|
120 |
+
lhand_keypoints_loss_coef = 10.0
|
121 |
+
face_keypoints_loss_coef = 10.0
|
122 |
+
|
123 |
+
oks_loss_coef=4.0
|
124 |
+
rhand_oks_loss_coef = 0.5
|
125 |
+
lhand_oks_loss_coef = 0.5
|
126 |
+
face_oks_loss_coef = 4.0
|
127 |
+
|
128 |
+
|
129 |
+
enc_loss_coef = 1.0
|
130 |
+
interm_loss_coef = 1.0
|
131 |
+
no_interm_box_loss = False
|
132 |
+
focal_alpha = 0.25
|
133 |
+
rm_self_attn_layers = None
|
134 |
+
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]
|
135 |
+
|
136 |
+
decoder_sa_type = 'sa'
|
137 |
+
matcher_type = 'HungarianMatcher'
|
138 |
+
decoder_module_seq = ['sa', 'ca', 'ffn']
|
139 |
+
nms_iou_threshold = -1
|
140 |
+
|
141 |
+
dec_pred_bbox_embed_share = False
|
142 |
+
dec_pred_class_embed_share = False
|
143 |
+
dec_pred_pose_embed_share = False
|
144 |
+
body_only = True
|
145 |
+
|
146 |
+
# for dn
|
147 |
+
use_dn = True
|
148 |
+
dn_number = 100
|
149 |
+
dn_box_noise_scale = 0.4
|
150 |
+
dn_label_noise_ratio = 0.5
|
151 |
+
embed_init_tgt = False
|
152 |
+
dn_label_coef = 0.3
|
153 |
+
dn_bbox_coef = 0.5
|
154 |
+
dn_batch_gt_fuse = False
|
155 |
+
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
|
156 |
+
dn_labelbook_size = 100
|
157 |
+
|
158 |
+
match_unstable_error = False
|
159 |
+
|
160 |
+
# for ema
|
161 |
+
use_ema = True
|
162 |
+
ema_decay = 0.9997
|
163 |
+
ema_epoch = 0
|
164 |
+
|
165 |
+
cls_no_bias = False
|
166 |
+
num_body_points = 17 # for coco
|
167 |
+
num_hand_points = 6 # for coco
|
168 |
+
num_face_points = 6 # for coco
|
169 |
+
num_group = 100
|
170 |
+
num_box_decoder_layers = 2
|
171 |
+
num_hand_face_decoder_layers = 4
|
172 |
+
no_mmpose_keypoint_evaluator = True
|
173 |
+
strong_aug = False
|
174 |
+
|
175 |
+
body_model_test=\
|
176 |
+
dict(
|
177 |
+
type='smplx',
|
178 |
+
keypoint_src='smplx',
|
179 |
+
num_expression_coeffs=10,
|
180 |
+
num_betas=10,
|
181 |
+
keypoint_dst='smplx_137',
|
182 |
+
model_path='data/body_models/smplx',
|
183 |
+
use_pca=False,
|
184 |
+
use_face_contour=True)
|
185 |
+
|
186 |
+
body_model_train = \
|
187 |
+
dict(
|
188 |
+
type='smplx',
|
189 |
+
keypoint_src='smplx',
|
190 |
+
num_expression_coeffs=10,
|
191 |
+
num_betas=10,
|
192 |
+
keypoint_dst='smplx_137',
|
193 |
+
model_path='data/body_models/smplx',
|
194 |
+
use_pca=False,
|
195 |
+
use_face_contour=True)
|
196 |
+
|
197 |
+
# will be update in exp
|
198 |
+
exp_name = 'output/exp52/dataset_debug'
|
199 |
+
|
200 |
+
|
201 |
+
end_epoch = 150
|
202 |
+
train_batch_size = 32
|
203 |
+
|
204 |
+
scheduler = 'step'
|
205 |
+
step_size = 20
|
206 |
+
gamma = 0.1
|
207 |
+
|
208 |
+
# continue
|
209 |
+
continue_train = True
|
210 |
+
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'
|
211 |
+
|
212 |
+
# dataset setting
|
213 |
+
# dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
214 |
+
# trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
215 |
+
dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
216 |
+
trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
217 |
+
trainset_2d = []
|
218 |
+
trainset_partition = {
|
219 |
+
'AGORA_MM': 0.4,
|
220 |
+
'BEDLAM': 0.7,
|
221 |
+
'COCO_NA': 1,
|
222 |
+
|
223 |
+
# 'EgoBody_Egocentric': 1,
|
224 |
+
# 'EgoBody_Kinect': 1.0,
|
225 |
+
}
|
226 |
+
trainset_humandata = []
|
227 |
+
testset = 'INFERENCE_BEDLAM'
|
228 |
+
train_sizes=[480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
|
229 |
+
train_max_size=1333
|
230 |
+
test_sizes=[800]
|
231 |
+
test_max_size=1333
|
232 |
+
no_aug=False
|
233 |
+
# model
|
234 |
+
use_cache = True
|
235 |
+
|
236 |
+
## UBody setting
|
237 |
+
train_sample_interval = 10
|
238 |
+
test_sample_interval = 100
|
239 |
+
make_same_len = False
|
240 |
+
|
241 |
+
## input, output size
|
242 |
+
input_body_shape = (256, 192)
|
243 |
+
output_hm_shape = (16, 16, 12)
|
244 |
+
input_hand_shape = (256, 256)
|
245 |
+
output_hand_hm_shape = (16, 16, 16)
|
246 |
+
output_face_hm_shape = (8, 8, 8)
|
247 |
+
input_face_shape = (192, 192)
|
248 |
+
focal = (5000, 5000) # virtual focal lengths
|
249 |
+
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2
|
250 |
+
) # virtual principal point position
|
251 |
+
body_3d_size = 2
|
252 |
+
hand_3d_size = 0.3
|
253 |
+
face_3d_size = 0.3
|
254 |
+
camera_3d_size = 2.5
|
255 |
+
|
256 |
+
bbox_ratio = 1.2
|
257 |
+
|
258 |
+
## directory
|
259 |
+
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None
|
260 |
+
|
261 |
+
agora_benchmark = 'na' # 'agora_model', 'test_only'
|
262 |
+
|
263 |
+
# strategy
|
264 |
+
data_strategy = 'balance' # 'balance' need to define total_data_len
|
265 |
+
total_data_len = 'auto'
|
config/aios_smplx_demo.py
ADDED
@@ -0,0 +1,259 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
num_classes = 2
|
3 |
+
lr = 0.0001*1.414/10
|
4 |
+
param_dict_type = 'default'
|
5 |
+
lr_backbone = 1e-05*1.414/10
|
6 |
+
lr_backbone_names = ['backbone.0']
|
7 |
+
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
|
8 |
+
lr_linear_proj_mult = 0.1
|
9 |
+
ddetr_lr_param = False
|
10 |
+
batch_size = 2
|
11 |
+
weight_decay = 0.0001
|
12 |
+
epochs = 200
|
13 |
+
lr_drop = 11
|
14 |
+
save_checkpoint_interval = 1
|
15 |
+
clip_max_norm = 0.1
|
16 |
+
onecyclelr = False
|
17 |
+
multi_step_lr = True
|
18 |
+
lr_drop_list = [30, 60]
|
19 |
+
|
20 |
+
modelname = 'aios_smplx'
|
21 |
+
frozen_weights = None
|
22 |
+
backbone = 'resnet50'
|
23 |
+
use_checkpoint = False
|
24 |
+
|
25 |
+
dilation = False
|
26 |
+
position_embedding = 'sine'
|
27 |
+
pe_temperatureH = 20
|
28 |
+
pe_temperatureW = 20
|
29 |
+
return_interm_indices = [1, 2, 3]
|
30 |
+
backbone_freeze_keywords = None
|
31 |
+
enc_layers = 6
|
32 |
+
dec_layers = 6
|
33 |
+
pre_norm = False
|
34 |
+
dim_feedforward = 2048
|
35 |
+
hidden_dim = 256
|
36 |
+
dropout = 0.0
|
37 |
+
nheads = 8
|
38 |
+
num_queries = 900
|
39 |
+
query_dim = 4
|
40 |
+
num_patterns = 0
|
41 |
+
random_refpoints_xy = False
|
42 |
+
fix_refpoints_hw = -1
|
43 |
+
dec_layer_number = None
|
44 |
+
num_feature_levels = 4
|
45 |
+
enc_n_points = 4
|
46 |
+
dec_n_points = 4
|
47 |
+
dln_xy_noise = 0.2
|
48 |
+
dln_hw_noise = 0.2
|
49 |
+
two_stage_type = 'standard'
|
50 |
+
two_stage_bbox_embed_share = False
|
51 |
+
two_stage_class_embed_share = False
|
52 |
+
two_stage_learn_wh = False
|
53 |
+
two_stage_default_hw = 0.05
|
54 |
+
two_stage_keep_all_tokens = False
|
55 |
+
rm_detach = None
|
56 |
+
num_select = 50
|
57 |
+
transformer_activation = 'relu'
|
58 |
+
batch_norm_type = 'FrozenBatchNorm2d'
|
59 |
+
|
60 |
+
masks = False
|
61 |
+
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
|
62 |
+
"smpl_kp2d","smpl_kp3d","smpl_kp3d_ra",'labels', 'boxes', "keypoints"]
|
63 |
+
# losses = ['labels', 'boxes', "keypoints"]
|
64 |
+
aux_loss = True
|
65 |
+
set_cost_class = 2.0
|
66 |
+
set_cost_bbox = 5.0
|
67 |
+
set_cost_giou = 2.0
|
68 |
+
set_cost_keypoints = 10.0
|
69 |
+
set_cost_kpvis = 0.0
|
70 |
+
set_cost_oks = 4.0
|
71 |
+
cls_loss_coef = 2.0
|
72 |
+
# keypoints_loss_coef = 10.0
|
73 |
+
|
74 |
+
smpl_pose_loss_root_coef = 10 * 0.1
|
75 |
+
smpl_pose_loss_body_coef = 1 * 0.1
|
76 |
+
smpl_pose_loss_lhand_coef = 1 * 0.1
|
77 |
+
smpl_pose_loss_rhand_coef = 1 * 0.1
|
78 |
+
smpl_pose_loss_jaw_coef = 1 * 0.1
|
79 |
+
smpl_beta_loss_coef = 0.01
|
80 |
+
smpl_expr_loss_coef = 0.01
|
81 |
+
|
82 |
+
# smpl_kp3d_loss_coef = 10
|
83 |
+
smpl_body_kp3d_loss_coef = 10.0 * 0.1
|
84 |
+
smpl_face_kp3d_loss_coef = 1.0 * 0.1
|
85 |
+
smpl_lhand_kp3d_loss_coef = 1 * 0.1
|
86 |
+
smpl_rhand_kp3d_loss_coef = 1 * 0.1
|
87 |
+
|
88 |
+
# kp3d ra
|
89 |
+
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
|
90 |
+
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
|
91 |
+
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
|
92 |
+
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1
|
93 |
+
|
94 |
+
|
95 |
+
# smpl_kp2d_ba_loss_coef = 1.0
|
96 |
+
smpl_body_kp2d_loss_coef = 10.0 * 0.1
|
97 |
+
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
|
98 |
+
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
|
99 |
+
smpl_face_kp2d_loss_coef = 1.0 * 0.1
|
100 |
+
|
101 |
+
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
|
102 |
+
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
|
103 |
+
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
|
104 |
+
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1
|
105 |
+
|
106 |
+
bbox_loss_coef = 5.0
|
107 |
+
body_bbox_loss_coef = 5.0
|
108 |
+
lhand_bbox_loss_coef = 5.0
|
109 |
+
rhand_bbox_loss_coef = 5.0
|
110 |
+
face_bbox_loss_coef = 5.0
|
111 |
+
|
112 |
+
giou_loss_coef = 2.0
|
113 |
+
body_giou_loss_coef = 2.0
|
114 |
+
rhand_giou_loss_coef = 2.0
|
115 |
+
lhand_giou_loss_coef = 2.0
|
116 |
+
face_giou_loss_coef = 2.0
|
117 |
+
|
118 |
+
keypoints_loss_coef = 10.0
|
119 |
+
rhand_keypoints_loss_coef = 10.0
|
120 |
+
lhand_keypoints_loss_coef = 10.0
|
121 |
+
face_keypoints_loss_coef = 10.0
|
122 |
+
|
123 |
+
oks_loss_coef=4.0
|
124 |
+
rhand_oks_loss_coef = 0.5
|
125 |
+
lhand_oks_loss_coef = 0.5
|
126 |
+
face_oks_loss_coef = 4.0
|
127 |
+
|
128 |
+
|
129 |
+
enc_loss_coef = 1.0
|
130 |
+
interm_loss_coef = 1.0
|
131 |
+
no_interm_box_loss = False
|
132 |
+
focal_alpha = 0.25
|
133 |
+
rm_self_attn_layers = None
|
134 |
+
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]
|
135 |
+
|
136 |
+
decoder_sa_type = 'sa'
|
137 |
+
matcher_type = 'HungarianMatcher'
|
138 |
+
decoder_module_seq = ['sa', 'ca', 'ffn']
|
139 |
+
nms_iou_threshold = -1
|
140 |
+
|
141 |
+
dec_pred_bbox_embed_share = False
|
142 |
+
dec_pred_class_embed_share = False
|
143 |
+
dec_pred_pose_embed_share = False
|
144 |
+
body_only = True
|
145 |
+
|
146 |
+
# for dn
|
147 |
+
use_dn = True
|
148 |
+
dn_number = 100
|
149 |
+
dn_box_noise_scale = 0.4
|
150 |
+
dn_label_noise_ratio = 0.5
|
151 |
+
embed_init_tgt = False
|
152 |
+
dn_label_coef = 0.3
|
153 |
+
dn_bbox_coef = 0.5
|
154 |
+
dn_batch_gt_fuse = False
|
155 |
+
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
|
156 |
+
dn_labelbook_size = 100
|
157 |
+
|
158 |
+
match_unstable_error = False
|
159 |
+
|
160 |
+
# for ema
|
161 |
+
use_ema = True
|
162 |
+
ema_decay = 0.9997
|
163 |
+
ema_epoch = 0
|
164 |
+
|
165 |
+
cls_no_bias = False
|
166 |
+
num_body_points = 17 # for coco
|
167 |
+
num_hand_points = 6 # for coco
|
168 |
+
num_face_points = 6 # for coco
|
169 |
+
num_group = 100
|
170 |
+
num_box_decoder_layers = 2
|
171 |
+
num_hand_face_decoder_layers = 4
|
172 |
+
no_mmpose_keypoint_evaluator = True
|
173 |
+
strong_aug = False
|
174 |
+
|
175 |
+
body_model_test=\
|
176 |
+
dict(
|
177 |
+
type='smplx',
|
178 |
+
keypoint_src='smplx',
|
179 |
+
num_expression_coeffs=10,
|
180 |
+
num_betas=10,
|
181 |
+
keypoint_dst='smplx_137',
|
182 |
+
model_path='data/body_models/smplx',
|
183 |
+
use_pca=False,
|
184 |
+
use_face_contour=True)
|
185 |
+
|
186 |
+
body_model_train = \
|
187 |
+
dict(
|
188 |
+
type='smplx',
|
189 |
+
keypoint_src='smplx',
|
190 |
+
num_expression_coeffs=10,
|
191 |
+
num_betas=10,
|
192 |
+
keypoint_dst='smplx_137',
|
193 |
+
model_path='data/body_models/smplx',
|
194 |
+
use_pca=False,
|
195 |
+
use_face_contour=True)
|
196 |
+
|
197 |
+
# will be update in exp
|
198 |
+
exp_name = 'output/exp52/dataset_debug'
|
199 |
+
|
200 |
+
|
201 |
+
end_epoch = 150
|
202 |
+
train_batch_size = 32
|
203 |
+
|
204 |
+
scheduler = 'step'
|
205 |
+
step_size = 20
|
206 |
+
gamma = 0.1
|
207 |
+
|
208 |
+
# continue
|
209 |
+
continue_train = True
|
210 |
+
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'
|
211 |
+
|
212 |
+
# dataset setting
|
213 |
+
# dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
214 |
+
# trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
215 |
+
dataset_list = ['INFERENCE_demo']
|
216 |
+
trainset_3d = []
|
217 |
+
trainset_2d = []
|
218 |
+
trainset_partition = {
|
219 |
+
}
|
220 |
+
trainset_humandata = []
|
221 |
+
testset = 'INFERENCE_demo'
|
222 |
+
train_sizes=[480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
|
223 |
+
train_max_size=1333
|
224 |
+
test_sizes=[800]
|
225 |
+
test_max_size=1333
|
226 |
+
no_aug=False
|
227 |
+
# model
|
228 |
+
use_cache = True
|
229 |
+
|
230 |
+
## UBody setting
|
231 |
+
train_sample_interval = 10
|
232 |
+
test_sample_interval = 100
|
233 |
+
make_same_len = False
|
234 |
+
|
235 |
+
## input, output size
|
236 |
+
input_body_shape = (256, 192)
|
237 |
+
output_hm_shape = (16, 16, 12)
|
238 |
+
input_hand_shape = (256, 256)
|
239 |
+
output_hand_hm_shape = (16, 16, 16)
|
240 |
+
output_face_hm_shape = (8, 8, 8)
|
241 |
+
input_face_shape = (192, 192)
|
242 |
+
focal = (5000, 5000) # virtual focal lengths
|
243 |
+
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2
|
244 |
+
) # virtual principal point position
|
245 |
+
body_3d_size = 2
|
246 |
+
hand_3d_size = 0.3
|
247 |
+
face_3d_size = 0.3
|
248 |
+
camera_3d_size = 2.5
|
249 |
+
|
250 |
+
bbox_ratio = 1.2
|
251 |
+
|
252 |
+
## directory
|
253 |
+
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None
|
254 |
+
|
255 |
+
agora_benchmark = 'na' # 'agora_model', 'test_only'
|
256 |
+
|
257 |
+
# strategy
|
258 |
+
data_strategy = 'balance' # 'balance' need to define total_data_len
|
259 |
+
total_data_len = 'auto'
|
config/aios_smplx_inference.py
ADDED
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
num_classes = 2
|
3 |
+
lr = 0.0001*1.414/10
|
4 |
+
param_dict_type = 'default'
|
5 |
+
lr_backbone = 1e-05*1.414/10
|
6 |
+
lr_backbone_names = ['backbone.0']
|
7 |
+
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
|
8 |
+
lr_linear_proj_mult = 0.1
|
9 |
+
ddetr_lr_param = False
|
10 |
+
batch_size = 2
|
11 |
+
weight_decay = 0.0001
|
12 |
+
epochs = 200
|
13 |
+
lr_drop = 11
|
14 |
+
save_checkpoint_interval = 1
|
15 |
+
clip_max_norm = 0.1
|
16 |
+
onecyclelr = False
|
17 |
+
multi_step_lr = True
|
18 |
+
lr_drop_list = [30, 60]
|
19 |
+
|
20 |
+
modelname = 'aios_smplx'
|
21 |
+
frozen_weights = None
|
22 |
+
backbone = 'resnet50'
|
23 |
+
use_checkpoint = False
|
24 |
+
|
25 |
+
dilation = False
|
26 |
+
position_embedding = 'sine'
|
27 |
+
pe_temperatureH = 20
|
28 |
+
pe_temperatureW = 20
|
29 |
+
return_interm_indices = [1, 2, 3]
|
30 |
+
backbone_freeze_keywords = None
|
31 |
+
enc_layers = 6
|
32 |
+
dec_layers = 6
|
33 |
+
pre_norm = False
|
34 |
+
dim_feedforward = 2048
|
35 |
+
hidden_dim = 256
|
36 |
+
dropout = 0.0
|
37 |
+
nheads = 8
|
38 |
+
num_queries = 900
|
39 |
+
query_dim = 4
|
40 |
+
num_patterns = 0
|
41 |
+
random_refpoints_xy = False
|
42 |
+
fix_refpoints_hw = -1
|
43 |
+
dec_layer_number = None
|
44 |
+
num_feature_levels = 4
|
45 |
+
enc_n_points = 4
|
46 |
+
dec_n_points = 4
|
47 |
+
dln_xy_noise = 0.2
|
48 |
+
dln_hw_noise = 0.2
|
49 |
+
two_stage_type = 'standard'
|
50 |
+
two_stage_bbox_embed_share = False
|
51 |
+
two_stage_class_embed_share = False
|
52 |
+
two_stage_learn_wh = False
|
53 |
+
two_stage_default_hw = 0.05
|
54 |
+
two_stage_keep_all_tokens = False
|
55 |
+
rm_detach = None
|
56 |
+
num_select = 50
|
57 |
+
transformer_activation = 'relu'
|
58 |
+
batch_norm_type = 'FrozenBatchNorm2d'
|
59 |
+
|
60 |
+
masks = False
|
61 |
+
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
|
62 |
+
"smpl_kp2d","smpl_kp3d","smpl_kp3d_ra",'labels', 'boxes', "keypoints"]
|
63 |
+
# losses = ['labels', 'boxes', "keypoints"]
|
64 |
+
aux_loss = True
|
65 |
+
set_cost_class = 2.0
|
66 |
+
set_cost_bbox = 5.0
|
67 |
+
set_cost_giou = 2.0
|
68 |
+
set_cost_keypoints = 10.0
|
69 |
+
set_cost_kpvis = 0.0
|
70 |
+
set_cost_oks = 4.0
|
71 |
+
cls_loss_coef = 2.0
|
72 |
+
# keypoints_loss_coef = 10.0
|
73 |
+
|
74 |
+
smpl_pose_loss_root_coef = 10 * 0.1
|
75 |
+
smpl_pose_loss_body_coef = 1 * 0.1
|
76 |
+
smpl_pose_loss_lhand_coef = 1 * 0.1
|
77 |
+
smpl_pose_loss_rhand_coef = 1 * 0.1
|
78 |
+
smpl_pose_loss_jaw_coef = 1 * 0.1
|
79 |
+
smpl_beta_loss_coef = 0.01
|
80 |
+
smpl_expr_loss_coef = 0.01
|
81 |
+
|
82 |
+
# smpl_kp3d_loss_coef = 10
|
83 |
+
smpl_body_kp3d_loss_coef = 10.0 * 0.1
|
84 |
+
smpl_face_kp3d_loss_coef = 1.0 * 0.1
|
85 |
+
smpl_lhand_kp3d_loss_coef = 1 * 0.1
|
86 |
+
smpl_rhand_kp3d_loss_coef = 1 * 0.1
|
87 |
+
|
88 |
+
# kp3d ra
|
89 |
+
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
|
90 |
+
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
|
91 |
+
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
|
92 |
+
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1
|
93 |
+
|
94 |
+
|
95 |
+
# smpl_kp2d_ba_loss_coef = 1.0
|
96 |
+
smpl_body_kp2d_loss_coef = 10.0 * 0.1
|
97 |
+
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
|
98 |
+
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
|
99 |
+
smpl_face_kp2d_loss_coef = 1.0 * 0.1
|
100 |
+
|
101 |
+
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
|
102 |
+
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
|
103 |
+
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
|
104 |
+
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1
|
105 |
+
|
106 |
+
bbox_loss_coef = 5.0
|
107 |
+
body_bbox_loss_coef = 5.0
|
108 |
+
lhand_bbox_loss_coef = 5.0
|
109 |
+
rhand_bbox_loss_coef = 5.0
|
110 |
+
face_bbox_loss_coef = 5.0
|
111 |
+
|
112 |
+
giou_loss_coef = 2.0
|
113 |
+
body_giou_loss_coef = 2.0
|
114 |
+
rhand_giou_loss_coef = 2.0
|
115 |
+
lhand_giou_loss_coef = 2.0
|
116 |
+
face_giou_loss_coef = 2.0
|
117 |
+
|
118 |
+
keypoints_loss_coef = 10.0
|
119 |
+
rhand_keypoints_loss_coef = 10.0
|
120 |
+
lhand_keypoints_loss_coef = 10.0
|
121 |
+
face_keypoints_loss_coef = 10.0
|
122 |
+
|
123 |
+
oks_loss_coef=4.0
|
124 |
+
rhand_oks_loss_coef = 0.5
|
125 |
+
lhand_oks_loss_coef = 0.5
|
126 |
+
face_oks_loss_coef = 4.0
|
127 |
+
|
128 |
+
|
129 |
+
enc_loss_coef = 1.0
|
130 |
+
interm_loss_coef = 1.0
|
131 |
+
no_interm_box_loss = False
|
132 |
+
focal_alpha = 0.25
|
133 |
+
rm_self_attn_layers = None
|
134 |
+
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]
|
135 |
+
|
136 |
+
decoder_sa_type = 'sa'
|
137 |
+
matcher_type = 'HungarianMatcher'
|
138 |
+
decoder_module_seq = ['sa', 'ca', 'ffn']
|
139 |
+
nms_iou_threshold = -1
|
140 |
+
|
141 |
+
dec_pred_bbox_embed_share = False
|
142 |
+
dec_pred_class_embed_share = False
|
143 |
+
dec_pred_pose_embed_share = False
|
144 |
+
body_only = True
|
145 |
+
|
146 |
+
# for dn
|
147 |
+
use_dn = True
|
148 |
+
dn_number = 100
|
149 |
+
dn_box_noise_scale = 0.4
|
150 |
+
dn_label_noise_ratio = 0.5
|
151 |
+
embed_init_tgt = False
|
152 |
+
dn_label_coef = 0.3
|
153 |
+
dn_bbox_coef = 0.5
|
154 |
+
dn_batch_gt_fuse = False
|
155 |
+
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
|
156 |
+
dn_labelbook_size = 100
|
157 |
+
|
158 |
+
match_unstable_error = False
|
159 |
+
|
160 |
+
# for ema
|
161 |
+
use_ema = True
|
162 |
+
ema_decay = 0.9997
|
163 |
+
ema_epoch = 0
|
164 |
+
|
165 |
+
cls_no_bias = False
|
166 |
+
num_body_points = 17 # for coco
|
167 |
+
num_hand_points = 6 # for coco
|
168 |
+
num_face_points = 6 # for coco
|
169 |
+
num_group = 100
|
170 |
+
num_box_decoder_layers = 2
|
171 |
+
num_hand_face_decoder_layers = 4
|
172 |
+
no_mmpose_keypoint_evaluator = True
|
173 |
+
strong_aug = False
|
174 |
+
|
175 |
+
body_model_test=\
|
176 |
+
dict(
|
177 |
+
type='smplx',
|
178 |
+
keypoint_src='smplx',
|
179 |
+
num_expression_coeffs=10,
|
180 |
+
num_betas=10,
|
181 |
+
keypoint_dst='smplx_137',
|
182 |
+
model_path='data/body_models/smplx',
|
183 |
+
use_pca=False,
|
184 |
+
use_face_contour=True)
|
185 |
+
|
186 |
+
body_model_train = \
|
187 |
+
dict(
|
188 |
+
type='smplx',
|
189 |
+
keypoint_src='smplx',
|
190 |
+
num_expression_coeffs=10,
|
191 |
+
num_betas=10,
|
192 |
+
keypoint_dst='smplx_137',
|
193 |
+
model_path='data/body_models/smplx',
|
194 |
+
use_pca=False,
|
195 |
+
use_face_contour=True)
|
196 |
+
|
197 |
+
# will be update in exp
|
198 |
+
exp_name = 'output/exp52/dataset_debug'
|
199 |
+
|
200 |
+
|
201 |
+
end_epoch = 150
|
202 |
+
train_batch_size = 32
|
203 |
+
|
204 |
+
scheduler = 'step'
|
205 |
+
step_size = 20
|
206 |
+
gamma = 0.1
|
207 |
+
|
208 |
+
# continue
|
209 |
+
continue_train = True
|
210 |
+
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'
|
211 |
+
|
212 |
+
# dataset setting
|
213 |
+
# dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
214 |
+
# trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
215 |
+
dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
216 |
+
trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
217 |
+
trainset_2d = []
|
218 |
+
trainset_partition = {
|
219 |
+
'AGORA_MM': 0.4,
|
220 |
+
'BEDLAM': 0.7,
|
221 |
+
'COCO_NA': 1,
|
222 |
+
|
223 |
+
# 'EgoBody_Egocentric': 1,
|
224 |
+
# 'EgoBody_Kinect': 1.0,
|
225 |
+
}
|
226 |
+
trainset_humandata = []
|
227 |
+
testset = 'INFERENCE'
|
228 |
+
train_sizes=[480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
|
229 |
+
train_max_size=1333
|
230 |
+
test_sizes=[800]
|
231 |
+
test_max_size=1333
|
232 |
+
no_aug=False
|
233 |
+
# model
|
234 |
+
use_cache = True
|
235 |
+
|
236 |
+
## UBody setting
|
237 |
+
train_sample_interval = 10
|
238 |
+
test_sample_interval = 100
|
239 |
+
make_same_len = False
|
240 |
+
|
241 |
+
## input, output size
|
242 |
+
input_body_shape = (256, 192)
|
243 |
+
output_hm_shape = (16, 16, 12)
|
244 |
+
input_hand_shape = (256, 256)
|
245 |
+
output_hand_hm_shape = (16, 16, 16)
|
246 |
+
output_face_hm_shape = (8, 8, 8)
|
247 |
+
input_face_shape = (192, 192)
|
248 |
+
focal = (5000, 5000) # virtual focal lengths
|
249 |
+
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2
|
250 |
+
) # virtual principal point position
|
251 |
+
body_3d_size = 2
|
252 |
+
hand_3d_size = 0.3
|
253 |
+
face_3d_size = 0.3
|
254 |
+
camera_3d_size = 2.5
|
255 |
+
|
256 |
+
bbox_ratio = 1.2
|
257 |
+
|
258 |
+
## directory
|
259 |
+
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None
|
260 |
+
|
261 |
+
agora_benchmark = 'na' # 'agora_model', 'test_only'
|
262 |
+
|
263 |
+
# strategy
|
264 |
+
data_strategy = 'balance' # 'balance' need to define total_data_len
|
265 |
+
total_data_len = 'auto'
|
config/aios_smplx_pretrain.py
ADDED
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
num_classes = 2
|
2 |
+
lr = 0.0001
|
3 |
+
param_dict_type = 'default'
|
4 |
+
lr_backbone = 1e-05
|
5 |
+
lr_backbone_names = ['backbone.0']
|
6 |
+
lr_linear_proj_names = ['reference_points', 'sampling_offsets']
|
7 |
+
lr_linear_proj_mult = 0.1
|
8 |
+
ddetr_lr_param = False
|
9 |
+
batch_size = 2
|
10 |
+
weight_decay = 0.0001
|
11 |
+
epochs = 200
|
12 |
+
lr_drop = 11
|
13 |
+
save_checkpoint_interval = 1
|
14 |
+
clip_max_norm = 0.1
|
15 |
+
onecyclelr = False
|
16 |
+
multi_step_lr = True
|
17 |
+
lr_drop_list = [30, 60]
|
18 |
+
|
19 |
+
modelname = 'aios_smplx'
|
20 |
+
frozen_weights = None
|
21 |
+
backbone = 'resnet50'
|
22 |
+
use_checkpoint = False
|
23 |
+
|
24 |
+
dilation = False
|
25 |
+
position_embedding = 'sine'
|
26 |
+
pe_temperatureH = 20
|
27 |
+
pe_temperatureW = 20
|
28 |
+
return_interm_indices = [1, 2, 3]
|
29 |
+
backbone_freeze_keywords = None
|
30 |
+
enc_layers = 6
|
31 |
+
dec_layers = 6
|
32 |
+
pre_norm = False
|
33 |
+
dim_feedforward = 2048
|
34 |
+
hidden_dim = 256
|
35 |
+
dropout = 0.0
|
36 |
+
nheads = 8
|
37 |
+
num_queries = 900
|
38 |
+
query_dim = 4
|
39 |
+
num_patterns = 0
|
40 |
+
random_refpoints_xy = False
|
41 |
+
fix_refpoints_hw = -1
|
42 |
+
dec_layer_number = None
|
43 |
+
num_feature_levels = 4
|
44 |
+
enc_n_points = 4
|
45 |
+
dec_n_points = 4
|
46 |
+
dln_xy_noise = 0.2
|
47 |
+
dln_hw_noise = 0.2
|
48 |
+
two_stage_type = 'standard'
|
49 |
+
two_stage_bbox_embed_share = False
|
50 |
+
two_stage_class_embed_share = False
|
51 |
+
two_stage_learn_wh = False
|
52 |
+
two_stage_default_hw = 0.05
|
53 |
+
two_stage_keep_all_tokens = False
|
54 |
+
rm_detach = None
|
55 |
+
num_select = 50
|
56 |
+
transformer_activation = 'relu'
|
57 |
+
batch_norm_type = 'FrozenBatchNorm2d'
|
58 |
+
|
59 |
+
masks = False
|
60 |
+
losses = ["smpl_pose", "smpl_beta", "smpl_expr",
|
61 |
+
"smpl_kp2d","smpl_kp3d","smpl_kp3d_ra",'labels', 'boxes', "keypoints"]
|
62 |
+
# losses = ['labels', 'boxes', "keypoints"]
|
63 |
+
aux_loss = True
|
64 |
+
set_cost_class = 2.0
|
65 |
+
set_cost_bbox = 5.0
|
66 |
+
set_cost_giou = 2.0
|
67 |
+
set_cost_keypoints = 10.0
|
68 |
+
set_cost_kpvis = 0.0
|
69 |
+
set_cost_oks = 4.0
|
70 |
+
cls_loss_coef = 2.0
|
71 |
+
# keypoints_loss_coef = 10.0
|
72 |
+
|
73 |
+
smpl_pose_loss_root_coef = 10 * 0.1
|
74 |
+
smpl_pose_loss_body_coef = 1 * 0.1
|
75 |
+
smpl_pose_loss_lhand_coef = 1 * 0.1
|
76 |
+
smpl_pose_loss_rhand_coef = 1 * 0.1
|
77 |
+
smpl_pose_loss_jaw_coef = 1 * 0.1
|
78 |
+
smpl_beta_loss_coef = 0.01
|
79 |
+
smpl_expr_loss_coef = 0.01
|
80 |
+
|
81 |
+
# smpl_kp3d_loss_coef = 10
|
82 |
+
smpl_body_kp3d_loss_coef = 10.0 * 0.1
|
83 |
+
smpl_face_kp3d_loss_coef = 1.0 * 0.1
|
84 |
+
smpl_lhand_kp3d_loss_coef = 1 * 0.1
|
85 |
+
smpl_rhand_kp3d_loss_coef = 1 * 0.1
|
86 |
+
|
87 |
+
# kp3d ra
|
88 |
+
smpl_body_kp3d_ra_loss_coef = 10 * 0.1
|
89 |
+
smpl_face_kp3d_ra_loss_coef = 1 * 0.1
|
90 |
+
smpl_lhand_kp3d_ra_loss_coef = 1 * 0.1
|
91 |
+
smpl_rhand_kp3d_ra_loss_coef = 1 * 0.1
|
92 |
+
|
93 |
+
|
94 |
+
# smpl_kp2d_ba_loss_coef = 1.0
|
95 |
+
smpl_body_kp2d_loss_coef = 10.0 * 0.1
|
96 |
+
smpl_lhand_kp2d_loss_coef = 5.0 * 0.1
|
97 |
+
smpl_rhand_kp2d_loss_coef = 5.0 * 0.1
|
98 |
+
smpl_face_kp2d_loss_coef = 1.0 * 0.1
|
99 |
+
|
100 |
+
smpl_body_kp2d_ba_loss_coef = 0 * 0.1
|
101 |
+
smpl_face_kp2d_ba_loss_coef = 0 * 0.1
|
102 |
+
smpl_lhand_kp2d_ba_loss_coef = 0 * 0.1
|
103 |
+
smpl_rhand_kp2d_ba_loss_coef = 0 * 0.1
|
104 |
+
|
105 |
+
bbox_loss_coef = 5.0
|
106 |
+
body_bbox_loss_coef = 5.0
|
107 |
+
lhand_bbox_loss_coef = 5.0
|
108 |
+
rhand_bbox_loss_coef = 5.0
|
109 |
+
face_bbox_loss_coef = 5.0
|
110 |
+
|
111 |
+
giou_loss_coef = 2.0
|
112 |
+
body_giou_loss_coef = 2.0
|
113 |
+
rhand_giou_loss_coef = 2.0
|
114 |
+
lhand_giou_loss_coef = 2.0
|
115 |
+
face_giou_loss_coef = 2.0
|
116 |
+
|
117 |
+
keypoints_loss_coef = 10.0
|
118 |
+
rhand_keypoints_loss_coef = 10.0
|
119 |
+
lhand_keypoints_loss_coef = 10.0
|
120 |
+
face_keypoints_loss_coef = 10.0
|
121 |
+
|
122 |
+
oks_loss_coef=4.0
|
123 |
+
rhand_oks_loss_coef = 0.5
|
124 |
+
lhand_oks_loss_coef = 0.5
|
125 |
+
face_oks_loss_coef = 4.0
|
126 |
+
|
127 |
+
|
128 |
+
enc_loss_coef = 1.0
|
129 |
+
interm_loss_coef = 1.0
|
130 |
+
no_interm_box_loss = False
|
131 |
+
focal_alpha = 0.25
|
132 |
+
rm_self_attn_layers = None
|
133 |
+
indices_idx_list = [1, 2, 3, 4, 5, 6, 7]
|
134 |
+
|
135 |
+
decoder_sa_type = 'sa'
|
136 |
+
matcher_type = 'HungarianMatcher'
|
137 |
+
decoder_module_seq = ['sa', 'ca', 'ffn']
|
138 |
+
nms_iou_threshold = -1
|
139 |
+
|
140 |
+
dec_pred_bbox_embed_share = False
|
141 |
+
dec_pred_class_embed_share = False
|
142 |
+
dec_pred_pose_embed_share = False
|
143 |
+
body_only = True
|
144 |
+
|
145 |
+
# for dn
|
146 |
+
use_dn = True
|
147 |
+
dn_number = 100
|
148 |
+
dn_box_noise_scale = 0.4
|
149 |
+
dn_label_noise_ratio = 0.5
|
150 |
+
embed_init_tgt = False
|
151 |
+
dn_label_coef = 0.3
|
152 |
+
dn_bbox_coef = 0.5
|
153 |
+
dn_batch_gt_fuse = False
|
154 |
+
dn_attn_mask_type_list = ['match2dn', 'dn2dn', 'group2group']
|
155 |
+
dn_labelbook_size = 100
|
156 |
+
|
157 |
+
match_unstable_error = False
|
158 |
+
|
159 |
+
# for ema
|
160 |
+
use_ema = True
|
161 |
+
ema_decay = 0.9997
|
162 |
+
ema_epoch = 0
|
163 |
+
|
164 |
+
cls_no_bias = False
|
165 |
+
num_body_points = 17 # for coco
|
166 |
+
num_hand_points = 6 # for coco
|
167 |
+
num_face_points = 6 # for coco
|
168 |
+
num_group = 100
|
169 |
+
num_box_decoder_layers = 2
|
170 |
+
num_hand_face_decoder_layers = 4
|
171 |
+
no_mmpose_keypoint_evaluator = True
|
172 |
+
strong_aug = False
|
173 |
+
|
174 |
+
body_model_test=\
|
175 |
+
dict(
|
176 |
+
type='smplx',
|
177 |
+
keypoint_src='smplx',
|
178 |
+
num_expression_coeffs=10,
|
179 |
+
num_betas=10,
|
180 |
+
keypoint_dst='smplx_137',
|
181 |
+
model_path='data/body_models/smplx',
|
182 |
+
use_pca=False,
|
183 |
+
use_face_contour=True)
|
184 |
+
|
185 |
+
body_model_train = \
|
186 |
+
dict(
|
187 |
+
type='smplx',
|
188 |
+
keypoint_src='smplx',
|
189 |
+
num_expression_coeffs=10,
|
190 |
+
num_betas=10,
|
191 |
+
keypoint_dst='smplx_137',
|
192 |
+
model_path='data/body_models/smplx',
|
193 |
+
use_pca=False,
|
194 |
+
use_face_contour=True)
|
195 |
+
|
196 |
+
# will be update in exp
|
197 |
+
exp_name = 'output/exp52/dataset_debug'
|
198 |
+
|
199 |
+
|
200 |
+
end_epoch = 150
|
201 |
+
train_batch_size = 32
|
202 |
+
|
203 |
+
scheduler = 'step'
|
204 |
+
step_size = 20
|
205 |
+
gamma = 0.1
|
206 |
+
|
207 |
+
# continue
|
208 |
+
continue_train = True
|
209 |
+
pretrained_model_path = '../output/train_gta_synbody_ft_20230410_132110/model_dump/snapshot_2.pth.tar'
|
210 |
+
|
211 |
+
# dataset setting
|
212 |
+
# dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
213 |
+
# trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
214 |
+
dataset_list = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
215 |
+
trainset_3d = ['AGORA_MM','BEDLAM', 'COCO_NA']
|
216 |
+
trainset_2d = []
|
217 |
+
trainset_partition = {
|
218 |
+
'AGORA_MM': 0.4,
|
219 |
+
'BEDLAM': 0.7,
|
220 |
+
'COCO_NA': 1,
|
221 |
+
|
222 |
+
# 'EgoBody_Egocentric': 1,
|
223 |
+
# 'EgoBody_Kinect': 1.0,
|
224 |
+
}
|
225 |
+
trainset_humandata = []
|
226 |
+
testset = 'AGORA_MM'
|
227 |
+
train_sizes=[480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800]
|
228 |
+
train_max_size=1333
|
229 |
+
test_sizes=[800]
|
230 |
+
test_max_size=1333
|
231 |
+
no_aug=False
|
232 |
+
# model
|
233 |
+
use_cache = True
|
234 |
+
|
235 |
+
## UBody setting
|
236 |
+
train_sample_interval = 10
|
237 |
+
test_sample_interval = 100
|
238 |
+
make_same_len = False
|
239 |
+
|
240 |
+
## input, output size
|
241 |
+
input_body_shape = (256, 192)
|
242 |
+
output_hm_shape = (16, 16, 12)
|
243 |
+
input_hand_shape = (256, 256)
|
244 |
+
output_hand_hm_shape = (16, 16, 16)
|
245 |
+
output_face_hm_shape = (8, 8, 8)
|
246 |
+
input_face_shape = (192, 192)
|
247 |
+
focal = (5000, 5000) # virtual focal lengths
|
248 |
+
princpt = (input_body_shape[1] / 2, input_body_shape[0] / 2
|
249 |
+
) # virtual principal point position
|
250 |
+
body_3d_size = 2
|
251 |
+
hand_3d_size = 0.3
|
252 |
+
face_3d_size = 0.3
|
253 |
+
camera_3d_size = 2.5
|
254 |
+
|
255 |
+
bbox_ratio = 1.2
|
256 |
+
|
257 |
+
## directory
|
258 |
+
output_dir, model_dir, vis_dir, log_dir, result_dir, code_dir = None, None, None, None, None, None
|
259 |
+
|
260 |
+
agora_benchmark = 'na' # 'agora_model', 'test_only'
|
261 |
+
|
262 |
+
# strategy
|
263 |
+
data_strategy = 'balance' # 'balance' need to define total_data_len
|
264 |
+
total_data_len = 'auto'
|
config/config.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import os.path as osp
|
3 |
+
import sys
|
4 |
+
import datetime
|
5 |
+
from mmcv import Config as MMConfig
|
6 |
+
|
7 |
+
class Config(MMConfig):
|
8 |
+
def __init__(self, cfg_dict=None, cfg_text=None, filename=None):
|
9 |
+
super().__init__(cfg_dict, cfg_text, filename)
|
10 |
+
|
11 |
+
def get_config_fromfile(self, config_path):
|
12 |
+
self.config_path = config_path
|
13 |
+
|
14 |
+
cfg, _ = MMConfig._file2dict(self.config_path)
|
15 |
+
|
16 |
+
self.merge_from_dict(cfg)
|
17 |
+
# #import ipdb;ipdb.set_trace()
|
18 |
+
# self.__dict__.update(dict(cfg))
|
19 |
+
# # update dir
|
20 |
+
dir_dict = {}
|
21 |
+
exp_name = 'exps62'
|
22 |
+
time_str = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
|
23 |
+
dir_dict['cur_dir'] = osp.dirname(os.path.abspath(__file__))
|
24 |
+
dir_dict['root_dir'] = osp.join(dir_dict['cur_dir'], '..')
|
25 |
+
dir_dict['output_dir'] = osp.join(dir_dict['root_dir'], exp_name)
|
26 |
+
dir_dict['result_dir'] = osp.join(dir_dict['output_dir'], 'result')
|
27 |
+
dir_dict['data_dir'] = osp.join(dir_dict['root_dir'], 'dataset')
|
28 |
+
dir_dict['human_model_path'] = osp.join('data/body_models')
|
29 |
+
self.merge_from_dict(dir_dict)
|
30 |
+
#
|
31 |
+
# ## add some paths to the system root dir
|
32 |
+
sys.path.insert(0, osp.join(self.root_dir, 'common'))
|
33 |
+
sys.path.insert(0, osp.join(self.root_dir, 'united-perception_utils'))
|
34 |
+
sys.path.insert(0, osp.join(self.cur_dir, 'humanbench_utils'))
|
35 |
+
sys.path.insert(0, osp.join(self.cur_dir, 'dinov2_utils'))
|
36 |
+
sys.path.insert(0, osp.join(self.cur_dir, 'lora_utils'))
|
37 |
+
sys.path.insert(0, osp.join(self.cur_dir, 'vit_adapter_utils'))
|
38 |
+
from util.dir import add_pypath
|
39 |
+
# add_pypath(osp.join(self.data_dir))
|
40 |
+
for dataset in os.listdir('datasets'):
|
41 |
+
if dataset not in ['humandata.py', '__pycache__', 'dataset.py']:
|
42 |
+
add_pypath(osp.join(self.root_dir, 'data', dataset))
|
43 |
+
add_pypath('datasets')
|
44 |
+
add_pypath(self.data_dir)
|
45 |
+
|
46 |
+
def prepare_dirs(self, exp_name):
|
47 |
+
time_str = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
|
48 |
+
self.output_dir = osp.join(self.root_dir, f'{exp_name}_{time_str}')
|
49 |
+
self.model_dir = osp.join(self.output_dir, 'model_dump')
|
50 |
+
self.vis_dir = osp.join(self.output_dir, 'vis')
|
51 |
+
self.log_dir = osp.join(self.output_dir, 'log')
|
52 |
+
self.code_dir = osp.join(self.output_dir, 'code')
|
53 |
+
self.result_dir = osp.join(self.output_dir.split('/')[:-1])
|
54 |
+
from util.dir import make_folder
|
55 |
+
make_folder(self.model_dir)
|
56 |
+
make_folder(self.vis_dir)
|
57 |
+
make_folder(self.log_dir)
|
58 |
+
make_folder(self.code_dir)
|
59 |
+
make_folder(self.result_dir)
|
60 |
+
|
61 |
+
## copy some code to log dir as a backup
|
62 |
+
copy_files = [
|
63 |
+
'main/train.py', 'main/test.py', 'common/base.py', 'main/OSX.py',
|
64 |
+
'common/nets', 'main/OSX_WoDecoder.py', 'data/dataset.py',
|
65 |
+
'data/MSCOCO/MSCOCO.py', 'data/AGORA/AGORA.py'
|
66 |
+
]
|
67 |
+
for file in copy_files:
|
68 |
+
os.system(f'cp -r {self.root_dir}/{file} {self.code_dir}')
|
69 |
+
|
70 |
+
def update_test_config(self, testset, agora_benchmark, shapy_eval_split,
|
71 |
+
pretrained_model_path, use_cache):
|
72 |
+
self.testset = testset
|
73 |
+
self.agora_benchmark = agora_benchmark
|
74 |
+
self.pretrained_model_path = pretrained_model_path
|
75 |
+
self.shapy_eval_split = shapy_eval_split
|
76 |
+
self.use_cache = use_cache
|
77 |
+
|
78 |
+
def update_config(self, num_gpus, exp_name):
|
79 |
+
self.num_gpus = num_gpus
|
80 |
+
self.exp_name = exp_name
|
81 |
+
|
82 |
+
self.prepare_dirs(self.exp_name)
|
83 |
+
|
84 |
+
# Save
|
85 |
+
cfg_save = MMConfig(self.__dict__)
|
86 |
+
cfg_save.dump(osp.join(self.code_dir, 'config_base.py'))
|
87 |
+
|
88 |
+
|
89 |
+
cfg = Config()
|
90 |
+
cfg.get_config_fromfile('config/aios_smplx.py')
|
91 |
+
|
data/body_models/J_regressor_extra.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:cc968ea4f9855571e82f90203280836b01f13ee42a8e1b89d8d580b801242a89
|
3 |
+
size 496160
|
data/body_models/J_regressor_h36m.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c655cd7013d7829eb9acbebf0e43f952a3fa0305a53c35880e39192bfb6444a0
|
3 |
+
size 937168
|
data/body_models/J_regressor_mano_LEFT.txt
ADDED
@@ -0,0 +1,1902 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# 21 778
|
2 |
+
0 4 0.0019103600293901542
|
3 |
+
0 5 0.0027920646583394562
|
4 |
+
0 6 0.00029390154298310065
|
5 |
+
0 7 0.00014695077149155033
|
6 |
+
0 25 0.0016164584864070536
|
7 |
+
0 26 0.000440852314474651
|
8 |
+
0 32 0.011756061719324026
|
9 |
+
0 33 0.021234386480529024
|
10 |
+
0 34 0.019838354151359296
|
11 |
+
0 35 0.016311535635562088
|
12 |
+
0 36 0.015870683321087434
|
13 |
+
0 37 0.02343864805290228
|
14 |
+
0 38 0.01671565025716385
|
15 |
+
0 39 0.020499632623071272
|
16 |
+
0 40 0.005437178545187362
|
17 |
+
0 41 0.010139603232916973
|
18 |
+
0 42 0.002645113886847906
|
19 |
+
0 43 0.00014695077149155033
|
20 |
+
0 44 0.02005878030859662
|
21 |
+
0 45 0.02233651726671565
|
22 |
+
0 50 0.01763409257898604
|
23 |
+
0 51 0.01704628949301984
|
24 |
+
0 52 0.019838354151359296
|
25 |
+
0 53 0.02079353416605437
|
26 |
+
0 54 0.00822924320352682
|
27 |
+
0 55 0.00822924320352682
|
28 |
+
0 78 0.011572373254959589
|
29 |
+
0 79 0.011939750183688464
|
30 |
+
0 84 0.01704628949301984
|
31 |
+
0 85 0.019691403379867745
|
32 |
+
0 88 0.005437178545187362
|
33 |
+
0 89 0.0007347538574577516
|
34 |
+
0 90 0.014548126377663484
|
35 |
+
0 91 0.018736223365172666
|
36 |
+
0 92 0.011645848640705364
|
37 |
+
0 106 0.018515797207935343
|
38 |
+
0 107 0.02204261572373255
|
39 |
+
0 108 0.012417340191036004
|
40 |
+
0 109 0.009992652461425423
|
41 |
+
0 110 0.016311535635562088
|
42 |
+
0 111 0.01880969875091844
|
43 |
+
0 112 0.0073475385745775165
|
44 |
+
0 113 0.0014695077149155032
|
45 |
+
0 114 0.005731080088170463
|
46 |
+
0 116 0.02204261572373255
|
47 |
+
0 117 0.012123438648052902
|
48 |
+
0 118 0.013005143277002204
|
49 |
+
0 119 0.016385011021307863
|
50 |
+
0 120 0.008155767817781044
|
51 |
+
0 121 0.011315209404849376
|
52 |
+
0 122 0.009037472446730345
|
53 |
+
0 130 0.0073475385745775165
|
54 |
+
0 131 0.00911094783247612
|
55 |
+
0 178 0.001763409257898604
|
56 |
+
0 179 0.002351212343864805
|
57 |
+
0 190 0.019544452608376194
|
58 |
+
0 191 0.019691403379867745
|
59 |
+
0 192 0.01704628949301984
|
60 |
+
0 193 0.016605437178545186
|
61 |
+
0 200 0.002351212343864805
|
62 |
+
0 203 0.00822924320352682
|
63 |
+
0 204 0.007641440117560617
|
64 |
+
0 205 0.01704628949301984
|
65 |
+
0 207 0.001763409257898604
|
66 |
+
0 208 0.005290227773695812
|
67 |
+
0 209 0.01763409257898604
|
68 |
+
0 210 0.019691403379867745
|
69 |
+
0 211 0.019691403379867745
|
70 |
+
0 214 0.011315209404849376
|
71 |
+
0 215 0.011315209404849376
|
72 |
+
0 216 0.007641440117560617
|
73 |
+
0 217 0.00822924320352682
|
74 |
+
0 218 0.002351212343864805
|
75 |
+
0 219 0.0011756061719324026
|
76 |
+
0 227 0.002351212343864805
|
77 |
+
0 229 0.007788390889052168
|
78 |
+
0 231 0.002204261572373255
|
79 |
+
0 232 0.016311535635562088
|
80 |
+
0 233 0.006759735488611315
|
81 |
+
0 234 0.011168258633357825
|
82 |
+
0 235 0.019544452608376194
|
83 |
+
0 236 0.0016164584864070536
|
84 |
+
0 239 0.011315209404849376
|
85 |
+
0 241 0.0007347538574577516
|
86 |
+
0 242 0.002351212343864805
|
87 |
+
0 243 0.0036737692872887582
|
88 |
+
0 244 0.0011756061719324026
|
89 |
+
0 254 0.0064658339456282144
|
90 |
+
0 255 0.0038207200587803084
|
91 |
+
0 256 0.002351212343864805
|
92 |
+
0 257 0.002351212343864805
|
93 |
+
0 264 0.014107274063188832
|
94 |
+
0 265 0.00440852314474651
|
95 |
+
0 279 0.011315209404849376
|
96 |
+
0 284 0.00896399706098457
|
97 |
+
0 285 0.0029390154298310064
|
98 |
+
1 0 0.014595751184471957
|
99 |
+
1 1 0.025294207550053488
|
100 |
+
1 2 0.019180803912578332
|
101 |
+
1 3 0.01039278618370778
|
102 |
+
1 4 0.03156044627846554
|
103 |
+
1 5 0.025752712822864135
|
104 |
+
1 6 0.014977838911814154
|
105 |
+
1 7 0.023307351367874065
|
106 |
+
1 8 0.005654898364664528
|
107 |
+
1 9 0.009170105456212748
|
108 |
+
1 10 0.002063273727647868
|
109 |
+
1 11 0.0006113403637475165
|
110 |
+
1 12 0.0018340210912425497
|
111 |
+
1 14 0.001222680727495033
|
112 |
+
1 15 7.641754546843957e-05
|
113 |
+
1 16 0.0011462631820265935
|
114 |
+
1 17 0.0004585052728106374
|
115 |
+
1 18 0.00015283509093687913
|
116 |
+
1 19 0.0003820877273421978
|
117 |
+
1 22 7.641754546843957e-05
|
118 |
+
1 24 0.01413724591166132
|
119 |
+
1 25 0.019257221458046772
|
120 |
+
1 26 0.024377197004432218
|
121 |
+
1 27 0.017346782821335782
|
122 |
+
1 28 0.0007641754546843956
|
123 |
+
1 29 0.0022161088185847473
|
124 |
+
1 30 0.0006877579092159561
|
125 |
+
1 31 0.0005349228182790769
|
126 |
+
1 32 0.0005349228182790768
|
127 |
+
1 33 0.0005349228182790769
|
128 |
+
1 34 0.0024071526822558465
|
129 |
+
1 35 0.002445361454990066
|
130 |
+
1 36 0.029802842732691428
|
131 |
+
1 37 0.022122879413113253
|
132 |
+
1 38 0.010029802842732692
|
133 |
+
1 39 0.02334556014060829
|
134 |
+
1 40 0.029344337459880795
|
135 |
+
1 41 0.032171786642213054
|
136 |
+
1 42 0.02009781445819961
|
137 |
+
1 43 0.009934280910897143
|
138 |
+
1 60 0.004355800091701055
|
139 |
+
1 61 0.00855876509246523
|
140 |
+
1 62 0.0004585052728106374
|
141 |
+
1 63 0.003285954455142901
|
142 |
+
1 64 0.0012990982729634726
|
143 |
+
1 65 7.641754546843957e-05
|
144 |
+
1 66 0.0019868561821794286
|
145 |
+
1 67 0.004814305364511693
|
146 |
+
1 68 0.008253094910591475
|
147 |
+
1 69 0.0018340210912425497
|
148 |
+
1 70 0.0003820877273421978
|
149 |
+
1 71 7.641754546843957e-05
|
150 |
+
1 88 0.021320495185694635
|
151 |
+
1 89 0.013907993275256002
|
152 |
+
1 90 0.01986856182179429
|
153 |
+
1 91 0.013564114320648022
|
154 |
+
1 92 0.003763564114320649
|
155 |
+
1 93 0.0004585052728106374
|
156 |
+
1 94 0.008329512456059913
|
157 |
+
1 95 0.007565337001375517
|
158 |
+
1 104 0.0027510316368638244
|
159 |
+
1 105 0.0072596668195017595
|
160 |
+
1 109 0.009705028274491823
|
161 |
+
1 110 0.005654898364664528
|
162 |
+
1 111 0.015436344184624792
|
163 |
+
1 112 0.019180803912578332
|
164 |
+
1 113 0.03339446736970809
|
165 |
+
1 114 0.0340058077334556
|
166 |
+
1 115 0.02559987773192725
|
167 |
+
1 116 0.008405930001528351
|
168 |
+
1 117 0.0017767079321412199
|
169 |
+
1 118 0.00527281063732233
|
170 |
+
1 119 0.00032477456824086816
|
171 |
+
1 122 0.004967140455448571
|
172 |
+
1 123 0.007259666819501758
|
173 |
+
1 124 0.0016811860003056705
|
174 |
+
1 125 0.0025217790004585057
|
175 |
+
1 126 0.008176677365123033
|
176 |
+
1 129 0.00030567018187375826
|
177 |
+
1 145 0.00030567018187375826
|
178 |
+
1 146 0.0006877579092159561
|
179 |
+
1 147 7.641754546843957e-05
|
180 |
+
1 152 7.641754546843957e-05
|
181 |
+
1 157 0.002063273727647868
|
182 |
+
1 158 0.0016047684548372307
|
183 |
+
1 159 0.0032095369096744614
|
184 |
+
1 188 0.0007641754546843956
|
185 |
+
1 190 0.0019868561821794286
|
186 |
+
1 191 0.0004585052728106374
|
187 |
+
1 192 0.0016047684548372307
|
188 |
+
1 193 0.005884151001069847
|
189 |
+
1 207 0.00015283509093687913
|
190 |
+
1 208 7.641754546843957e-05
|
191 |
+
1 209 0.00030567018187375826
|
192 |
+
1 216 0.0008405930001528353
|
193 |
+
1 217 0.003897294818890417
|
194 |
+
1 218 0.0008405930001528353
|
195 |
+
1 219 0.0014519333639003516
|
196 |
+
1 227 0.005502063273727648
|
197 |
+
1 229 0.008635182637933671
|
198 |
+
1 230 0.004126547455295736
|
199 |
+
1 231 0.009705028274491824
|
200 |
+
1 232 0.01245605991135565
|
201 |
+
1 233 0.016888277548525142
|
202 |
+
1 234 0.001413724591166132
|
203 |
+
1 235 0.005654898364664528
|
204 |
+
1 236 0.012838147638697846
|
205 |
+
1 239 0.00026746140913953847
|
206 |
+
1 240 0.01543634418462479
|
207 |
+
1 241 0.0006877579092159561
|
208 |
+
1 242 0.0032095369096744614
|
209 |
+
1 248 0.004890722909980132
|
210 |
+
1 249 0.0005349228182790769
|
211 |
+
1 250 0.0015283509093687911
|
212 |
+
1 251 0.0009170105456212748
|
213 |
+
1 252 0.0029038667278007036
|
214 |
+
1 253 0.005502063273727649
|
215 |
+
1 254 0.0019868561821794286
|
216 |
+
1 255 0.0002292526364053187
|
217 |
+
1 264 0.028885832187070158
|
218 |
+
1 265 0.029650007641754548
|
219 |
+
1 266 0.006953996637628001
|
220 |
+
1 267 0.002445361454990066
|
221 |
+
1 268 0.00015283509093687913
|
222 |
+
1 285 0.010087116001834023
|
223 |
+
1 286 0.007794589637780836
|
224 |
+
1 287 0.0025981965459269452
|
225 |
+
1 697 0.0004585052728106374
|
226 |
+
1 699 7.641754546843957e-05
|
227 |
+
1 700 0.00030567018187375826
|
228 |
+
1 704 0.0002292526364053187
|
229 |
+
1 705 0.0008405930001528353
|
230 |
+
1 706 7.641754546843957e-05
|
231 |
+
2 0 0.0027531810402559712
|
232 |
+
2 1 0.0034972840241089364
|
233 |
+
2 2 0.007887491628841432
|
234 |
+
2 3 0.0056551826772825355
|
235 |
+
2 4 0.009152466701391472
|
236 |
+
2 5 0.01674231713669172
|
237 |
+
2 6 0.02708534861224793
|
238 |
+
2 7 0.02209985862043307
|
239 |
+
2 8 0.00833395341915321
|
240 |
+
2 9 0.009152466701391472
|
241 |
+
2 10 0.011682416846491553
|
242 |
+
2 11 0.0055063620805119425
|
243 |
+
2 12 0.005431951782126646
|
244 |
+
2 13 0.0011161544757794478
|
245 |
+
2 14 0.006176054765979612
|
246 |
+
2 15 0.0017858471612471167
|
247 |
+
2 16 0.0007441029838529652
|
248 |
+
2 19 0.0003720514919264826
|
249 |
+
2 26 0.000967333879008855
|
250 |
+
2 27 0.0008929235806235583
|
251 |
+
2 28 0.013245033112582783
|
252 |
+
2 29 0.013765905201279856
|
253 |
+
2 30 0.009970979983629735
|
254 |
+
2 31 0.011384775652950369
|
255 |
+
2 36 0.0023811295483294886
|
256 |
+
2 37 0.00014882059677059304
|
257 |
+
2 38 7.441029838529652e-05
|
258 |
+
2 39 0.0020834883547883026
|
259 |
+
2 40 0.0055063620805119425
|
260 |
+
2 41 0.009896569685244438
|
261 |
+
2 42 0.022843961604286034
|
262 |
+
2 43 0.032666120991145166
|
263 |
+
2 60 0.00364610462087953
|
264 |
+
2 61 0.0017858471612471167
|
265 |
+
2 62 0.0002976411935411861
|
266 |
+
2 63 0.000967333879008855
|
267 |
+
2 64 0.0014882059677059304
|
268 |
+
2 65 0.0004464617903117792
|
269 |
+
2 68 0.0002976411935411861
|
270 |
+
2 69 7.441029838529652e-05
|
271 |
+
2 88 0.01562616266091227
|
272 |
+
2 89 0.027234169209018527
|
273 |
+
2 90 0.00513431058858546
|
274 |
+
2 91 0.0006696926854676687
|
275 |
+
2 93 7.441029838529652e-05
|
276 |
+
2 94 0.0005952823870823722
|
277 |
+
2 104 0.025225091152615526
|
278 |
+
2 105 0.017858471612471165
|
279 |
+
2 113 0.0035716943224942334
|
280 |
+
2 114 0.002604360443485378
|
281 |
+
2 115 0.010566262370712107
|
282 |
+
2 123 0.026787707418706754
|
283 |
+
2 124 0.021504576233350697
|
284 |
+
2 125 0.01882580549148002
|
285 |
+
2 126 0.02083488354788303
|
286 |
+
2 127 0.0002232308951558896
|
287 |
+
2 128 0.0002976411935411861
|
288 |
+
2 129 0.0017114368628618197
|
289 |
+
2 144 0.0002232308951558896
|
290 |
+
2 145 0.0013393853709353374
|
291 |
+
2 158 0.002604360443485378
|
292 |
+
2 193 0.0003720514919264826
|
293 |
+
2 217 0.0007441029838529652
|
294 |
+
2 219 0.0004464617903117792
|
295 |
+
2 227 0.003199642830567751
|
296 |
+
2 229 0.003125232532182454
|
297 |
+
2 230 0.008854825507850286
|
298 |
+
2 231 0.00982215938685914
|
299 |
+
2 232 0.002009078056403006
|
300 |
+
2 233 0.007813081330456134
|
301 |
+
2 235 7.441029838529652e-05
|
302 |
+
2 236 0.01912344668502121
|
303 |
+
2 240 0.01480764937867401
|
304 |
+
2 248 0.03318699307984225
|
305 |
+
2 249 0.01823052310439765
|
306 |
+
2 250 0.02887119577349505
|
307 |
+
2 251 0.02500186025745963
|
308 |
+
2 252 0.02864796487833916
|
309 |
+
2 253 0.032889351886301064
|
310 |
+
2 259 0.00014882059677059304
|
311 |
+
2 264 0.0002232308951558896
|
312 |
+
2 265 0.0005952823870823722
|
313 |
+
2 266 0.015402931765756382
|
314 |
+
2 267 0.01622144504799464
|
315 |
+
2 286 0.02805268249125679
|
316 |
+
2 287 0.025820373539697895
|
317 |
+
2 697 0.014510008185132822
|
318 |
+
2 698 0.008631594612694398
|
319 |
+
2 699 0.011161544757794479
|
320 |
+
2 700 0.01049185207232681
|
321 |
+
2 701 0.00811072252399732
|
322 |
+
2 702 0.013393853709353377
|
323 |
+
2 703 0.010938313862638589
|
324 |
+
2 704 0.008185132822382618
|
325 |
+
2 705 0.02187662772527718
|
326 |
+
2 706 0.018825805491480024
|
327 |
+
2 707 0.011905647741647447
|
328 |
+
2 708 0.007217798943373763
|
329 |
+
2 709 0.005059900290200163
|
330 |
+
2 710 0.003199642830567751
|
331 |
+
2 711 0.0019346677580177095
|
332 |
+
2 712 0.005952823870823722
|
333 |
+
2 713 0.00364610462087953
|
334 |
+
2 714 0.00364610462087953
|
335 |
+
2 715 0.0026787707418706747
|
336 |
+
2 716 0.0021578986531735995
|
337 |
+
2 721 0.0006696926854676687
|
338 |
+
2 722 0.0002232308951558896
|
339 |
+
2 723 0.0002232308951558896
|
340 |
+
2 725 0.0004464617903117792
|
341 |
+
2 731 0.0032740531289530473
|
342 |
+
2 732 0.0008185132822382618
|
343 |
+
2 741 0.0005952823870823722
|
344 |
+
2 742 0.0005208720886970756
|
345 |
+
2 746 0.0002232308951558896
|
346 |
+
2 749 0.0005208720886970756
|
347 |
+
2 753 0.0034972840241089364
|
348 |
+
2 754 0.004018156112806012
|
349 |
+
2 755 0.0014882059677059304
|
350 |
+
2 757 0.0008929235806235583
|
351 |
+
2 758 0.0014137956693206339
|
352 |
+
2 759 0.0003720514919264826
|
353 |
+
2 760 7.441029838529652e-05
|
354 |
+
3 6 0.0019164148301024542
|
355 |
+
3 7 0.0014004569912287167
|
356 |
+
3 8 0.000884499152354979
|
357 |
+
3 9 0.00029483305078499295
|
358 |
+
3 10 0.004422495761774894
|
359 |
+
3 11 0.0011793322031399718
|
360 |
+
3 12 0.0005896661015699859
|
361 |
+
3 14 0.0011056239404437236
|
362 |
+
3 28 0.011203655929829732
|
363 |
+
3 29 0.0037591213975086604
|
364 |
+
3 30 0.004496204024471142
|
365 |
+
3 31 0.011645905506007222
|
366 |
+
3 43 0.0019164148301024544
|
367 |
+
3 89 0.0005896661015699859
|
368 |
+
3 104 0.009729490675904768
|
369 |
+
3 105 0.002137539618191199
|
370 |
+
3 123 0.006412618854573597
|
371 |
+
3 124 0.0187956069875433
|
372 |
+
3 125 0.013414903810717178
|
373 |
+
3 126 0.004938453600648632
|
374 |
+
3 230 0.0007370826269624824
|
375 |
+
3 231 0.00022112478808874474
|
376 |
+
3 236 0.0005159578388737376
|
377 |
+
3 240 0.0008844991523549787
|
378 |
+
3 248 0.007665659320409817
|
379 |
+
3 249 0.013120070759932186
|
380 |
+
3 250 0.009434657625119773
|
381 |
+
3 251 0.012088155082184712
|
382 |
+
3 252 0.004348787499078646
|
383 |
+
3 253 0.003022038770546178
|
384 |
+
3 266 0.0029483305078499295
|
385 |
+
3 267 0.0125304046583622
|
386 |
+
3 286 0.002727205719761185
|
387 |
+
3 287 0.005896661015699859
|
388 |
+
3 697 0.01805852436058082
|
389 |
+
3 698 0.019016731775632047
|
390 |
+
3 699 0.021375396181911987
|
391 |
+
3 700 0.01968010613989828
|
392 |
+
3 701 0.023512935800103187
|
393 |
+
3 702 0.01975381440259453
|
394 |
+
3 703 0.021965062283481978
|
395 |
+
3 704 0.019164148301024544
|
396 |
+
3 705 0.015331318640819633
|
397 |
+
3 706 0.017837399572492075
|
398 |
+
3 707 0.02889363897692931
|
399 |
+
3 708 0.02130168791921574
|
400 |
+
3 709 0.027050932409523103
|
401 |
+
3 710 0.024544851477850665
|
402 |
+
3 711 0.0209331466057345
|
403 |
+
3 712 0.0232181027493182
|
404 |
+
3 713 0.023070686223925697
|
405 |
+
3 714 0.024102601901673175
|
406 |
+
3 715 0.018353357411365814
|
407 |
+
3 716 0.017026608682833344
|
408 |
+
3 717 0.0016952900420137097
|
409 |
+
3 718 0.0062652023291811
|
410 |
+
3 719 0.0033168718213311705
|
411 |
+
3 720 0.00125304046583622
|
412 |
+
3 721 0.016879192157440846
|
413 |
+
3 722 0.01090882287904474
|
414 |
+
3 723 0.008402741947372299
|
415 |
+
3 724 0.004717328812559887
|
416 |
+
3 725 0.010982531141740989
|
417 |
+
3 726 0.0033168718213311705
|
418 |
+
3 727 0.0008107908896587306
|
419 |
+
3 730 7.370826269624824e-05
|
420 |
+
3 731 0.022775853173140702
|
421 |
+
3 732 0.018279649148669565
|
422 |
+
3 733 0.009803198938601014
|
423 |
+
3 734 0.003022038770546178
|
424 |
+
3 735 0.0003685413134812412
|
425 |
+
3 736 0.011719613768703471
|
426 |
+
3 737 0.003906537922901157
|
427 |
+
3 738 0.0008107908896587306
|
428 |
+
3 739 0.013488612073413427
|
429 |
+
3 740 0.005306994914129874
|
430 |
+
3 741 0.021301687919215745
|
431 |
+
3 742 0.019606397877202027
|
432 |
+
3 743 0.0022112478808874476
|
433 |
+
3 746 0.006338910591877348
|
434 |
+
3 747 0.00125304046583622
|
435 |
+
3 748 0.0016952900420137097
|
436 |
+
3 749 0.009876907201297264
|
437 |
+
3 750 0.003022038770546178
|
438 |
+
3 751 7.370826269624824e-05
|
439 |
+
3 753 0.025208225842116898
|
440 |
+
3 754 0.0209331466057345
|
441 |
+
3 755 0.023291811012014444
|
442 |
+
3 756 0.017837399572492075
|
443 |
+
3 757 0.021449104444608236
|
444 |
+
3 758 0.01975381440259453
|
445 |
+
3 759 0.01171961376870347
|
446 |
+
3 760 0.01348861207341343
|
447 |
+
3 761 0.003906537922901157
|
448 |
+
3 762 0.005306994914129872
|
449 |
+
3 763 0.007960492371194809
|
450 |
+
3 764 0.0008107908896587306
|
451 |
+
3 765 0.0003685413134812412
|
452 |
+
3 767 0.0022112478808874476
|
453 |
+
3 768 0.0011056239404437238
|
454 |
+
4 745 1.0
|
455 |
+
5 0 0.0012638674343491084
|
456 |
+
5 1 0.0001404297149276787
|
457 |
+
5 2 0.00035107428731919675
|
458 |
+
5 3 0.002808594298553574
|
459 |
+
5 8 0.004072461732902682
|
460 |
+
5 9 0.0007723634321022329
|
461 |
+
5 10 0.004774610307541076
|
462 |
+
5 11 0.01418340120769555
|
463 |
+
5 12 0.012357814913635726
|
464 |
+
5 13 0.01930908580255582
|
465 |
+
5 14 0.007934278893413846
|
466 |
+
5 15 0.020011234377194213
|
467 |
+
5 16 0.0021064457239151806
|
468 |
+
5 17 0.0006319337171745541
|
469 |
+
5 18 0.0022468754388428594
|
470 |
+
5 19 0.009127931470299114
|
471 |
+
5 21 0.00042128914478303613
|
472 |
+
5 24 0.0009127931470299115
|
473 |
+
5 25 7.021485746383936e-05
|
474 |
+
5 26 0.0001404297149276787
|
475 |
+
5 27 0.0010532228619575903
|
476 |
+
5 28 0.0004212891447830361
|
477 |
+
5 29 0.0015447268642044658
|
478 |
+
5 30 0.003932032017975004
|
479 |
+
5 31 0.0009127931470299115
|
480 |
+
5 46 0.0006319337171745542
|
481 |
+
5 47 0.00035107428731919675
|
482 |
+
5 48 0.003721387445583485
|
483 |
+
5 49 0.0027383794410897346
|
484 |
+
5 56 0.0002808594298553574
|
485 |
+
5 57 7.021485746383936e-05
|
486 |
+
5 58 0.0010532228619575903
|
487 |
+
5 59 0.0028788091560174134
|
488 |
+
5 60 0.010040724617329027
|
489 |
+
5 61 0.005687403454570988
|
490 |
+
5 62 0.029981744137059403
|
491 |
+
5 63 0.017483499508496
|
492 |
+
5 64 0.02029209380704957
|
493 |
+
5 65 0.024294340682488414
|
494 |
+
5 66 0.0029490240134812527
|
495 |
+
5 67 0.0011234377194214297
|
496 |
+
5 68 0.005827833169498665
|
497 |
+
5 69 0.00975986518747367
|
498 |
+
5 74 0.00217666058137902
|
499 |
+
5 75 0.0010532228619575903
|
500 |
+
5 76 0.00035107428731919675
|
501 |
+
5 77 0.00021064457239151807
|
502 |
+
5 86 0.0007723634321022329
|
503 |
+
5 87 0.0021064457239151806
|
504 |
+
5 93 0.018536722370453586
|
505 |
+
5 94 0.0016851565791321445
|
506 |
+
5 95 0.0001404297149276787
|
507 |
+
5 104 7.021485746383936e-05
|
508 |
+
5 105 0.0001404297149276787
|
509 |
+
5 127 0.023592192107850022
|
510 |
+
5 128 0.02710293498104199
|
511 |
+
5 129 0.020713382951832608
|
512 |
+
5 132 0.023030473248139307
|
513 |
+
5 133 0.005195899452324112
|
514 |
+
5 134 0.005195899452324112
|
515 |
+
5 135 0.01305996348827412
|
516 |
+
5 136 0.008495997753124563
|
517 |
+
5 137 0.014323830922623225
|
518 |
+
5 138 0.01818564808313439
|
519 |
+
5 139 0.011515236624069652
|
520 |
+
5 140 0.008215138323269205
|
521 |
+
5 143 0.010742873191967421
|
522 |
+
5 144 0.016991995506249125
|
523 |
+
5 145 0.010040724617329027
|
524 |
+
5 146 0.00035107428731919675
|
525 |
+
5 147 0.0011234377194214297
|
526 |
+
5 149 0.013832326920376354
|
527 |
+
5 150 0.016430276646538407
|
528 |
+
5 151 0.010181154332256704
|
529 |
+
5 152 0.011023732621822779
|
530 |
+
5 155 0.00035107428731919675
|
531 |
+
5 156 0.001966016008987502
|
532 |
+
5 157 7.021485746383936e-05
|
533 |
+
5 158 0.003932032017975004
|
534 |
+
5 164 0.0034405280157281284
|
535 |
+
5 165 0.005195899452324111
|
536 |
+
5 166 0.0014745120067406266
|
537 |
+
5 167 0.0014745120067406264
|
538 |
+
5 168 0.026049712119084405
|
539 |
+
5 169 0.02927959556242101
|
540 |
+
5 170 0.023873051537705376
|
541 |
+
5 171 0.016008987501755372
|
542 |
+
5 172 0.027102934981041993
|
543 |
+
5 173 0.016921780648785283
|
544 |
+
5 174 0.005546973739643309
|
545 |
+
5 175 0.005406544024715631
|
546 |
+
5 176 0.013551467490520995
|
547 |
+
5 177 0.00758320460609465
|
548 |
+
5 183 7.021485746383936e-05
|
549 |
+
5 185 0.009127931470299114
|
550 |
+
5 186 0.017834573795815194
|
551 |
+
5 187 0.008074708608341525
|
552 |
+
5 189 0.007161915461311614
|
553 |
+
5 194 0.010602443477039742
|
554 |
+
5 195 0.01060244347703974
|
555 |
+
5 206 0.0013340822918129478
|
556 |
+
5 212 0.007091700603847775
|
557 |
+
5 213 0.0013340822918129476
|
558 |
+
5 219 0.0002808594298553574
|
559 |
+
5 220 0.00435332116275804
|
560 |
+
5 222 0.0002808594298553574
|
561 |
+
5 223 0.00042128914478303613
|
562 |
+
5 225 0.0016851565791321445
|
563 |
+
5 226 0.00042128914478303613
|
564 |
+
5 227 0.000983008004493751
|
565 |
+
5 228 0.00975986518747367
|
566 |
+
5 230 0.001825586294059823
|
567 |
+
5 231 7.021485746383936e-05
|
568 |
+
5 246 0.00035107428731919675
|
569 |
+
5 258 0.020924027524224127
|
570 |
+
5 259 0.022398539530964757
|
571 |
+
5 260 0.015587698356972338
|
572 |
+
5 261 0.012568459486027245
|
573 |
+
5 262 0.009619435472545991
|
574 |
+
5 263 0.01305996348827412
|
575 |
+
5 266 0.0010532228619575903
|
576 |
+
5 267 0.0005617188597107148
|
577 |
+
5 268 0.004283106305294201
|
578 |
+
5 269 0.0017553714365959837
|
579 |
+
5 270 0.005266114309787951
|
580 |
+
5 271 0.004844825165004915
|
581 |
+
5 274 0.018045218368206713
|
582 |
+
5 276 0.0002808594298553574
|
583 |
+
5 277 0.00021064457239151807
|
584 |
+
5 280 0.0001404297149276787
|
585 |
+
5 288 0.00540654402471563
|
586 |
+
5 290 7.021485746383936e-05
|
587 |
+
5 358 0.0002808594298553574
|
588 |
+
5 359 0.00035107428731919675
|
589 |
+
5 362 0.00021064457239151807
|
590 |
+
5 363 0.0002808594298553574
|
591 |
+
5 365 7.021485746383936e-05
|
592 |
+
5 366 0.0009127931470299116
|
593 |
+
5 367 0.0013340822918129476
|
594 |
+
5 368 0.005125684594860273
|
595 |
+
5 369 0.0034405280157281284
|
596 |
+
5 370 0.0013340822918129476
|
597 |
+
5 371 0.00021064457239151807
|
598 |
+
5 373 0.00042128914478303613
|
599 |
+
5 375 0.00035107428731919675
|
600 |
+
5 378 0.004493750877685719
|
601 |
+
5 379 0.0034405280157281284
|
602 |
+
5 380 0.004634180592613397
|
603 |
+
5 383 0.00042128914478303613
|
604 |
+
5 385 0.0016149417216683051
|
605 |
+
5 386 0.001404297149276787
|
606 |
+
5 387 0.0016851565791321445
|
607 |
+
5 388 0.0002808594298553574
|
608 |
+
5 399 0.0014745120067406264
|
609 |
+
6 46 0.019904998869034157
|
610 |
+
6 47 0.01960340797707909
|
611 |
+
6 48 0.025559828093191583
|
612 |
+
6 49 0.02352408957249491
|
613 |
+
6 56 0.022166930558697125
|
614 |
+
6 57 0.020131192038000453
|
615 |
+
6 58 0.02194073738973083
|
616 |
+
6 59 0.028952725627686037
|
617 |
+
6 62 0.0005277840609213601
|
618 |
+
6 65 0.00022619316896629722
|
619 |
+
6 86 0.02382568046444997
|
620 |
+
6 87 0.022543919173640955
|
621 |
+
6 127 0.0012063635678202518
|
622 |
+
6 128 0.0007539772298876573
|
623 |
+
6 132 0.0006031817839101259
|
624 |
+
6 133 0.017643067179371183
|
625 |
+
6 134 0.02382568046444997
|
626 |
+
6 135 0.01379778330694413
|
627 |
+
6 136 0.01259141973912388
|
628 |
+
6 137 0.004448465656337178
|
629 |
+
6 138 0.003091306642539395
|
630 |
+
6 139 0.009424715373595717
|
631 |
+
6 140 0.012214431124180048
|
632 |
+
6 143 0.0005277840609213601
|
633 |
+
6 144 0.0012817612908090175
|
634 |
+
6 150 0.0008293749528764231
|
635 |
+
6 155 0.019678805700067855
|
636 |
+
6 156 0.0244288622483601
|
637 |
+
6 164 0.019980396592022914
|
638 |
+
6 165 0.017944658071326246
|
639 |
+
6 166 0.023222498680539848
|
640 |
+
6 167 0.023901078187438737
|
641 |
+
6 168 0.002789715750584332
|
642 |
+
6 169 0.002186533966674206
|
643 |
+
6 170 0.00987710171152831
|
644 |
+
6 171 0.005881022393123726
|
645 |
+
6 172 0.004071477041393349
|
646 |
+
6 173 0.011837442509236221
|
647 |
+
6 174 0.022166930558697128
|
648 |
+
6 175 0.02382568046444997
|
649 |
+
6 176 0.019377214808112796
|
650 |
+
6 177 0.013119203800045236
|
651 |
+
6 185 0.0016587499057528462
|
652 |
+
6 186 0.004448465656337178
|
653 |
+
6 187 0.0005277840609213601
|
654 |
+
6 189 0.020809771544899342
|
655 |
+
6 194 0.015154942320741913
|
656 |
+
6 195 0.01839704440925884
|
657 |
+
6 212 0.021262157882831936
|
658 |
+
6 213 0.022317726004674656
|
659 |
+
6 221 0.006333408731056322
|
660 |
+
6 222 0.016210510442584633
|
661 |
+
6 223 0.018472442132247607
|
662 |
+
6 224 0.00987710171152831
|
663 |
+
6 225 0.02744477116791073
|
664 |
+
6 226 0.020583578375933047
|
665 |
+
6 228 0.0005277840609213602
|
666 |
+
6 237 0.012516022016135112
|
667 |
+
6 238 0.011912840232224985
|
668 |
+
6 245 0.011912840232224985
|
669 |
+
6 258 0.0052024428862248355
|
670 |
+
6 259 0.002337329412651738
|
671 |
+
6 260 0.007162783683932745
|
672 |
+
6 261 0.013043806077056472
|
673 |
+
6 262 0.0016587499057528462
|
674 |
+
6 263 0.007388976852899043
|
675 |
+
6 272 0.014174771921887958
|
676 |
+
6 273 0.012817612908090177
|
677 |
+
6 274 0.0059564201161124925
|
678 |
+
6 280 0.019301817085124028
|
679 |
+
6 281 0.011385056171303627
|
680 |
+
6 282 0.011460453894292393
|
681 |
+
6 283 0.017643067179371186
|
682 |
+
6 294 0.003920681595415819
|
683 |
+
6 295 0.0069365905149664465
|
684 |
+
6 296 0.0037698861494382865
|
685 |
+
6 297 0.00512704516323607
|
686 |
+
6 298 0.006634999623011385
|
687 |
+
6 299 0.002789715750584332
|
688 |
+
6 300 0.0021865339666742064
|
689 |
+
6 301 0.0038452838724270517
|
690 |
+
6 302 0.0005277840609213601
|
691 |
+
6 303 0.0006031817839101259
|
692 |
+
6 305 0.00030159089195506294
|
693 |
+
6 316 0.0016587499057528462
|
694 |
+
6 321 0.0009047726758651889
|
695 |
+
6 330 0.0021111362436854408
|
696 |
+
6 331 0.0015079544597753145
|
697 |
+
6 340 0.00512704516323607
|
698 |
+
6 341 0.004599261102314709
|
699 |
+
6 342 0.0011309658448314859
|
700 |
+
6 344 0.0007539772298876573
|
701 |
+
6 345 0.00022619316896629722
|
702 |
+
7 46 0.008690077640857611
|
703 |
+
7 47 0.009188688653037966
|
704 |
+
7 48 0.0033478167960680964
|
705 |
+
7 49 0.0034902770852624832
|
706 |
+
7 56 0.010898212123370611
|
707 |
+
7 57 0.012322815015314481
|
708 |
+
7 58 0.004202578531234419
|
709 |
+
7 59 0.003276586651470902
|
710 |
+
7 86 0.00648194315834461
|
711 |
+
7 87 0.0016382933257354513
|
712 |
+
7 133 0.00035615072298596765
|
713 |
+
7 134 0.0015670631811382577
|
714 |
+
7 155 0.009829759954412709
|
715 |
+
7 156 0.004131348386637225
|
716 |
+
7 164 0.0009259918797635161
|
717 |
+
7 165 0.0006410713013747418
|
718 |
+
7 166 0.003917657952845645
|
719 |
+
7 167 0.0050573402664007405
|
720 |
+
7 174 0.001638293325735451
|
721 |
+
7 175 0.0014246028919438706
|
722 |
+
7 189 0.0009259918797635161
|
723 |
+
7 194 0.00028492057838877413
|
724 |
+
7 195 0.0006410713013747418
|
725 |
+
7 212 0.00042738086758316123
|
726 |
+
7 213 0.0037039675190540643
|
727 |
+
7 221 0.019517059619631027
|
728 |
+
7 222 0.016739083980340477
|
729 |
+
7 223 0.0143172590640359
|
730 |
+
7 224 0.02443193959683738
|
731 |
+
7 225 0.00683809388133058
|
732 |
+
7 226 0.01111190255716219
|
733 |
+
7 237 0.016739083980340477
|
734 |
+
7 238 0.018092456727687157
|
735 |
+
7 245 0.01367618776266116
|
736 |
+
7 272 0.02236626540351877
|
737 |
+
7 273 0.01923213904124225
|
738 |
+
7 280 0.011040672412564997
|
739 |
+
7 281 0.020086900776408578
|
740 |
+
7 282 0.01859106773986751
|
741 |
+
7 283 0.0165253935465489
|
742 |
+
7 294 0.024004558729254222
|
743 |
+
7 295 0.024075788873851416
|
744 |
+
7 296 0.02443193959683738
|
745 |
+
7 297 0.025357931476600898
|
746 |
+
7 298 0.026283923356364414
|
747 |
+
7 299 0.023933328584657028
|
748 |
+
7 300 0.022722416126504736
|
749 |
+
7 301 0.02514424104280932
|
750 |
+
7 302 0.01738015528171522
|
751 |
+
7 303 0.020941662511574897
|
752 |
+
7 304 0.007835315905691288
|
753 |
+
7 305 0.017380155281715225
|
754 |
+
7 306 0.011396823135550965
|
755 |
+
7 307 0.0036327373744568705
|
756 |
+
7 308 0.0012821426027494836
|
757 |
+
7 309 0.002777975639290548
|
758 |
+
7 310 0.011966664292328516
|
759 |
+
7 311 0.005342260844789515
|
760 |
+
7 312 0.0038464278082484507
|
761 |
+
7 313 0.0014958330365410642
|
762 |
+
7 314 0.0007835315905691288
|
763 |
+
7 315 0.008191466628677256
|
764 |
+
7 316 0.022651185981907542
|
765 |
+
7 317 0.00035615072298596765
|
766 |
+
7 321 0.02101289265617209
|
767 |
+
7 322 0.01225158487071729
|
768 |
+
7 323 0.007764085761094094
|
769 |
+
7 324 0.002564285205498967
|
770 |
+
7 325 0.01994444048721419
|
771 |
+
7 326 0.008690077640857611
|
772 |
+
7 327 0.0024218249163045803
|
773 |
+
7 328 0.0165253935465489
|
774 |
+
7 329 0.006980554170524965
|
775 |
+
7 330 0.028064676971294254
|
776 |
+
7 331 0.021084122800769284
|
777 |
+
7 332 0.0019232139041242254
|
778 |
+
7 333 0.00021369043379158061
|
779 |
+
7 334 0.010969442267967804
|
780 |
+
7 335 0.0024930550609017737
|
781 |
+
7 336 0.008690077640857611
|
782 |
+
7 337 0.003988888097442838
|
783 |
+
7 338 0.00028492057838877413
|
784 |
+
7 340 0.019588289764228224
|
785 |
+
7 341 0.0242182491630458
|
786 |
+
7 342 0.021867654391338417
|
787 |
+
7 343 0.014103568630244322
|
788 |
+
7 344 0.018662297884464708
|
789 |
+
7 345 0.014673409787021868
|
790 |
+
7 346 0.006125792435358643
|
791 |
+
7 347 0.009758529809815513
|
792 |
+
7 348 0.0017095234703326447
|
793 |
+
7 349 0.0031341263622765153
|
794 |
+
7 350 0.004772419688011967
|
795 |
+
7 351 0.0006410713013747418
|
796 |
+
7 352 0.0008547617351663223
|
797 |
+
7 353 0.00042738086758316123
|
798 |
+
7 354 0.001068452168957903
|
799 |
+
7 355 0.0009972220243607095
|
800 |
+
8 317 1.0
|
801 |
+
9 11 0.0002498906728306366
|
802 |
+
9 13 0.0002498906728306366
|
803 |
+
9 14 0.0009995626913225464
|
804 |
+
9 15 0.0022490160554757294
|
805 |
+
9 16 0.0029986880739676387
|
806 |
+
9 17 0.002249016055475729
|
807 |
+
9 18 0.007746610857749733
|
808 |
+
9 19 0.00949584556756419
|
809 |
+
9 20 0.0013743987005685012
|
810 |
+
9 21 0.00437308677453614
|
811 |
+
9 22 0.0009995626913225461
|
812 |
+
9 23 0.00018741800462297744
|
813 |
+
9 48 0.0004997813456612732
|
814 |
+
9 59 0.0002498906728306366
|
815 |
+
9 62 0.0014368713687761604
|
816 |
+
9 63 0.000874617354907228
|
817 |
+
9 64 6.247266820765915e-05
|
818 |
+
9 65 6.247266820765915e-05
|
819 |
+
9 66 0.0024989067283063657
|
820 |
+
9 67 0.000437308677453614
|
821 |
+
9 68 0.0006871993502842506
|
822 |
+
9 69 0.0029986880739676387
|
823 |
+
9 71 0.0004997813456612732
|
824 |
+
9 74 0.015555694383707127
|
825 |
+
9 75 0.017867183107390515
|
826 |
+
9 76 0.017242456425313923
|
827 |
+
9 77 0.00868370088086462
|
828 |
+
9 83 6.247266820765915e-05
|
829 |
+
9 87 0.0004997813456612732
|
830 |
+
9 93 0.0033110514150059348
|
831 |
+
9 127 0.0006247266820765914
|
832 |
+
9 132 0.004810395451989753
|
833 |
+
9 133 0.0006247266820765914
|
834 |
+
9 135 0.0001249453364153183
|
835 |
+
9 136 0.0004997813456612732
|
836 |
+
9 137 0.015555694383707127
|
837 |
+
9 138 0.007246829512088461
|
838 |
+
9 139 0.005997376147935278
|
839 |
+
9 140 0.008683700880864622
|
840 |
+
9 141 0.005997376147935278
|
841 |
+
9 142 0.0025613793965140247
|
842 |
+
9 143 0.015743112388330104
|
843 |
+
9 144 0.009558318235771848
|
844 |
+
9 145 0.0032485787467982754
|
845 |
+
9 146 0.0015618167051914785
|
846 |
+
9 147 0.006122321484350596
|
847 |
+
9 148 0.0025613793965140247
|
848 |
+
9 149 0.0071843568438808006
|
849 |
+
9 150 0.01243206097332417
|
850 |
+
9 151 0.013993877678515648
|
851 |
+
9 152 0.007809083525957393
|
852 |
+
9 157 0.0001249453364153183
|
853 |
+
9 158 0.0023114887236833884
|
854 |
+
9 160 0.0019991253826450927
|
855 |
+
9 161 0.0002498906728306366
|
856 |
+
9 162 0.0005622540138689324
|
857 |
+
9 163 0.0021240707190604106
|
858 |
+
9 164 0.0029362154057599797
|
859 |
+
9 165 0.002561379396514025
|
860 |
+
9 166 0.0007496720184919098
|
861 |
+
9 167 0.0007496720184919097
|
862 |
+
9 168 0.002124070719060411
|
863 |
+
9 169 0.0003123633410382957
|
864 |
+
9 170 0.0006871993502842506
|
865 |
+
9 171 0.002249016055475729
|
866 |
+
9 174 0.0028737427375523207
|
867 |
+
9 175 0.0018741800462297744
|
868 |
+
9 176 0.009433372899356529
|
869 |
+
9 177 0.006247266820765914
|
870 |
+
9 181 0.00018741800462297744
|
871 |
+
9 182 0.0009995626913225464
|
872 |
+
9 183 0.004248141438120822
|
873 |
+
9 185 0.019179109139751356
|
874 |
+
9 186 0.01661772974323733
|
875 |
+
9 187 0.019054163803336036
|
876 |
+
9 194 0.0015618167051914785
|
877 |
+
9 195 0.0001249453364153183
|
878 |
+
9 196 0.0004997813456612732
|
879 |
+
9 197 0.0014993440369838195
|
880 |
+
9 198 0.0003748360092459549
|
881 |
+
9 199 0.0001249453364153183
|
882 |
+
9 202 6.247266820765915e-05
|
883 |
+
9 206 0.013181732991816079
|
884 |
+
9 207 6.247266820765915e-05
|
885 |
+
9 212 0.0018741800462297742
|
886 |
+
9 213 0.0002498906728306366
|
887 |
+
9 218 0.0003123633410382957
|
888 |
+
9 219 0.0006871993502842506
|
889 |
+
9 220 0.014868495033422876
|
890 |
+
9 225 0.0006247266820765914
|
891 |
+
9 227 0.0006871993502842506
|
892 |
+
9 228 0.021802961204473042
|
893 |
+
9 230 0.0002498906728306366
|
894 |
+
9 246 0.020803398513150495
|
895 |
+
9 247 0.017304929093521583
|
896 |
+
9 258 0.0004997813456612732
|
897 |
+
9 259 0.0027487974011370024
|
898 |
+
9 260 0.0017492347098144558
|
899 |
+
9 261 0.002623852064721684
|
900 |
+
9 262 0.01974136315362029
|
901 |
+
9 263 0.01655525707502967
|
902 |
+
9 268 0.007746610857749734
|
903 |
+
9 269 0.02167801586805772
|
904 |
+
9 270 0.019054163803336036
|
905 |
+
9 271 0.011932279627662898
|
906 |
+
9 274 0.0066221028300118695
|
907 |
+
9 275 0.0007496720184919098
|
908 |
+
9 276 0.016742675079652648
|
909 |
+
9 277 0.02205285187730368
|
910 |
+
9 288 0.022427687886549634
|
911 |
+
9 289 0.0003123633410382957
|
912 |
+
9 290 0.00730930218029612
|
913 |
+
9 291 0.005685012806896982
|
914 |
+
9 292 0.0057474854751046415
|
915 |
+
9 293 0.008933591553695257
|
916 |
+
9 356 0.0014993440369838195
|
917 |
+
9 357 0.0014993440369838193
|
918 |
+
9 358 0.00668457549821953
|
919 |
+
9 359 0.004685450115574436
|
920 |
+
9 360 0.0007496720184919098
|
921 |
+
9 361 0.0007496720184919098
|
922 |
+
9 362 0.0024989067283063657
|
923 |
+
9 363 0.0038733054288748667
|
924 |
+
9 364 0.0014368713687761604
|
925 |
+
9 365 0.004498032110951459
|
926 |
+
9 366 0.009933154245017804
|
927 |
+
9 367 0.010245517586056099
|
928 |
+
9 368 0.015993003061160742
|
929 |
+
9 369 0.015993003061160742
|
930 |
+
9 370 0.021115761854188793
|
931 |
+
9 371 0.01693009308427563
|
932 |
+
9 372 0.0009995626913225464
|
933 |
+
9 373 0.0037483600924595483
|
934 |
+
9 374 0.008996064221902918
|
935 |
+
9 375 0.012432060973324168
|
936 |
+
9 376 0.004498032110951458
|
937 |
+
9 377 0.0031861060785906164
|
938 |
+
9 378 0.017554819766352217
|
939 |
+
9 379 0.01749234709814456
|
940 |
+
9 380 0.01649278440682201
|
941 |
+
9 381 0.008308864871618667
|
942 |
+
9 382 0.006434684825388891
|
943 |
+
9 383 0.016055475729368402
|
944 |
+
9 384 0.012557006309739488
|
945 |
+
9 385 0.01018304491784844
|
946 |
+
9 386 0.015180858374461174
|
947 |
+
9 387 0.01155744361841694
|
948 |
+
9 388 0.009058536890110576
|
949 |
+
9 389 0.0028112700693446614
|
950 |
+
9 391 0.00018741800462297744
|
951 |
+
9 392 0.0005622540138689324
|
952 |
+
9 394 0.0018117073780221152
|
953 |
+
9 395 0.0004997813456612732
|
954 |
+
9 399 0.01611794839757606
|
955 |
+
9 402 0.0008746173549072279
|
956 |
+
9 470 0.0007496720184919098
|
957 |
+
9 471 0.0004997813456612732
|
958 |
+
9 478 0.0007496720184919098
|
959 |
+
9 479 0.0004997813456612732
|
960 |
+
9 480 0.0026863247329293434
|
961 |
+
9 481 0.002623852064721684
|
962 |
+
9 483 0.0001249453364153183
|
963 |
+
9 484 0.0001249453364153183
|
964 |
+
9 485 0.0014993440369838195
|
965 |
+
9 486 0.0004997813456612732
|
966 |
+
9 488 0.008996064221902916
|
967 |
+
9 489 0.006059848816142937
|
968 |
+
9 490 0.006497157493596552
|
969 |
+
9 491 0.0001249453364153183
|
970 |
+
9 492 0.0003748360092459549
|
971 |
+
9 493 0.001311926032360842
|
972 |
+
9 494 0.000437308677453614
|
973 |
+
9 495 0.0017492347098144558
|
974 |
+
9 496 0.002623852064721684
|
975 |
+
9 497 0.0027487974011370024
|
976 |
+
9 498 0.0006247266820765914
|
977 |
+
9 509 0.0020615980508527517
|
978 |
+
9 510 0.0003748360092459549
|
979 |
+
9 579 0.0019991253826450927
|
980 |
+
10 74 0.0005264345341054373
|
981 |
+
10 75 0.0021809430698653833
|
982 |
+
10 76 0.000752049334436339
|
983 |
+
10 137 0.000827254267879973
|
984 |
+
10 143 0.0006016394675490712
|
985 |
+
10 150 0.0003008197337745356
|
986 |
+
10 151 0.0006016394675490712
|
987 |
+
10 185 0.004361886139730767
|
988 |
+
10 186 0.0010528690682108748
|
989 |
+
10 187 0.003910656539068963
|
990 |
+
10 206 0.0001504098668872678
|
991 |
+
10 220 0.0003008197337745356
|
992 |
+
10 228 0.0030834022711889904
|
993 |
+
10 246 0.003985861472512596
|
994 |
+
10 247 0.0012784838685417762
|
995 |
+
10 262 0.003910656539068963
|
996 |
+
10 263 0.0011280740016545085
|
997 |
+
10 269 0.0032338121380762574
|
998 |
+
10 270 0.002857787470858088
|
999 |
+
10 271 0.0003008197337745356
|
1000 |
+
10 276 0.000902459201323607
|
1001 |
+
10 277 0.00556516507482891
|
1002 |
+
10 288 0.0027825825374144545
|
1003 |
+
10 356 0.020305332029781156
|
1004 |
+
10 357 0.019703692562232082
|
1005 |
+
10 358 0.02549447243739189
|
1006 |
+
10 359 0.023764758968188315
|
1007 |
+
10 360 0.02587049710461006
|
1008 |
+
10 361 0.022486275099646538
|
1009 |
+
10 362 0.022411070166202904
|
1010 |
+
10 363 0.02278709483342107
|
1011 |
+
10 364 0.026321726705271865
|
1012 |
+
10 365 0.02007971722945025
|
1013 |
+
10 366 0.016093855756937656
|
1014 |
+
10 367 0.022260660299315636
|
1015 |
+
10 368 0.011882379484094157
|
1016 |
+
10 369 0.009400616680454237
|
1017 |
+
10 370 0.00962623148078514
|
1018 |
+
10 371 0.011431149883432353
|
1019 |
+
10 372 0.021583815898322933
|
1020 |
+
10 373 0.024742423102955553
|
1021 |
+
10 374 0.01947807776190118
|
1022 |
+
10 375 0.01789877415958487
|
1023 |
+
10 376 0.023388734300970146
|
1024 |
+
10 377 0.023689554034744677
|
1025 |
+
10 378 0.009400616680454237
|
1026 |
+
10 379 0.005865984808603443
|
1027 |
+
10 380 0.01135594494998872
|
1028 |
+
10 381 0.022486275099646538
|
1029 |
+
10 382 0.015341806422501316
|
1030 |
+
10 383 0.01135594494998872
|
1031 |
+
10 384 0.01158155975031962
|
1032 |
+
10 385 0.019703692562232082
|
1033 |
+
10 386 0.01504098668872678
|
1034 |
+
10 387 0.018124388959915774
|
1035 |
+
10 388 0.010077461081446944
|
1036 |
+
10 389 0.02293750470030834
|
1037 |
+
10 390 0.01383770775362864
|
1038 |
+
10 391 0.017372339625479433
|
1039 |
+
10 392 0.019703692562232086
|
1040 |
+
10 393 0.011882379484094157
|
1041 |
+
10 394 0.024667218169511923
|
1042 |
+
10 395 0.024667218169511916
|
1043 |
+
10 396 0.012333609084755958
|
1044 |
+
10 397 0.011506354816875987
|
1045 |
+
10 398 0.013236068286079568
|
1046 |
+
10 399 0.0070692637437015865
|
1047 |
+
10 400 0.01940287282845755
|
1048 |
+
10 401 0.016093855756937656
|
1049 |
+
10 402 0.020530946830112053
|
1050 |
+
10 403 0.008197337745356097
|
1051 |
+
10 404 0.01759795442581033
|
1052 |
+
10 405 0.021508610964879295
|
1053 |
+
10 406 0.008197337745356095
|
1054 |
+
10 407 0.013988117620515906
|
1055 |
+
10 408 0.008949387079792434
|
1056 |
+
10 409 0.006467624276152515
|
1057 |
+
10 410 0.005264345341054373
|
1058 |
+
10 411 0.005565165074828909
|
1059 |
+
10 412 0.003835451605625329
|
1060 |
+
10 413 0.002105738136421749
|
1061 |
+
10 414 0.0012784838685417764
|
1062 |
+
10 415 0.002556967737083553
|
1063 |
+
10 417 7.52049334436339e-05
|
1064 |
+
10 420 0.0020305332029781154
|
1065 |
+
10 421 0.0006016394675490712
|
1066 |
+
10 422 0.0006016394675490712
|
1067 |
+
10 427 7.52049334436339e-05
|
1068 |
+
10 430 0.004737910806948936
|
1069 |
+
10 431 0.002331352936752651
|
1070 |
+
10 432 0.0001504098668872678
|
1071 |
+
10 440 0.0010528690682108748
|
1072 |
+
10 441 0.0021057381364217496
|
1073 |
+
10 446 7.52049334436339e-05
|
1074 |
+
10 452 0.004512296006618034
|
1075 |
+
10 453 0.003609836805294428
|
1076 |
+
10 454 0.0006016394675490712
|
1077 |
+
10 456 0.0006016394675490712
|
1078 |
+
10 457 0.0004512296006618035
|
1079 |
+
11 356 0.011297349184080336
|
1080 |
+
11 357 0.011888060252528984
|
1081 |
+
11 358 0.004430333013364838
|
1082 |
+
11 359 0.004430333013364838
|
1083 |
+
11 360 0.009229860444510078
|
1084 |
+
11 361 0.011371188067636416
|
1085 |
+
11 362 0.0038396219449161927
|
1086 |
+
11 363 0.002805877575131064
|
1087 |
+
11 364 0.005759432917374288
|
1088 |
+
11 365 0.0014767776711216124
|
1089 |
+
11 366 0.0003691944177804031
|
1090 |
+
11 367 0.0014029387875655322
|
1091 |
+
11 372 0.011371188067636418
|
1092 |
+
11 373 0.004504171896920917
|
1093 |
+
11 374 0.0012552610204533705
|
1094 |
+
11 375 0.0011075832533412094
|
1095 |
+
11 376 0.005316399616037805
|
1096 |
+
11 377 0.005685594033818208
|
1097 |
+
11 381 0.001772133205345935
|
1098 |
+
11 382 0.0003691944177804031
|
1099 |
+
11 385 0.00118142213689729
|
1100 |
+
11 386 0.0005168721848925644
|
1101 |
+
11 387 0.0011075832533412094
|
1102 |
+
11 388 7.383888355608063e-05
|
1103 |
+
11 389 0.0031012331093553864
|
1104 |
+
11 390 0.019345787491693123
|
1105 |
+
11 391 0.010928154766299934
|
1106 |
+
11 392 0.01299564350587019
|
1107 |
+
11 393 0.02082256516281474
|
1108 |
+
11 394 0.0057594329173742895
|
1109 |
+
11 395 0.00945137709517832
|
1110 |
+
11 396 0.017352137635678947
|
1111 |
+
11 397 0.02001033744369785
|
1112 |
+
11 398 0.018238204238351912
|
1113 |
+
11 400 0.01794284870412759
|
1114 |
+
11 401 0.019124270841024884
|
1115 |
+
11 402 0.016170715498781657
|
1116 |
+
11 403 0.022816215018828915
|
1117 |
+
11 404 0.01727829875212287
|
1118 |
+
11 405 0.014546260060547885
|
1119 |
+
11 406 0.0239976371557262
|
1120 |
+
11 407 0.022963892785941076
|
1121 |
+
11 408 0.02695119249796943
|
1122 |
+
11 409 0.023776120505057962
|
1123 |
+
11 410 0.019493465258805284
|
1124 |
+
11 411 0.023849959388614037
|
1125 |
+
11 412 0.026581998080189025
|
1126 |
+
11 413 0.020601048512146496
|
1127 |
+
11 414 0.019493465258805288
|
1128 |
+
11 415 0.02163479288193162
|
1129 |
+
11 416 0.004873366314701322
|
1130 |
+
11 417 0.007900760540500627
|
1131 |
+
11 418 0.0042088163626965965
|
1132 |
+
11 419 0.0016982943217898545
|
1133 |
+
11 420 0.018238204238351912
|
1134 |
+
11 421 0.012035738019641142
|
1135 |
+
11 422 0.012331093553865465
|
1136 |
+
11 423 0.0055379162667060465
|
1137 |
+
11 424 0.004061138595584434
|
1138 |
+
11 425 0.0016982943217898542
|
1139 |
+
11 426 0.0008122277191168869
|
1140 |
+
11 427 0.00834379384183711
|
1141 |
+
11 428 0.0005168721848925643
|
1142 |
+
11 429 0.0015506165546776932
|
1143 |
+
11 430 0.023406926087277558
|
1144 |
+
11 431 0.019124270841024884
|
1145 |
+
11 432 0.016392232149449903
|
1146 |
+
11 433 0.005907110684486449
|
1147 |
+
11 434 0.0019198109724580966
|
1148 |
+
11 435 0.015432326663220851
|
1149 |
+
11 436 0.006940855054271579
|
1150 |
+
11 437 0.0013290999040094513
|
1151 |
+
11 438 0.013364837923650594
|
1152 |
+
11 439 0.00694085505427158
|
1153 |
+
11 440 0.02126559846415122
|
1154 |
+
11 441 0.02355460385438972
|
1155 |
+
11 442 0.002732038691574983
|
1156 |
+
11 444 7.383888355608063e-05
|
1157 |
+
11 446 0.010854315882743852
|
1158 |
+
11 447 0.0031012331093553864
|
1159 |
+
11 448 0.007753082773388465
|
1160 |
+
11 449 0.0018459720889020155
|
1161 |
+
11 450 0.00044303330133648377
|
1162 |
+
11 451 0.00044303330133648377
|
1163 |
+
11 452 0.023776120505057962
|
1164 |
+
11 453 0.02229934283393635
|
1165 |
+
11 454 0.02126559846415122
|
1166 |
+
11 455 0.013290999040094512
|
1167 |
+
11 456 0.018385882005464073
|
1168 |
+
11 457 0.015580004430333012
|
1169 |
+
11 458 0.010189765930739126
|
1170 |
+
11 459 0.012035738019641142
|
1171 |
+
11 460 0.0034704275271357893
|
1172 |
+
11 461 0.004578010780476998
|
1173 |
+
11 462 0.005907110684486449
|
1174 |
+
11 463 0.000590711068448645
|
1175 |
+
11 464 0.000590711068448645
|
1176 |
+
11 465 0.0002953555342243225
|
1177 |
+
11 466 0.0019936498560141768
|
1178 |
+
11 467 0.0013290999040094513
|
1179 |
+
12 445 1.0
|
1180 |
+
13 16 0.0014635288607891346
|
1181 |
+
13 17 0.002575810794988877
|
1182 |
+
13 18 0.005737033134293408
|
1183 |
+
13 19 0.001990399250673223
|
1184 |
+
13 20 0.007785973539398196
|
1185 |
+
13 21 0.008664090855871677
|
1186 |
+
13 22 0.002985598876009834
|
1187 |
+
13 23 0.002224563868399485
|
1188 |
+
13 63 5.854115443156538e-05
|
1189 |
+
13 66 0.0018147757873785268
|
1190 |
+
13 67 0.0006439526987472192
|
1191 |
+
13 68 0.0002927057721578269
|
1192 |
+
13 69 0.0008195761620419153
|
1193 |
+
13 70 0.0007024938531787846
|
1194 |
+
13 71 0.0033953869570307925
|
1195 |
+
13 72 0.0024001873316941806
|
1196 |
+
13 73 0.00023416461772626153
|
1197 |
+
13 74 0.009308043554618896
|
1198 |
+
13 75 0.007551808921671934
|
1199 |
+
13 76 0.01890879288139562
|
1200 |
+
13 77 0.013230300901533777
|
1201 |
+
13 80 0.0013464465519260039
|
1202 |
+
13 81 0.0002927057721578269
|
1203 |
+
13 82 0.0016976934785153963
|
1204 |
+
13 83 0.0040978808102095764
|
1205 |
+
13 93 0.00017562346329469617
|
1206 |
+
13 100 0.00017562346329469617
|
1207 |
+
13 102 0.00011708230886313077
|
1208 |
+
13 103 0.00035124692658939234
|
1209 |
+
13 137 0.00011708230886313077
|
1210 |
+
13 141 0.020021074815595362
|
1211 |
+
13 142 0.016625687858564567
|
1212 |
+
13 143 0.0016391523240838306
|
1213 |
+
13 144 0.0005268703898840885
|
1214 |
+
13 145 0.0002927057721578269
|
1215 |
+
13 146 0.002868516567146704
|
1216 |
+
13 147 0.006673691605198454
|
1217 |
+
13 148 0.008839714319166374
|
1218 |
+
13 149 0.0002927057721578269
|
1219 |
+
13 150 0.0002927057721578269
|
1220 |
+
13 151 0.0012293642430628731
|
1221 |
+
13 152 0.0011122819341997424
|
1222 |
+
13 157 0.0008781173164734808
|
1223 |
+
13 158 0.0004097880810209577
|
1224 |
+
13 160 0.02681184872965695
|
1225 |
+
13 161 0.023592085235920848
|
1226 |
+
13 162 0.03096827069429809
|
1227 |
+
13 163 0.02476290832455216
|
1228 |
+
13 178 0.0002927057721578269
|
1229 |
+
13 179 5.854115443156538e-05
|
1230 |
+
13 180 0.0009366584709050461
|
1231 |
+
13 181 0.00444912773679897
|
1232 |
+
13 182 0.013464465519260038
|
1233 |
+
13 183 0.0167427701674277
|
1234 |
+
13 184 0.00017562346329469617
|
1235 |
+
13 185 5.854115443156538e-05
|
1236 |
+
13 186 0.0002927057721578269
|
1237 |
+
13 187 0.0008195761620419153
|
1238 |
+
13 196 0.017503805175038047
|
1239 |
+
13 197 0.023416461772626154
|
1240 |
+
13 198 0.023416461772626154
|
1241 |
+
13 199 0.02921203606135113
|
1242 |
+
13 201 0.0018733169418100922
|
1243 |
+
13 202 0.006439526987472192
|
1244 |
+
13 206 0.015162158997775435
|
1245 |
+
13 207 0.0006439526987472192
|
1246 |
+
13 218 0.0007610350076103501
|
1247 |
+
13 219 0.00046832923545252306
|
1248 |
+
13 220 0.006673691605198454
|
1249 |
+
13 227 0.00011708230886313077
|
1250 |
+
13 228 0.0009951996253366115
|
1251 |
+
13 246 0.0106544901065449
|
1252 |
+
13 247 0.014576747453459781
|
1253 |
+
13 262 0.00011708230886313077
|
1254 |
+
13 268 0.0033368458025992264
|
1255 |
+
13 269 0.010420325488818641
|
1256 |
+
13 270 0.0035710104203254887
|
1257 |
+
13 271 0.002985598876009834
|
1258 |
+
13 275 0.009834913944502985
|
1259 |
+
13 276 0.02142606252195293
|
1260 |
+
13 277 0.01164968973188151
|
1261 |
+
13 278 0.00035124692658939234
|
1262 |
+
13 288 0.004741833508956796
|
1263 |
+
13 289 0.014693829762322912
|
1264 |
+
13 290 0.02207001522070015
|
1265 |
+
13 291 0.017913593256059006
|
1266 |
+
13 292 0.011005737033134292
|
1267 |
+
13 293 0.010478866643250203
|
1268 |
+
13 358 0.0003512469265893923
|
1269 |
+
13 363 5.854115443156538e-05
|
1270 |
+
13 365 0.00035124692658939234
|
1271 |
+
13 366 0.0007024938531787846
|
1272 |
+
13 367 0.00017562346329469617
|
1273 |
+
13 368 0.00017562346329469617
|
1274 |
+
13 369 0.0009951996253366115
|
1275 |
+
13 370 0.005151621589977754
|
1276 |
+
13 371 0.005385786207704015
|
1277 |
+
13 374 0.0016976934785153963
|
1278 |
+
13 375 0.0017562346329469615
|
1279 |
+
13 376 0.0004097880810209577
|
1280 |
+
13 377 0.0003512469265893923
|
1281 |
+
13 378 0.00046832923545252306
|
1282 |
+
13 379 0.0015220700152206996
|
1283 |
+
13 381 0.0014635288607891346
|
1284 |
+
13 382 0.0009951996253366115
|
1285 |
+
13 383 0.00532724505327245
|
1286 |
+
13 384 0.0037466338836201845
|
1287 |
+
13 386 0.0011708230886313077
|
1288 |
+
13 387 0.00011708230886313077
|
1289 |
+
13 388 0.0012293642430628731
|
1290 |
+
13 389 0.0002927057721578269
|
1291 |
+
13 394 0.00017562346329469617
|
1292 |
+
13 399 0.0033953869570307925
|
1293 |
+
13 468 5.854115443156538e-05
|
1294 |
+
13 469 0.0011122819341997424
|
1295 |
+
13 470 0.0027514342582835734
|
1296 |
+
13 471 0.0012879053974944384
|
1297 |
+
13 474 5.854115443156538e-05
|
1298 |
+
13 475 0.0002927057721578269
|
1299 |
+
13 476 0.0002927057721578269
|
1300 |
+
13 477 0.0018147757873785268
|
1301 |
+
13 478 0.0020489404051047887
|
1302 |
+
13 479 0.0011122819341997424
|
1303 |
+
13 480 0.004332045427935838
|
1304 |
+
13 481 0.006556609296335323
|
1305 |
+
13 483 0.00046832923545252306
|
1306 |
+
13 484 0.012352183585060298
|
1307 |
+
13 485 0.014869453225617611
|
1308 |
+
13 486 0.005912656597588104
|
1309 |
+
13 487 0.004214963119072708
|
1310 |
+
13 488 0.01164968973188151
|
1311 |
+
13 489 0.015806111696522657
|
1312 |
+
13 490 0.008312843929282283
|
1313 |
+
13 491 0.009834913944502985
|
1314 |
+
13 492 0.006146821215314366
|
1315 |
+
13 493 0.015513405924364829
|
1316 |
+
13 494 0.02007961597002693
|
1317 |
+
13 495 0.0024001873316941806
|
1318 |
+
13 496 0.008956796628029503
|
1319 |
+
13 497 0.004741833508956796
|
1320 |
+
13 498 0.003512469265893923
|
1321 |
+
13 499 0.002517269640557311
|
1322 |
+
13 501 0.0005854115443156538
|
1323 |
+
13 502 0.0004097880810209577
|
1324 |
+
13 504 0.001990399250673223
|
1325 |
+
13 505 0.00040978808102095764
|
1326 |
+
13 509 0.010478866643250205
|
1327 |
+
13 510 0.02207001522070015
|
1328 |
+
13 513 0.0012293642430628731
|
1329 |
+
13 579 0.021660227139679192
|
1330 |
+
13 580 0.0002927057721578269
|
1331 |
+
13 581 0.00011708230886313077
|
1332 |
+
13 582 0.0012879053974944388
|
1333 |
+
13 583 0.0018147757873785272
|
1334 |
+
13 584 5.854115443156538e-05
|
1335 |
+
13 585 0.00011708230886313077
|
1336 |
+
13 586 0.0011122819341997422
|
1337 |
+
13 587 0.0008195761620419154
|
1338 |
+
13 589 0.0007610350076103501
|
1339 |
+
13 590 0.003395386957030792
|
1340 |
+
13 591 0.0026928931038520073
|
1341 |
+
13 592 0.009834913944502985
|
1342 |
+
13 593 0.009834913944502985
|
1343 |
+
13 594 0.00011708230886313077
|
1344 |
+
13 595 0.0013464465519260039
|
1345 |
+
13 596 0.0015806111696522653
|
1346 |
+
13 597 0.0002927057721578269
|
1347 |
+
13 598 0.00023416461772626153
|
1348 |
+
13 599 0.0009951996253366115
|
1349 |
+
13 600 0.0002927057721578269
|
1350 |
+
13 601 0.0012293642430628731
|
1351 |
+
13 602 0.00046832923545252306
|
1352 |
+
13 603 0.00011708230886313077
|
1353 |
+
13 604 0.003980798501346446
|
1354 |
+
13 605 0.013523006673691603
|
1355 |
+
13 606 0.011591148577449948
|
1356 |
+
13 607 0.006263903524177495
|
1357 |
+
13 608 0.014693829762322912
|
1358 |
+
13 610 0.0003512469265893923
|
1359 |
+
13 611 0.0012293642430628734
|
1360 |
+
13 612 5.854115443156538e-05
|
1361 |
+
13 613 0.005327245053272449
|
1362 |
+
13 614 0.0019318580962416575
|
1363 |
+
13 615 0.006615150450766888
|
1364 |
+
13 616 0.0026928931038520073
|
1365 |
+
13 617 0.0002927057721578269
|
1366 |
+
13 627 0.005268703898840884
|
1367 |
+
13 630 0.00011708230886313077
|
1368 |
+
13 696 0.00023416461772626153
|
1369 |
+
13 769 0.00076103500761035
|
1370 |
+
13 770 0.004683292354525231
|
1371 |
+
13 771 0.0011122819341997424
|
1372 |
+
13 772 5.854115443156538e-05
|
1373 |
+
13 774 0.00076103500761035
|
1374 |
+
13 775 0.003512469265893923
|
1375 |
+
13 776 0.008020138157124457
|
1376 |
+
14 74 0.0005157677571470676
|
1377 |
+
14 75 0.0005157677571470676
|
1378 |
+
14 76 0.004273504273504274
|
1379 |
+
14 77 0.0008104921898025347
|
1380 |
+
14 141 0.002799882110226938
|
1381 |
+
14 142 0.0003684055408193339
|
1382 |
+
14 160 0.001326259946949602
|
1383 |
+
14 161 0.0005894488653109342
|
1384 |
+
14 162 0.004420866489832007
|
1385 |
+
14 163 0.0050103153551429415
|
1386 |
+
14 196 7.368110816386678e-05
|
1387 |
+
14 197 0.0014736221632773356
|
1388 |
+
14 198 0.0030209254347185383
|
1389 |
+
14 199 0.0009578544061302684
|
1390 |
+
14 206 7.368110816386678e-05
|
1391 |
+
14 246 0.0013262599469496023
|
1392 |
+
14 247 0.0061155319776009425
|
1393 |
+
14 269 7.368110816386678e-05
|
1394 |
+
14 276 0.0034630120837017397
|
1395 |
+
14 277 0.0008841732979664015
|
1396 |
+
14 290 0.001399941055113469
|
1397 |
+
14 291 0.0052313586796345415
|
1398 |
+
14 292 0.0058944886531093425
|
1399 |
+
14 293 0.008989095195991748
|
1400 |
+
14 468 0.0199675803124079
|
1401 |
+
14 469 0.02460949012673151
|
1402 |
+
14 470 0.021220159151193633
|
1403 |
+
14 471 0.02586206896551724
|
1404 |
+
14 472 0.020704391394046565
|
1405 |
+
14 473 0.017978190391983492
|
1406 |
+
14 474 0.020114942528735632
|
1407 |
+
14 475 0.02586206896551724
|
1408 |
+
14 476 0.02291482463896257
|
1409 |
+
14 477 0.02475685234305924
|
1410 |
+
14 478 0.021293840259357502
|
1411 |
+
14 479 0.026009431181844976
|
1412 |
+
14 480 0.019451812555260833
|
1413 |
+
14 481 0.014294134983790155
|
1414 |
+
14 482 0.01422045387562629
|
1415 |
+
14 483 0.02726201002063071
|
1416 |
+
14 484 0.02026230474506337
|
1417 |
+
14 485 0.015694076038903628
|
1418 |
+
14 486 0.02726201002063071
|
1419 |
+
14 487 0.02733569112879458
|
1420 |
+
14 488 0.01215738284703802
|
1421 |
+
14 489 0.009652225169466549
|
1422 |
+
14 490 0.015767757147067494
|
1423 |
+
14 491 0.02460949012673151
|
1424 |
+
14 492 0.020114942528735635
|
1425 |
+
14 493 0.013704686118479222
|
1426 |
+
14 494 0.01333628057765989
|
1427 |
+
14 495 0.022988505747126436
|
1428 |
+
14 496 0.018272914824638966
|
1429 |
+
14 497 0.020851753610374304
|
1430 |
+
14 498 0.016578249336870028
|
1431 |
+
14 499 0.025567344532861774
|
1432 |
+
14 500 0.007515473032714411
|
1433 |
+
14 501 0.019157088122605366
|
1434 |
+
14 502 0.015104627173592693
|
1435 |
+
14 503 0.00987326849395815
|
1436 |
+
14 504 0.021293840259357502
|
1437 |
+
14 505 0.020999115826702035
|
1438 |
+
14 506 0.013262599469496024
|
1439 |
+
14 507 0.013483642793987621
|
1440 |
+
14 508 0.010389036251105217
|
1441 |
+
14 509 0.011715296198054817
|
1442 |
+
14 510 0.010167992926613616
|
1443 |
+
14 511 0.011199528440907752
|
1444 |
+
14 512 0.009357500736811082
|
1445 |
+
14 513 0.020335985853227233
|
1446 |
+
14 514 0.010683760683760684
|
1447 |
+
14 515 0.01215738284703802
|
1448 |
+
14 516 0.016357206012378427
|
1449 |
+
14 517 0.004052460949012673
|
1450 |
+
14 518 0.006704980842911877
|
1451 |
+
14 519 0.004273504273504274
|
1452 |
+
14 520 0.0036103743000294726
|
1453 |
+
14 521 0.004494547597995874
|
1454 |
+
14 522 0.003020925434718538
|
1455 |
+
14 523 0.002136752136752137
|
1456 |
+
14 524 0.0037577365163572064
|
1457 |
+
14 525 0.0005894488653109342
|
1458 |
+
14 526 0.0008104921898025347
|
1459 |
+
14 531 0.0002947244326554671
|
1460 |
+
14 541 0.0016209843796050694
|
1461 |
+
14 542 0.0006631299734748011
|
1462 |
+
14 551 0.0019157088122605363
|
1463 |
+
14 552 0.0009578544061302684
|
1464 |
+
14 563 0.005010315355142941
|
1465 |
+
14 564 0.004715590922487474
|
1466 |
+
14 565 0.0010315355142941351
|
1467 |
+
14 567 0.000663129973474801
|
1468 |
+
14 568 0.00022104332449160037
|
1469 |
+
14 579 0.006115531977600943
|
1470 |
+
15 468 0.01103996467211305
|
1471 |
+
15 469 0.010230367262824759
|
1472 |
+
15 470 0.0023551924633841174
|
1473 |
+
15 471 0.004121586810922205
|
1474 |
+
15 472 0.009199970560094207
|
1475 |
+
15 473 0.011334363730036065
|
1476 |
+
15 474 0.004047987046441452
|
1477 |
+
15 475 0.0027967910502686394
|
1478 |
+
15 476 0.0059615809229410476
|
1479 |
+
15 477 0.0014719952896150733
|
1480 |
+
15 478 0.0003679988224037683
|
1481 |
+
15 479 0.0016191948185765807
|
1482 |
+
15 482 0.011187164201074557
|
1483 |
+
15 483 0.0056671818650180315
|
1484 |
+
15 484 0.0014719952896150733
|
1485 |
+
15 485 0.0003679988224037683
|
1486 |
+
15 486 0.004563185397806727
|
1487 |
+
15 487 0.0073599764480753675
|
1488 |
+
15 491 0.002134393169941856
|
1489 |
+
15 492 0.0003679988224037683
|
1490 |
+
15 495 0.0011775962316920587
|
1491 |
+
15 496 0.0005151983513652757
|
1492 |
+
15 497 0.0005151983513652757
|
1493 |
+
15 498 7.359976448075367e-05
|
1494 |
+
15 499 0.0032383896371531613
|
1495 |
+
15 500 0.019945536174284243
|
1496 |
+
15 501 0.01781114300434239
|
1497 |
+
15 502 0.014204754544785456
|
1498 |
+
15 503 0.02524471921689851
|
1499 |
+
15 504 0.005446382571575771
|
1500 |
+
15 505 0.010524766320747773
|
1501 |
+
15 506 0.01832634135570766
|
1502 |
+
15 507 0.01884153970707294
|
1503 |
+
15 508 0.018473540884669168
|
1504 |
+
15 511 0.01781114300434239
|
1505 |
+
15 512 0.019356738058438214
|
1506 |
+
15 513 0.012143961139324354
|
1507 |
+
15 514 0.020755133583572533
|
1508 |
+
15 515 0.01862074041363068
|
1509 |
+
15 516 0.015014351954073748
|
1510 |
+
15 517 0.024361522043129462
|
1511 |
+
15 518 0.02333112534039891
|
1512 |
+
15 519 0.027011113564436594
|
1513 |
+
15 520 0.02465592110105248
|
1514 |
+
15 521 0.024067122985206444
|
1515 |
+
15 522 0.024508721572090966
|
1516 |
+
15 523 0.023478324869360415
|
1517 |
+
15 524 0.025980716861706044
|
1518 |
+
15 525 0.018031942297784646
|
1519 |
+
15 526 0.020607934054611025
|
1520 |
+
15 527 0.0012511959961728123
|
1521 |
+
15 528 0.0059615809229410476
|
1522 |
+
15 529 0.0025023919923456246
|
1523 |
+
15 530 0.0009567969382497977
|
1524 |
+
15 531 0.018179141826746157
|
1525 |
+
15 532 0.006255979980864061
|
1526 |
+
15 533 0.011187164201074557
|
1527 |
+
15 534 0.005225583278133511
|
1528 |
+
15 535 0.004710384926768235
|
1529 |
+
15 536 0.0016927945830573343
|
1530 |
+
15 537 0.0007359976448075366
|
1531 |
+
15 538 0.013247957606535658
|
1532 |
+
15 540 7.359976448075367e-05
|
1533 |
+
15 541 0.02340472510487966
|
1534 |
+
15 542 0.02031353499668801
|
1535 |
+
15 543 0.010745565614190034
|
1536 |
+
15 544 0.0032383896371531613
|
1537 |
+
15 545 0.0003679988224037683
|
1538 |
+
15 546 0.015529550305439023
|
1539 |
+
15 547 0.005593582100537278
|
1540 |
+
15 548 0.001103996467211305
|
1541 |
+
15 549 0.019356738058438214
|
1542 |
+
15 550 0.009126370795613454
|
1543 |
+
15 551 0.025465518510340766
|
1544 |
+
15 552 0.022374328402149115
|
1545 |
+
15 553 0.0029439905792301465
|
1546 |
+
15 557 0.011923161845882095
|
1547 |
+
15 558 0.0029439905792301465
|
1548 |
+
15 559 0.00942076985353647
|
1549 |
+
15 560 0.003679988224037683
|
1550 |
+
15 561 0.0002943990579230147
|
1551 |
+
15 563 0.019356738058438214
|
1552 |
+
15 564 0.024582321336571723
|
1553 |
+
15 565 0.02244792816662987
|
1554 |
+
15 566 0.015382350776477514
|
1555 |
+
15 567 0.019503937587399718
|
1556 |
+
15 568 0.015161551483035255
|
1557 |
+
15 569 0.0059615809229410476
|
1558 |
+
15 570 0.01023036726282476
|
1559 |
+
15 571 0.0030175903437109006
|
1560 |
+
15 572 0.003459188930595423
|
1561 |
+
15 573 0.005519982336056524
|
1562 |
+
15 574 0.0008095974092882903
|
1563 |
+
15 575 0.0008095974092882903
|
1564 |
+
15 576 7.359976448075367e-05
|
1565 |
+
15 577 0.0013247957606535659
|
1566 |
+
15 578 0.0008095974092882903
|
1567 |
+
16 556 1.0
|
1568 |
+
17 17 0.0004919184820801125
|
1569 |
+
17 18 0.0006324666198172875
|
1570 |
+
17 20 0.005762473647224175
|
1571 |
+
17 21 0.0021082220660576245
|
1572 |
+
17 22 0.0014757554462403375
|
1573 |
+
17 23 0.0024595924104005625
|
1574 |
+
17 70 0.000140548137737175
|
1575 |
+
17 71 0.000983836964160225
|
1576 |
+
17 72 0.0023190442726633872
|
1577 |
+
17 73 0.0004919184820801125
|
1578 |
+
17 76 0.000140548137737175
|
1579 |
+
17 77 0.0006324666198172875
|
1580 |
+
17 80 0.008151791988756148
|
1581 |
+
17 81 0.006676036542515813
|
1582 |
+
17 82 0.016303583977512297
|
1583 |
+
17 83 0.012297962052002813
|
1584 |
+
17 96 0.002178496134926213
|
1585 |
+
17 97 0.0007027406886858749
|
1586 |
+
17 98 7.02740688685875e-05
|
1587 |
+
17 99 0.0011243851018974
|
1588 |
+
17 100 0.009065354884047786
|
1589 |
+
17 101 0.007308503162333099
|
1590 |
+
17 102 0.015038650737877725
|
1591 |
+
17 103 0.017919887561489812
|
1592 |
+
17 141 0.0018271257905832748
|
1593 |
+
17 142 0.003794799718903725
|
1594 |
+
17 148 0.0013352073085031624
|
1595 |
+
17 153 0.001546029515108925
|
1596 |
+
17 154 0.0024595924104005625
|
1597 |
+
17 160 0.0134926212227688
|
1598 |
+
17 161 0.01883345045678145
|
1599 |
+
17 162 0.012438510189739986
|
1600 |
+
17 163 0.005200281096275476
|
1601 |
+
17 178 0.0007730147575544624
|
1602 |
+
17 179 0.0007730147575544624
|
1603 |
+
17 180 0.003021784961349263
|
1604 |
+
17 181 0.00758959943780745
|
1605 |
+
17 182 0.01377371749824315
|
1606 |
+
17 183 0.007238229093464512
|
1607 |
+
17 184 0.0026001405481377374
|
1608 |
+
17 196 0.0123682361208714
|
1609 |
+
17 197 0.007449051300070275
|
1610 |
+
17 198 0.0071679550245959235
|
1611 |
+
17 199 0.0202389318341532
|
1612 |
+
17 200 0.0004919184820801125
|
1613 |
+
17 201 0.027406886858749122
|
1614 |
+
17 202 0.020028109627547436
|
1615 |
+
17 206 0.0019676739283204497
|
1616 |
+
17 207 0.0004919184820801125
|
1617 |
+
17 218 0.000140548137737175
|
1618 |
+
17 220 0.00028109627547435
|
1619 |
+
17 247 7.02740688685875e-05
|
1620 |
+
17 256 0.000140548137737175
|
1621 |
+
17 257 0.0004919184820801125
|
1622 |
+
17 269 0.00035137034434293746
|
1623 |
+
17 275 0.005270555165144062
|
1624 |
+
17 276 0.0010541110330288123
|
1625 |
+
17 277 7.02740688685875e-05
|
1626 |
+
17 278 0.02508784258608574
|
1627 |
+
17 289 0.019465917076598734
|
1628 |
+
17 290 0.0044975404075896
|
1629 |
+
17 291 0.001546029515108925
|
1630 |
+
17 292 0.0002108222066057625
|
1631 |
+
17 293 7.02740688685875e-05
|
1632 |
+
17 484 0.0009135628952916374
|
1633 |
+
17 485 0.0007730147575544624
|
1634 |
+
17 489 0.000421644413211525
|
1635 |
+
17 491 0.0004919184820801125
|
1636 |
+
17 492 7.02740688685875e-05
|
1637 |
+
17 493 0.0013352073085031622
|
1638 |
+
17 494 0.002951510892480675
|
1639 |
+
17 509 0.0002108222066057625
|
1640 |
+
17 510 0.0033731553056922
|
1641 |
+
17 579 0.0027406886858749122
|
1642 |
+
17 580 0.003162333099086437
|
1643 |
+
17 581 0.0023190442726633877
|
1644 |
+
17 582 0.009978917779339425
|
1645 |
+
17 583 0.009065354884047788
|
1646 |
+
17 584 0.0010541110330288125
|
1647 |
+
17 585 0.0016865776528460997
|
1648 |
+
17 586 0.004356992269852425
|
1649 |
+
17 587 0.003513703443429375
|
1650 |
+
17 588 0.0016865776528461
|
1651 |
+
17 589 0.004567814476458187
|
1652 |
+
17 590 0.009065354884047786
|
1653 |
+
17 591 0.006886858749121575
|
1654 |
+
17 592 0.013914265635980324
|
1655 |
+
17 593 0.016795502459592413
|
1656 |
+
17 594 0.021503865073787775
|
1657 |
+
17 595 0.028742094167252288
|
1658 |
+
17 596 0.02178496134926212
|
1659 |
+
17 597 0.02059030217849614
|
1660 |
+
17 598 0.0026001405481377374
|
1661 |
+
17 599 0.004427266338721012
|
1662 |
+
17 600 0.015038650737877721
|
1663 |
+
17 601 0.015390021082220663
|
1664 |
+
17 602 0.008081517919887564
|
1665 |
+
17 603 0.007308503162333099
|
1666 |
+
17 604 0.021995783555867888
|
1667 |
+
17 605 0.02312016865776529
|
1668 |
+
17 606 0.019465917076598734
|
1669 |
+
17 607 0.025720309205903027
|
1670 |
+
17 608 0.019465917076598737
|
1671 |
+
17 609 0.0123682361208714
|
1672 |
+
17 610 0.013070976809557275
|
1673 |
+
17 611 0.02009838369641603
|
1674 |
+
17 612 0.019184820801124384
|
1675 |
+
17 613 0.012719606465214337
|
1676 |
+
17 614 0.0134926212227688
|
1677 |
+
17 615 0.0179901616303584
|
1678 |
+
17 616 0.015319747013352071
|
1679 |
+
17 617 0.005621925509486999
|
1680 |
+
17 618 0.00028109627547435
|
1681 |
+
17 619 0.0022487702037948
|
1682 |
+
17 620 0.0026704146170063252
|
1683 |
+
17 621 0.0002108222066057625
|
1684 |
+
17 622 0.007589599437807451
|
1685 |
+
17 623 0.0018271257905832748
|
1686 |
+
17 624 0.00084328882642305
|
1687 |
+
17 625 0.0007027406886858749
|
1688 |
+
17 626 0.0009135628952916376
|
1689 |
+
17 627 0.01981728742094167
|
1690 |
+
17 628 0.0002108222066057625
|
1691 |
+
17 629 0.0004919184820801124
|
1692 |
+
17 630 0.003021784961349262
|
1693 |
+
17 631 0.0006324666198172875
|
1694 |
+
17 632 7.02740688685875e-05
|
1695 |
+
17 633 7.02740688685875e-05
|
1696 |
+
17 696 0.021152494729444835
|
1697 |
+
17 769 0.0179901616303584
|
1698 |
+
17 770 0.020941672522839076
|
1699 |
+
17 771 0.011243851018974
|
1700 |
+
17 772 0.005411103302881238
|
1701 |
+
17 773 0.0028109627547434997
|
1702 |
+
17 774 0.0036542515811665496
|
1703 |
+
17 775 0.00871398453970485
|
1704 |
+
17 776 0.012157413914265636
|
1705 |
+
17 777 0.00035137034434293746
|
1706 |
+
18 82 0.0006012777151446825
|
1707 |
+
18 83 0.00015031942878617063
|
1708 |
+
18 103 0.0004509582863585119
|
1709 |
+
18 142 7.515971439308531e-05
|
1710 |
+
18 160 0.001428034573468621
|
1711 |
+
18 161 0.0071401728673431055
|
1712 |
+
18 162 0.0018789928598271326
|
1713 |
+
18 182 0.0005261180007515971
|
1714 |
+
18 196 0.0018789928598271326
|
1715 |
+
18 197 0.0009019165727170237
|
1716 |
+
18 198 0.0008267568583239384
|
1717 |
+
18 199 0.006238256294626081
|
1718 |
+
18 201 0.003006388575723412
|
1719 |
+
18 202 0.0018038331454340473
|
1720 |
+
18 275 0.00015031942878617063
|
1721 |
+
18 278 0.002179631717399474
|
1722 |
+
18 289 0.003607666290868095
|
1723 |
+
18 580 0.020142803457346866
|
1724 |
+
18 581 0.01698609545283728
|
1725 |
+
18 582 0.03013904547162721
|
1726 |
+
18 583 0.02705749718151071
|
1727 |
+
18 584 0.017737692596768138
|
1728 |
+
18 585 0.016910935738444193
|
1729 |
+
18 586 0.021270199173243146
|
1730 |
+
18 587 0.018489289740698987
|
1731 |
+
18 588 0.01924088688462984
|
1732 |
+
18 589 0.016910935738444197
|
1733 |
+
18 590 0.016234498308906428
|
1734 |
+
18 591 0.017061255167230362
|
1735 |
+
18 592 0.015182262307403233
|
1736 |
+
18 593 0.010672679443818113
|
1737 |
+
18 594 0.004509582863585118
|
1738 |
+
18 595 0.005712138293874483
|
1739 |
+
18 596 0.01570838030815483
|
1740 |
+
18 597 0.011424276587748966
|
1741 |
+
18 598 0.018714768883878245
|
1742 |
+
18 599 0.01969184517098835
|
1743 |
+
18 600 0.020593761743705377
|
1744 |
+
18 601 0.023900789177001128
|
1745 |
+
18 602 0.027583615182262308
|
1746 |
+
18 603 0.0266065388951522
|
1747 |
+
18 604 0.007591131153701616
|
1748 |
+
18 605 0.007666290868094702
|
1749 |
+
18 606 0.012701991732431419
|
1750 |
+
18 607 0.020668921458098462
|
1751 |
+
18 608 0.016309658023299513
|
1752 |
+
18 609 0.015031942878617064
|
1753 |
+
18 610 0.017061255167230362
|
1754 |
+
18 611 0.009845922585494176
|
1755 |
+
18 612 0.009845922585494176
|
1756 |
+
18 613 0.02435174746335964
|
1757 |
+
18 614 0.02450206689214581
|
1758 |
+
18 615 0.02720781661029688
|
1759 |
+
18 616 0.016459977452085682
|
1760 |
+
18 617 0.021119879744456973
|
1761 |
+
18 618 0.012251033446072902
|
1762 |
+
18 619 0.020744081172491546
|
1763 |
+
18 620 0.02247275460353251
|
1764 |
+
18 621 0.008643367155204812
|
1765 |
+
18 622 0.025779782036828264
|
1766 |
+
18 623 0.02006764374295378
|
1767 |
+
18 624 0.011724915445321307
|
1768 |
+
18 625 0.01089815858699737
|
1769 |
+
18 626 0.011800075159714395
|
1770 |
+
18 627 0.006914693724163849
|
1771 |
+
18 628 0.011499436302142051
|
1772 |
+
18 629 0.011273957158962795
|
1773 |
+
18 630 0.0214956783164224
|
1774 |
+
18 631 0.012251033446072907
|
1775 |
+
18 632 0.013077790304396842
|
1776 |
+
18 633 0.014656144306651634
|
1777 |
+
18 634 0.00496054114994363
|
1778 |
+
18 635 0.006914693724163849
|
1779 |
+
18 636 0.004735062006764375
|
1780 |
+
18 637 0.004359263434798948
|
1781 |
+
18 638 0.004208944006012777
|
1782 |
+
18 639 0.003757985719654265
|
1783 |
+
18 640 0.0021796317173994736
|
1784 |
+
18 641 0.003908305148440436
|
1785 |
+
18 642 0.0010522360015031943
|
1786 |
+
18 643 0.0006012777151446825
|
1787 |
+
18 648 0.0005261180007515971
|
1788 |
+
18 658 0.0020293122886133035
|
1789 |
+
18 659 0.0011273957158962795
|
1790 |
+
18 668 0.000751597143930853
|
1791 |
+
18 669 0.0011273957158962795
|
1792 |
+
18 680 0.002931228861330327
|
1793 |
+
18 681 0.0057872980082675695
|
1794 |
+
18 682 0.001428034573468621
|
1795 |
+
18 684 0.0006012777151446825
|
1796 |
+
18 685 0.0006012777151446825
|
1797 |
+
18 696 0.003757985719654265
|
1798 |
+
18 769 0.0009019165727170238
|
1799 |
+
18 770 0.002931228861330327
|
1800 |
+
18 771 0.00015031942878617063
|
1801 |
+
18 775 0.00022547914317925594
|
1802 |
+
18 776 0.001202555430289365
|
1803 |
+
19 580 0.012027744982290436
|
1804 |
+
19 581 0.009961629279811098
|
1805 |
+
19 582 0.0059031877213695395
|
1806 |
+
19 583 0.004574970484061393
|
1807 |
+
19 584 0.009223730814639905
|
1808 |
+
19 585 0.011289846517119244
|
1809 |
+
19 586 0.0038370720188902006
|
1810 |
+
19 587 0.0028040141676505316
|
1811 |
+
19 588 0.0057556080283353
|
1812 |
+
19 589 0.0014757969303423849
|
1813 |
+
19 590 0.0003689492325855962
|
1814 |
+
19 591 0.0008116883116883117
|
1815 |
+
19 598 0.011806375442739079
|
1816 |
+
19 599 0.004501180637544274
|
1817 |
+
19 600 0.0012544273907910272
|
1818 |
+
19 601 0.0012544273907910272
|
1819 |
+
19 602 0.005165289256198347
|
1820 |
+
19 603 0.005165289256198347
|
1821 |
+
19 609 0.0005165289256198347
|
1822 |
+
19 610 0.0008116883116883117
|
1823 |
+
19 613 0.0016971664698937428
|
1824 |
+
19 614 0.0010330578512396697
|
1825 |
+
19 615 0.0011068476977567888
|
1826 |
+
19 616 0.0003689492325855962
|
1827 |
+
19 617 0.0028040141676505316
|
1828 |
+
19 618 0.021325265643447465
|
1829 |
+
19 619 0.012322904368358915
|
1830 |
+
19 620 0.016528925619834708
|
1831 |
+
19 621 0.022432113341204252
|
1832 |
+
19 622 0.0042060212514757975
|
1833 |
+
19 623 0.009518890200708384
|
1834 |
+
19 624 0.021989374262101534
|
1835 |
+
19 625 0.015053128689492327
|
1836 |
+
19 626 0.01977567886658796
|
1837 |
+
19 628 0.017635773317591502
|
1838 |
+
19 629 0.019406729634002362
|
1839 |
+
19 630 0.010256788665879575
|
1840 |
+
19 631 0.01977567886658796
|
1841 |
+
19 632 0.018299881936245575
|
1842 |
+
19 633 0.014684179456906728
|
1843 |
+
19 634 0.027007083825265645
|
1844 |
+
19 635 0.02317001180637544
|
1845 |
+
19 636 0.026638134592680048
|
1846 |
+
19 637 0.02368654073199528
|
1847 |
+
19 638 0.02435064935064935
|
1848 |
+
19 639 0.02457201889020071
|
1849 |
+
19 640 0.021915584415584416
|
1850 |
+
19 641 0.0256788665879575
|
1851 |
+
19 642 0.018816410861865408
|
1852 |
+
19 643 0.01682408500590319
|
1853 |
+
19 644 0.002656434474616293
|
1854 |
+
19 645 0.009445100354191263
|
1855 |
+
19 646 0.0031729634002361272
|
1856 |
+
19 647 0.0025826446280991736
|
1857 |
+
19 648 0.016602715466351833
|
1858 |
+
19 649 0.006419716646989375
|
1859 |
+
19 650 0.010478158205430934
|
1860 |
+
19 651 0.004870129870129871
|
1861 |
+
19 652 0.003246753246753247
|
1862 |
+
19 653 0.0014757969303423849
|
1863 |
+
19 654 0.0013282172373081465
|
1864 |
+
19 655 0.010847107438016527
|
1865 |
+
19 656 0.0005903187721369539
|
1866 |
+
19 657 0.0005165289256198347
|
1867 |
+
19 658 0.02169421487603306
|
1868 |
+
19 659 0.019406729634002362
|
1869 |
+
19 660 0.007747933884297522
|
1870 |
+
19 661 0.001844746162927981
|
1871 |
+
19 662 0.00014757969303423848
|
1872 |
+
19 663 0.012101534828807558
|
1873 |
+
19 664 0.0038370720188902014
|
1874 |
+
19 665 0.0007378984651711924
|
1875 |
+
19 666 0.0157172373081464
|
1876 |
+
19 667 0.006050767414403779
|
1877 |
+
19 668 0.02221074380165289
|
1878 |
+
19 669 0.021103896103896107
|
1879 |
+
19 670 0.0028040141676505316
|
1880 |
+
19 674 0.006419716646989373
|
1881 |
+
19 675 0.0014020070838252656
|
1882 |
+
19 676 0.010109208972845335
|
1883 |
+
19 677 0.0030253837072018895
|
1884 |
+
19 678 7.378984651711924e-05
|
1885 |
+
19 679 0.00014757969303423848
|
1886 |
+
19 680 0.02206316410861866
|
1887 |
+
19 681 0.022432113341204252
|
1888 |
+
19 682 0.0256788665879575
|
1889 |
+
19 683 0.01977567886658796
|
1890 |
+
19 684 0.022358323494687134
|
1891 |
+
19 685 0.02088252656434475
|
1892 |
+
19 686 0.012470484061393153
|
1893 |
+
19 687 0.01586481700118064
|
1894 |
+
19 688 0.004427390791027155
|
1895 |
+
19 689 0.006198347107438017
|
1896 |
+
19 690 0.00974025974025974
|
1897 |
+
19 691 0.000885478158205431
|
1898 |
+
19 692 0.0003689492325855962
|
1899 |
+
19 693 0.00014757969303423848
|
1900 |
+
19 694 0.002877804014167651
|
1901 |
+
19 695 0.0016233766233766235
|
1902 |
+
20 673 1.0
|
data/body_models/J_regressor_mano_RIGHT.txt
ADDED
@@ -0,0 +1,1902 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# 21 778
|
2 |
+
0 4 0.0019103600293901542
|
3 |
+
0 5 0.0027920646583394562
|
4 |
+
0 6 0.00029390154298310065
|
5 |
+
0 7 0.00014695077149155033
|
6 |
+
0 25 0.0016164584864070536
|
7 |
+
0 26 0.000440852314474651
|
8 |
+
0 32 0.011756061719324026
|
9 |
+
0 33 0.021234386480529024
|
10 |
+
0 34 0.019838354151359296
|
11 |
+
0 35 0.016311535635562088
|
12 |
+
0 36 0.015870683321087434
|
13 |
+
0 37 0.02343864805290228
|
14 |
+
0 38 0.01671565025716385
|
15 |
+
0 39 0.020499632623071272
|
16 |
+
0 40 0.005437178545187362
|
17 |
+
0 41 0.010139603232916973
|
18 |
+
0 42 0.002645113886847906
|
19 |
+
0 43 0.00014695077149155033
|
20 |
+
0 44 0.02005878030859662
|
21 |
+
0 45 0.02233651726671565
|
22 |
+
0 50 0.01763409257898604
|
23 |
+
0 51 0.01704628949301984
|
24 |
+
0 52 0.019838354151359296
|
25 |
+
0 53 0.02079353416605437
|
26 |
+
0 54 0.00822924320352682
|
27 |
+
0 55 0.00822924320352682
|
28 |
+
0 78 0.011572373254959589
|
29 |
+
0 79 0.011939750183688464
|
30 |
+
0 84 0.01704628949301984
|
31 |
+
0 85 0.019691403379867745
|
32 |
+
0 88 0.005437178545187362
|
33 |
+
0 89 0.0007347538574577516
|
34 |
+
0 90 0.014548126377663484
|
35 |
+
0 91 0.018736223365172666
|
36 |
+
0 92 0.011645848640705364
|
37 |
+
0 106 0.018515797207935343
|
38 |
+
0 107 0.02204261572373255
|
39 |
+
0 108 0.012417340191036004
|
40 |
+
0 109 0.009992652461425423
|
41 |
+
0 110 0.016311535635562088
|
42 |
+
0 111 0.01880969875091844
|
43 |
+
0 112 0.0073475385745775165
|
44 |
+
0 113 0.0014695077149155032
|
45 |
+
0 114 0.005731080088170463
|
46 |
+
0 116 0.02204261572373255
|
47 |
+
0 117 0.012123438648052902
|
48 |
+
0 118 0.013005143277002204
|
49 |
+
0 119 0.016385011021307863
|
50 |
+
0 120 0.008155767817781044
|
51 |
+
0 121 0.011315209404849376
|
52 |
+
0 122 0.009037472446730345
|
53 |
+
0 130 0.0073475385745775165
|
54 |
+
0 131 0.00911094783247612
|
55 |
+
0 178 0.001763409257898604
|
56 |
+
0 179 0.002351212343864805
|
57 |
+
0 190 0.019544452608376194
|
58 |
+
0 191 0.019691403379867745
|
59 |
+
0 192 0.01704628949301984
|
60 |
+
0 193 0.016605437178545186
|
61 |
+
0 200 0.002351212343864805
|
62 |
+
0 203 0.00822924320352682
|
63 |
+
0 204 0.007641440117560617
|
64 |
+
0 205 0.01704628949301984
|
65 |
+
0 207 0.001763409257898604
|
66 |
+
0 208 0.005290227773695812
|
67 |
+
0 209 0.01763409257898604
|
68 |
+
0 210 0.019691403379867745
|
69 |
+
0 211 0.019691403379867745
|
70 |
+
0 214 0.011315209404849376
|
71 |
+
0 215 0.011315209404849376
|
72 |
+
0 216 0.007641440117560617
|
73 |
+
0 217 0.00822924320352682
|
74 |
+
0 218 0.002351212343864805
|
75 |
+
0 219 0.0011756061719324026
|
76 |
+
0 227 0.002351212343864805
|
77 |
+
0 229 0.007788390889052168
|
78 |
+
0 231 0.002204261572373255
|
79 |
+
0 232 0.016311535635562088
|
80 |
+
0 233 0.006759735488611315
|
81 |
+
0 234 0.011168258633357825
|
82 |
+
0 235 0.019544452608376194
|
83 |
+
0 236 0.0016164584864070536
|
84 |
+
0 239 0.011315209404849376
|
85 |
+
0 241 0.0007347538574577516
|
86 |
+
0 242 0.002351212343864805
|
87 |
+
0 243 0.0036737692872887582
|
88 |
+
0 244 0.0011756061719324026
|
89 |
+
0 254 0.0064658339456282144
|
90 |
+
0 255 0.0038207200587803084
|
91 |
+
0 256 0.002351212343864805
|
92 |
+
0 257 0.002351212343864805
|
93 |
+
0 264 0.014107274063188832
|
94 |
+
0 265 0.00440852314474651
|
95 |
+
0 279 0.011315209404849376
|
96 |
+
0 284 0.00896399706098457
|
97 |
+
0 285 0.0029390154298310064
|
98 |
+
1 0 0.014595751184471957
|
99 |
+
1 1 0.025294207550053488
|
100 |
+
1 2 0.019180803912578332
|
101 |
+
1 3 0.01039278618370778
|
102 |
+
1 4 0.03156044627846554
|
103 |
+
1 5 0.025752712822864135
|
104 |
+
1 6 0.014977838911814154
|
105 |
+
1 7 0.023307351367874065
|
106 |
+
1 8 0.005654898364664528
|
107 |
+
1 9 0.009170105456212748
|
108 |
+
1 10 0.002063273727647868
|
109 |
+
1 11 0.0006113403637475165
|
110 |
+
1 12 0.0018340210912425497
|
111 |
+
1 14 0.001222680727495033
|
112 |
+
1 15 7.641754546843957e-05
|
113 |
+
1 16 0.0011462631820265935
|
114 |
+
1 17 0.0004585052728106374
|
115 |
+
1 18 0.00015283509093687913
|
116 |
+
1 19 0.0003820877273421978
|
117 |
+
1 22 7.641754546843957e-05
|
118 |
+
1 24 0.01413724591166132
|
119 |
+
1 25 0.019257221458046772
|
120 |
+
1 26 0.024377197004432218
|
121 |
+
1 27 0.017346782821335782
|
122 |
+
1 28 0.0007641754546843956
|
123 |
+
1 29 0.0022161088185847473
|
124 |
+
1 30 0.0006877579092159561
|
125 |
+
1 31 0.0005349228182790769
|
126 |
+
1 32 0.0005349228182790768
|
127 |
+
1 33 0.0005349228182790769
|
128 |
+
1 34 0.0024071526822558465
|
129 |
+
1 35 0.002445361454990066
|
130 |
+
1 36 0.029802842732691428
|
131 |
+
1 37 0.022122879413113253
|
132 |
+
1 38 0.010029802842732692
|
133 |
+
1 39 0.02334556014060829
|
134 |
+
1 40 0.029344337459880795
|
135 |
+
1 41 0.032171786642213054
|
136 |
+
1 42 0.02009781445819961
|
137 |
+
1 43 0.009934280910897143
|
138 |
+
1 60 0.004355800091701055
|
139 |
+
1 61 0.00855876509246523
|
140 |
+
1 62 0.0004585052728106374
|
141 |
+
1 63 0.003285954455142901
|
142 |
+
1 64 0.0012990982729634726
|
143 |
+
1 65 7.641754546843957e-05
|
144 |
+
1 66 0.0019868561821794286
|
145 |
+
1 67 0.004814305364511693
|
146 |
+
1 68 0.008253094910591475
|
147 |
+
1 69 0.0018340210912425497
|
148 |
+
1 70 0.0003820877273421978
|
149 |
+
1 71 7.641754546843957e-05
|
150 |
+
1 88 0.021320495185694635
|
151 |
+
1 89 0.013907993275256002
|
152 |
+
1 90 0.01986856182179429
|
153 |
+
1 91 0.013564114320648022
|
154 |
+
1 92 0.003763564114320649
|
155 |
+
1 93 0.0004585052728106374
|
156 |
+
1 94 0.008329512456059913
|
157 |
+
1 95 0.007565337001375517
|
158 |
+
1 104 0.0027510316368638244
|
159 |
+
1 105 0.0072596668195017595
|
160 |
+
1 109 0.009705028274491823
|
161 |
+
1 110 0.005654898364664528
|
162 |
+
1 111 0.015436344184624792
|
163 |
+
1 112 0.019180803912578332
|
164 |
+
1 113 0.03339446736970809
|
165 |
+
1 114 0.0340058077334556
|
166 |
+
1 115 0.02559987773192725
|
167 |
+
1 116 0.008405930001528351
|
168 |
+
1 117 0.0017767079321412199
|
169 |
+
1 118 0.00527281063732233
|
170 |
+
1 119 0.00032477456824086816
|
171 |
+
1 122 0.004967140455448571
|
172 |
+
1 123 0.007259666819501758
|
173 |
+
1 124 0.0016811860003056705
|
174 |
+
1 125 0.0025217790004585057
|
175 |
+
1 126 0.008176677365123033
|
176 |
+
1 129 0.00030567018187375826
|
177 |
+
1 145 0.00030567018187375826
|
178 |
+
1 146 0.0006877579092159561
|
179 |
+
1 147 7.641754546843957e-05
|
180 |
+
1 152 7.641754546843957e-05
|
181 |
+
1 157 0.002063273727647868
|
182 |
+
1 158 0.0016047684548372307
|
183 |
+
1 159 0.0032095369096744614
|
184 |
+
1 188 0.0007641754546843956
|
185 |
+
1 190 0.0019868561821794286
|
186 |
+
1 191 0.0004585052728106374
|
187 |
+
1 192 0.0016047684548372307
|
188 |
+
1 193 0.005884151001069847
|
189 |
+
1 207 0.00015283509093687913
|
190 |
+
1 208 7.641754546843957e-05
|
191 |
+
1 209 0.00030567018187375826
|
192 |
+
1 216 0.0008405930001528353
|
193 |
+
1 217 0.003897294818890417
|
194 |
+
1 218 0.0008405930001528353
|
195 |
+
1 219 0.0014519333639003516
|
196 |
+
1 227 0.005502063273727648
|
197 |
+
1 229 0.008635182637933671
|
198 |
+
1 230 0.004126547455295736
|
199 |
+
1 231 0.009705028274491824
|
200 |
+
1 232 0.01245605991135565
|
201 |
+
1 233 0.016888277548525142
|
202 |
+
1 234 0.001413724591166132
|
203 |
+
1 235 0.005654898364664528
|
204 |
+
1 236 0.012838147638697846
|
205 |
+
1 239 0.00026746140913953847
|
206 |
+
1 240 0.01543634418462479
|
207 |
+
1 241 0.0006877579092159561
|
208 |
+
1 242 0.0032095369096744614
|
209 |
+
1 248 0.004890722909980132
|
210 |
+
1 249 0.0005349228182790769
|
211 |
+
1 250 0.0015283509093687911
|
212 |
+
1 251 0.0009170105456212748
|
213 |
+
1 252 0.0029038667278007036
|
214 |
+
1 253 0.005502063273727649
|
215 |
+
1 254 0.0019868561821794286
|
216 |
+
1 255 0.0002292526364053187
|
217 |
+
1 264 0.028885832187070158
|
218 |
+
1 265 0.029650007641754548
|
219 |
+
1 266 0.006953996637628001
|
220 |
+
1 267 0.002445361454990066
|
221 |
+
1 268 0.00015283509093687913
|
222 |
+
1 285 0.010087116001834023
|
223 |
+
1 286 0.007794589637780836
|
224 |
+
1 287 0.0025981965459269452
|
225 |
+
1 697 0.0004585052728106374
|
226 |
+
1 699 7.641754546843957e-05
|
227 |
+
1 700 0.00030567018187375826
|
228 |
+
1 704 0.0002292526364053187
|
229 |
+
1 705 0.0008405930001528353
|
230 |
+
1 706 7.641754546843957e-05
|
231 |
+
2 0 0.0027531810402559712
|
232 |
+
2 1 0.0034972840241089364
|
233 |
+
2 2 0.007887491628841432
|
234 |
+
2 3 0.0056551826772825355
|
235 |
+
2 4 0.009152466701391472
|
236 |
+
2 5 0.01674231713669172
|
237 |
+
2 6 0.02708534861224793
|
238 |
+
2 7 0.02209985862043307
|
239 |
+
2 8 0.00833395341915321
|
240 |
+
2 9 0.009152466701391472
|
241 |
+
2 10 0.011682416846491553
|
242 |
+
2 11 0.0055063620805119425
|
243 |
+
2 12 0.005431951782126646
|
244 |
+
2 13 0.0011161544757794478
|
245 |
+
2 14 0.006176054765979612
|
246 |
+
2 15 0.0017858471612471167
|
247 |
+
2 16 0.0007441029838529652
|
248 |
+
2 19 0.0003720514919264826
|
249 |
+
2 26 0.000967333879008855
|
250 |
+
2 27 0.0008929235806235583
|
251 |
+
2 28 0.013245033112582783
|
252 |
+
2 29 0.013765905201279856
|
253 |
+
2 30 0.009970979983629735
|
254 |
+
2 31 0.011384775652950369
|
255 |
+
2 36 0.0023811295483294886
|
256 |
+
2 37 0.00014882059677059304
|
257 |
+
2 38 7.441029838529652e-05
|
258 |
+
2 39 0.0020834883547883026
|
259 |
+
2 40 0.0055063620805119425
|
260 |
+
2 41 0.009896569685244438
|
261 |
+
2 42 0.022843961604286034
|
262 |
+
2 43 0.032666120991145166
|
263 |
+
2 60 0.00364610462087953
|
264 |
+
2 61 0.0017858471612471167
|
265 |
+
2 62 0.0002976411935411861
|
266 |
+
2 63 0.000967333879008855
|
267 |
+
2 64 0.0014882059677059304
|
268 |
+
2 65 0.0004464617903117792
|
269 |
+
2 68 0.0002976411935411861
|
270 |
+
2 69 7.441029838529652e-05
|
271 |
+
2 88 0.01562616266091227
|
272 |
+
2 89 0.027234169209018527
|
273 |
+
2 90 0.00513431058858546
|
274 |
+
2 91 0.0006696926854676687
|
275 |
+
2 93 7.441029838529652e-05
|
276 |
+
2 94 0.0005952823870823722
|
277 |
+
2 104 0.025225091152615526
|
278 |
+
2 105 0.017858471612471165
|
279 |
+
2 113 0.0035716943224942334
|
280 |
+
2 114 0.002604360443485378
|
281 |
+
2 115 0.010566262370712107
|
282 |
+
2 123 0.026787707418706754
|
283 |
+
2 124 0.021504576233350697
|
284 |
+
2 125 0.01882580549148002
|
285 |
+
2 126 0.02083488354788303
|
286 |
+
2 127 0.0002232308951558896
|
287 |
+
2 128 0.0002976411935411861
|
288 |
+
2 129 0.0017114368628618197
|
289 |
+
2 144 0.0002232308951558896
|
290 |
+
2 145 0.0013393853709353374
|
291 |
+
2 158 0.002604360443485378
|
292 |
+
2 193 0.0003720514919264826
|
293 |
+
2 217 0.0007441029838529652
|
294 |
+
2 219 0.0004464617903117792
|
295 |
+
2 227 0.003199642830567751
|
296 |
+
2 229 0.003125232532182454
|
297 |
+
2 230 0.008854825507850286
|
298 |
+
2 231 0.00982215938685914
|
299 |
+
2 232 0.002009078056403006
|
300 |
+
2 233 0.007813081330456134
|
301 |
+
2 235 7.441029838529652e-05
|
302 |
+
2 236 0.01912344668502121
|
303 |
+
2 240 0.01480764937867401
|
304 |
+
2 248 0.03318699307984225
|
305 |
+
2 249 0.01823052310439765
|
306 |
+
2 250 0.02887119577349505
|
307 |
+
2 251 0.02500186025745963
|
308 |
+
2 252 0.02864796487833916
|
309 |
+
2 253 0.032889351886301064
|
310 |
+
2 259 0.00014882059677059304
|
311 |
+
2 264 0.0002232308951558896
|
312 |
+
2 265 0.0005952823870823722
|
313 |
+
2 266 0.015402931765756382
|
314 |
+
2 267 0.01622144504799464
|
315 |
+
2 286 0.02805268249125679
|
316 |
+
2 287 0.025820373539697895
|
317 |
+
2 697 0.014510008185132822
|
318 |
+
2 698 0.008631594612694398
|
319 |
+
2 699 0.011161544757794479
|
320 |
+
2 700 0.01049185207232681
|
321 |
+
2 701 0.00811072252399732
|
322 |
+
2 702 0.013393853709353377
|
323 |
+
2 703 0.010938313862638589
|
324 |
+
2 704 0.008185132822382618
|
325 |
+
2 705 0.02187662772527718
|
326 |
+
2 706 0.018825805491480024
|
327 |
+
2 707 0.011905647741647447
|
328 |
+
2 708 0.007217798943373763
|
329 |
+
2 709 0.005059900290200163
|
330 |
+
2 710 0.003199642830567751
|
331 |
+
2 711 0.0019346677580177095
|
332 |
+
2 712 0.005952823870823722
|
333 |
+
2 713 0.00364610462087953
|
334 |
+
2 714 0.00364610462087953
|
335 |
+
2 715 0.0026787707418706747
|
336 |
+
2 716 0.0021578986531735995
|
337 |
+
2 721 0.0006696926854676687
|
338 |
+
2 722 0.0002232308951558896
|
339 |
+
2 723 0.0002232308951558896
|
340 |
+
2 725 0.0004464617903117792
|
341 |
+
2 731 0.0032740531289530473
|
342 |
+
2 732 0.0008185132822382618
|
343 |
+
2 741 0.0005952823870823722
|
344 |
+
2 742 0.0005208720886970756
|
345 |
+
2 746 0.0002232308951558896
|
346 |
+
2 749 0.0005208720886970756
|
347 |
+
2 753 0.0034972840241089364
|
348 |
+
2 754 0.004018156112806012
|
349 |
+
2 755 0.0014882059677059304
|
350 |
+
2 757 0.0008929235806235583
|
351 |
+
2 758 0.0014137956693206339
|
352 |
+
2 759 0.0003720514919264826
|
353 |
+
2 760 7.441029838529652e-05
|
354 |
+
3 6 0.0019164148301024542
|
355 |
+
3 7 0.0014004569912287167
|
356 |
+
3 8 0.000884499152354979
|
357 |
+
3 9 0.00029483305078499295
|
358 |
+
3 10 0.004422495761774894
|
359 |
+
3 11 0.0011793322031399718
|
360 |
+
3 12 0.0005896661015699859
|
361 |
+
3 14 0.0011056239404437236
|
362 |
+
3 28 0.011203655929829732
|
363 |
+
3 29 0.0037591213975086604
|
364 |
+
3 30 0.004496204024471142
|
365 |
+
3 31 0.011645905506007222
|
366 |
+
3 43 0.0019164148301024544
|
367 |
+
3 89 0.0005896661015699859
|
368 |
+
3 104 0.009729490675904768
|
369 |
+
3 105 0.002137539618191199
|
370 |
+
3 123 0.006412618854573597
|
371 |
+
3 124 0.0187956069875433
|
372 |
+
3 125 0.013414903810717178
|
373 |
+
3 126 0.004938453600648632
|
374 |
+
3 230 0.0007370826269624824
|
375 |
+
3 231 0.00022112478808874474
|
376 |
+
3 236 0.0005159578388737376
|
377 |
+
3 240 0.0008844991523549787
|
378 |
+
3 248 0.007665659320409817
|
379 |
+
3 249 0.013120070759932186
|
380 |
+
3 250 0.009434657625119773
|
381 |
+
3 251 0.012088155082184712
|
382 |
+
3 252 0.004348787499078646
|
383 |
+
3 253 0.003022038770546178
|
384 |
+
3 266 0.0029483305078499295
|
385 |
+
3 267 0.0125304046583622
|
386 |
+
3 286 0.002727205719761185
|
387 |
+
3 287 0.005896661015699859
|
388 |
+
3 697 0.01805852436058082
|
389 |
+
3 698 0.019016731775632047
|
390 |
+
3 699 0.021375396181911987
|
391 |
+
3 700 0.01968010613989828
|
392 |
+
3 701 0.023512935800103187
|
393 |
+
3 702 0.01975381440259453
|
394 |
+
3 703 0.021965062283481978
|
395 |
+
3 704 0.019164148301024544
|
396 |
+
3 705 0.015331318640819633
|
397 |
+
3 706 0.017837399572492075
|
398 |
+
3 707 0.02889363897692931
|
399 |
+
3 708 0.02130168791921574
|
400 |
+
3 709 0.027050932409523103
|
401 |
+
3 710 0.024544851477850665
|
402 |
+
3 711 0.0209331466057345
|
403 |
+
3 712 0.0232181027493182
|
404 |
+
3 713 0.023070686223925697
|
405 |
+
3 714 0.024102601901673175
|
406 |
+
3 715 0.018353357411365814
|
407 |
+
3 716 0.017026608682833344
|
408 |
+
3 717 0.0016952900420137097
|
409 |
+
3 718 0.0062652023291811
|
410 |
+
3 719 0.0033168718213311705
|
411 |
+
3 720 0.00125304046583622
|
412 |
+
3 721 0.016879192157440846
|
413 |
+
3 722 0.01090882287904474
|
414 |
+
3 723 0.008402741947372299
|
415 |
+
3 724 0.004717328812559887
|
416 |
+
3 725 0.010982531141740989
|
417 |
+
3 726 0.0033168718213311705
|
418 |
+
3 727 0.0008107908896587306
|
419 |
+
3 730 7.370826269624824e-05
|
420 |
+
3 731 0.022775853173140702
|
421 |
+
3 732 0.018279649148669565
|
422 |
+
3 733 0.009803198938601014
|
423 |
+
3 734 0.003022038770546178
|
424 |
+
3 735 0.0003685413134812412
|
425 |
+
3 736 0.011719613768703471
|
426 |
+
3 737 0.003906537922901157
|
427 |
+
3 738 0.0008107908896587306
|
428 |
+
3 739 0.013488612073413427
|
429 |
+
3 740 0.005306994914129874
|
430 |
+
3 741 0.021301687919215745
|
431 |
+
3 742 0.019606397877202027
|
432 |
+
3 743 0.0022112478808874476
|
433 |
+
3 746 0.006338910591877348
|
434 |
+
3 747 0.00125304046583622
|
435 |
+
3 748 0.0016952900420137097
|
436 |
+
3 749 0.009876907201297264
|
437 |
+
3 750 0.003022038770546178
|
438 |
+
3 751 7.370826269624824e-05
|
439 |
+
3 753 0.025208225842116898
|
440 |
+
3 754 0.0209331466057345
|
441 |
+
3 755 0.023291811012014444
|
442 |
+
3 756 0.017837399572492075
|
443 |
+
3 757 0.021449104444608236
|
444 |
+
3 758 0.01975381440259453
|
445 |
+
3 759 0.01171961376870347
|
446 |
+
3 760 0.01348861207341343
|
447 |
+
3 761 0.003906537922901157
|
448 |
+
3 762 0.005306994914129872
|
449 |
+
3 763 0.007960492371194809
|
450 |
+
3 764 0.0008107908896587306
|
451 |
+
3 765 0.0003685413134812412
|
452 |
+
3 767 0.0022112478808874476
|
453 |
+
3 768 0.0011056239404437238
|
454 |
+
4 745 1.0
|
455 |
+
5 0 0.0012638674343491084
|
456 |
+
5 1 0.0001404297149276787
|
457 |
+
5 2 0.00035107428731919675
|
458 |
+
5 3 0.002808594298553574
|
459 |
+
5 8 0.004072461732902682
|
460 |
+
5 9 0.0007723634321022329
|
461 |
+
5 10 0.004774610307541076
|
462 |
+
5 11 0.01418340120769555
|
463 |
+
5 12 0.012357814913635726
|
464 |
+
5 13 0.01930908580255582
|
465 |
+
5 14 0.007934278893413846
|
466 |
+
5 15 0.020011234377194213
|
467 |
+
5 16 0.0021064457239151806
|
468 |
+
5 17 0.0006319337171745541
|
469 |
+
5 18 0.0022468754388428594
|
470 |
+
5 19 0.009127931470299114
|
471 |
+
5 21 0.00042128914478303613
|
472 |
+
5 24 0.0009127931470299115
|
473 |
+
5 25 7.021485746383936e-05
|
474 |
+
5 26 0.0001404297149276787
|
475 |
+
5 27 0.0010532228619575903
|
476 |
+
5 28 0.0004212891447830361
|
477 |
+
5 29 0.0015447268642044658
|
478 |
+
5 30 0.003932032017975004
|
479 |
+
5 31 0.0009127931470299115
|
480 |
+
5 46 0.0006319337171745542
|
481 |
+
5 47 0.00035107428731919675
|
482 |
+
5 48 0.003721387445583485
|
483 |
+
5 49 0.0027383794410897346
|
484 |
+
5 56 0.0002808594298553574
|
485 |
+
5 57 7.021485746383936e-05
|
486 |
+
5 58 0.0010532228619575903
|
487 |
+
5 59 0.0028788091560174134
|
488 |
+
5 60 0.010040724617329027
|
489 |
+
5 61 0.005687403454570988
|
490 |
+
5 62 0.029981744137059403
|
491 |
+
5 63 0.017483499508496
|
492 |
+
5 64 0.02029209380704957
|
493 |
+
5 65 0.024294340682488414
|
494 |
+
5 66 0.0029490240134812527
|
495 |
+
5 67 0.0011234377194214297
|
496 |
+
5 68 0.005827833169498665
|
497 |
+
5 69 0.00975986518747367
|
498 |
+
5 74 0.00217666058137902
|
499 |
+
5 75 0.0010532228619575903
|
500 |
+
5 76 0.00035107428731919675
|
501 |
+
5 77 0.00021064457239151807
|
502 |
+
5 86 0.0007723634321022329
|
503 |
+
5 87 0.0021064457239151806
|
504 |
+
5 93 0.018536722370453586
|
505 |
+
5 94 0.0016851565791321445
|
506 |
+
5 95 0.0001404297149276787
|
507 |
+
5 104 7.021485746383936e-05
|
508 |
+
5 105 0.0001404297149276787
|
509 |
+
5 127 0.023592192107850022
|
510 |
+
5 128 0.02710293498104199
|
511 |
+
5 129 0.020713382951832608
|
512 |
+
5 132 0.023030473248139307
|
513 |
+
5 133 0.005195899452324112
|
514 |
+
5 134 0.005195899452324112
|
515 |
+
5 135 0.01305996348827412
|
516 |
+
5 136 0.008495997753124563
|
517 |
+
5 137 0.014323830922623225
|
518 |
+
5 138 0.01818564808313439
|
519 |
+
5 139 0.011515236624069652
|
520 |
+
5 140 0.008215138323269205
|
521 |
+
5 143 0.010742873191967421
|
522 |
+
5 144 0.016991995506249125
|
523 |
+
5 145 0.010040724617329027
|
524 |
+
5 146 0.00035107428731919675
|
525 |
+
5 147 0.0011234377194214297
|
526 |
+
5 149 0.013832326920376354
|
527 |
+
5 150 0.016430276646538407
|
528 |
+
5 151 0.010181154332256704
|
529 |
+
5 152 0.011023732621822779
|
530 |
+
5 155 0.00035107428731919675
|
531 |
+
5 156 0.001966016008987502
|
532 |
+
5 157 7.021485746383936e-05
|
533 |
+
5 158 0.003932032017975004
|
534 |
+
5 164 0.0034405280157281284
|
535 |
+
5 165 0.005195899452324111
|
536 |
+
5 166 0.0014745120067406266
|
537 |
+
5 167 0.0014745120067406264
|
538 |
+
5 168 0.026049712119084405
|
539 |
+
5 169 0.02927959556242101
|
540 |
+
5 170 0.023873051537705376
|
541 |
+
5 171 0.016008987501755372
|
542 |
+
5 172 0.027102934981041993
|
543 |
+
5 173 0.016921780648785283
|
544 |
+
5 174 0.005546973739643309
|
545 |
+
5 175 0.005406544024715631
|
546 |
+
5 176 0.013551467490520995
|
547 |
+
5 177 0.00758320460609465
|
548 |
+
5 183 7.021485746383936e-05
|
549 |
+
5 185 0.009127931470299114
|
550 |
+
5 186 0.017834573795815194
|
551 |
+
5 187 0.008074708608341525
|
552 |
+
5 189 0.007161915461311614
|
553 |
+
5 194 0.010602443477039742
|
554 |
+
5 195 0.01060244347703974
|
555 |
+
5 206 0.0013340822918129478
|
556 |
+
5 212 0.007091700603847775
|
557 |
+
5 213 0.0013340822918129476
|
558 |
+
5 219 0.0002808594298553574
|
559 |
+
5 220 0.00435332116275804
|
560 |
+
5 222 0.0002808594298553574
|
561 |
+
5 223 0.00042128914478303613
|
562 |
+
5 225 0.0016851565791321445
|
563 |
+
5 226 0.00042128914478303613
|
564 |
+
5 227 0.000983008004493751
|
565 |
+
5 228 0.00975986518747367
|
566 |
+
5 230 0.001825586294059823
|
567 |
+
5 231 7.021485746383936e-05
|
568 |
+
5 246 0.00035107428731919675
|
569 |
+
5 258 0.020924027524224127
|
570 |
+
5 259 0.022398539530964757
|
571 |
+
5 260 0.015587698356972338
|
572 |
+
5 261 0.012568459486027245
|
573 |
+
5 262 0.009619435472545991
|
574 |
+
5 263 0.01305996348827412
|
575 |
+
5 266 0.0010532228619575903
|
576 |
+
5 267 0.0005617188597107148
|
577 |
+
5 268 0.004283106305294201
|
578 |
+
5 269 0.0017553714365959837
|
579 |
+
5 270 0.005266114309787951
|
580 |
+
5 271 0.004844825165004915
|
581 |
+
5 274 0.018045218368206713
|
582 |
+
5 276 0.0002808594298553574
|
583 |
+
5 277 0.00021064457239151807
|
584 |
+
5 280 0.0001404297149276787
|
585 |
+
5 288 0.00540654402471563
|
586 |
+
5 290 7.021485746383936e-05
|
587 |
+
5 358 0.0002808594298553574
|
588 |
+
5 359 0.00035107428731919675
|
589 |
+
5 362 0.00021064457239151807
|
590 |
+
5 363 0.0002808594298553574
|
591 |
+
5 365 7.021485746383936e-05
|
592 |
+
5 366 0.0009127931470299116
|
593 |
+
5 367 0.0013340822918129476
|
594 |
+
5 368 0.005125684594860273
|
595 |
+
5 369 0.0034405280157281284
|
596 |
+
5 370 0.0013340822918129476
|
597 |
+
5 371 0.00021064457239151807
|
598 |
+
5 373 0.00042128914478303613
|
599 |
+
5 375 0.00035107428731919675
|
600 |
+
5 378 0.004493750877685719
|
601 |
+
5 379 0.0034405280157281284
|
602 |
+
5 380 0.004634180592613397
|
603 |
+
5 383 0.00042128914478303613
|
604 |
+
5 385 0.0016149417216683051
|
605 |
+
5 386 0.001404297149276787
|
606 |
+
5 387 0.0016851565791321445
|
607 |
+
5 388 0.0002808594298553574
|
608 |
+
5 399 0.0014745120067406264
|
609 |
+
6 46 0.019904998869034157
|
610 |
+
6 47 0.01960340797707909
|
611 |
+
6 48 0.025559828093191583
|
612 |
+
6 49 0.02352408957249491
|
613 |
+
6 56 0.022166930558697125
|
614 |
+
6 57 0.020131192038000453
|
615 |
+
6 58 0.02194073738973083
|
616 |
+
6 59 0.028952725627686037
|
617 |
+
6 62 0.0005277840609213601
|
618 |
+
6 65 0.00022619316896629722
|
619 |
+
6 86 0.02382568046444997
|
620 |
+
6 87 0.022543919173640955
|
621 |
+
6 127 0.0012063635678202518
|
622 |
+
6 128 0.0007539772298876573
|
623 |
+
6 132 0.0006031817839101259
|
624 |
+
6 133 0.017643067179371183
|
625 |
+
6 134 0.02382568046444997
|
626 |
+
6 135 0.01379778330694413
|
627 |
+
6 136 0.01259141973912388
|
628 |
+
6 137 0.004448465656337178
|
629 |
+
6 138 0.003091306642539395
|
630 |
+
6 139 0.009424715373595717
|
631 |
+
6 140 0.012214431124180048
|
632 |
+
6 143 0.0005277840609213601
|
633 |
+
6 144 0.0012817612908090175
|
634 |
+
6 150 0.0008293749528764231
|
635 |
+
6 155 0.019678805700067855
|
636 |
+
6 156 0.0244288622483601
|
637 |
+
6 164 0.019980396592022914
|
638 |
+
6 165 0.017944658071326246
|
639 |
+
6 166 0.023222498680539848
|
640 |
+
6 167 0.023901078187438737
|
641 |
+
6 168 0.002789715750584332
|
642 |
+
6 169 0.002186533966674206
|
643 |
+
6 170 0.00987710171152831
|
644 |
+
6 171 0.005881022393123726
|
645 |
+
6 172 0.004071477041393349
|
646 |
+
6 173 0.011837442509236221
|
647 |
+
6 174 0.022166930558697128
|
648 |
+
6 175 0.02382568046444997
|
649 |
+
6 176 0.019377214808112796
|
650 |
+
6 177 0.013119203800045236
|
651 |
+
6 185 0.0016587499057528462
|
652 |
+
6 186 0.004448465656337178
|
653 |
+
6 187 0.0005277840609213601
|
654 |
+
6 189 0.020809771544899342
|
655 |
+
6 194 0.015154942320741913
|
656 |
+
6 195 0.01839704440925884
|
657 |
+
6 212 0.021262157882831936
|
658 |
+
6 213 0.022317726004674656
|
659 |
+
6 221 0.006333408731056322
|
660 |
+
6 222 0.016210510442584633
|
661 |
+
6 223 0.018472442132247607
|
662 |
+
6 224 0.00987710171152831
|
663 |
+
6 225 0.02744477116791073
|
664 |
+
6 226 0.020583578375933047
|
665 |
+
6 228 0.0005277840609213602
|
666 |
+
6 237 0.012516022016135112
|
667 |
+
6 238 0.011912840232224985
|
668 |
+
6 245 0.011912840232224985
|
669 |
+
6 258 0.0052024428862248355
|
670 |
+
6 259 0.002337329412651738
|
671 |
+
6 260 0.007162783683932745
|
672 |
+
6 261 0.013043806077056472
|
673 |
+
6 262 0.0016587499057528462
|
674 |
+
6 263 0.007388976852899043
|
675 |
+
6 272 0.014174771921887958
|
676 |
+
6 273 0.012817612908090177
|
677 |
+
6 274 0.0059564201161124925
|
678 |
+
6 280 0.019301817085124028
|
679 |
+
6 281 0.011385056171303627
|
680 |
+
6 282 0.011460453894292393
|
681 |
+
6 283 0.017643067179371186
|
682 |
+
6 294 0.003920681595415819
|
683 |
+
6 295 0.0069365905149664465
|
684 |
+
6 296 0.0037698861494382865
|
685 |
+
6 297 0.00512704516323607
|
686 |
+
6 298 0.006634999623011385
|
687 |
+
6 299 0.002789715750584332
|
688 |
+
6 300 0.0021865339666742064
|
689 |
+
6 301 0.0038452838724270517
|
690 |
+
6 302 0.0005277840609213601
|
691 |
+
6 303 0.0006031817839101259
|
692 |
+
6 305 0.00030159089195506294
|
693 |
+
6 316 0.0016587499057528462
|
694 |
+
6 321 0.0009047726758651889
|
695 |
+
6 330 0.0021111362436854408
|
696 |
+
6 331 0.0015079544597753145
|
697 |
+
6 340 0.00512704516323607
|
698 |
+
6 341 0.004599261102314709
|
699 |
+
6 342 0.0011309658448314859
|
700 |
+
6 344 0.0007539772298876573
|
701 |
+
6 345 0.00022619316896629722
|
702 |
+
7 46 0.008690077640857611
|
703 |
+
7 47 0.009188688653037966
|
704 |
+
7 48 0.0033478167960680964
|
705 |
+
7 49 0.0034902770852624832
|
706 |
+
7 56 0.010898212123370611
|
707 |
+
7 57 0.012322815015314481
|
708 |
+
7 58 0.004202578531234419
|
709 |
+
7 59 0.003276586651470902
|
710 |
+
7 86 0.00648194315834461
|
711 |
+
7 87 0.0016382933257354513
|
712 |
+
7 133 0.00035615072298596765
|
713 |
+
7 134 0.0015670631811382577
|
714 |
+
7 155 0.009829759954412709
|
715 |
+
7 156 0.004131348386637225
|
716 |
+
7 164 0.0009259918797635161
|
717 |
+
7 165 0.0006410713013747418
|
718 |
+
7 166 0.003917657952845645
|
719 |
+
7 167 0.0050573402664007405
|
720 |
+
7 174 0.001638293325735451
|
721 |
+
7 175 0.0014246028919438706
|
722 |
+
7 189 0.0009259918797635161
|
723 |
+
7 194 0.00028492057838877413
|
724 |
+
7 195 0.0006410713013747418
|
725 |
+
7 212 0.00042738086758316123
|
726 |
+
7 213 0.0037039675190540643
|
727 |
+
7 221 0.019517059619631027
|
728 |
+
7 222 0.016739083980340477
|
729 |
+
7 223 0.0143172590640359
|
730 |
+
7 224 0.02443193959683738
|
731 |
+
7 225 0.00683809388133058
|
732 |
+
7 226 0.01111190255716219
|
733 |
+
7 237 0.016739083980340477
|
734 |
+
7 238 0.018092456727687157
|
735 |
+
7 245 0.01367618776266116
|
736 |
+
7 272 0.02236626540351877
|
737 |
+
7 273 0.01923213904124225
|
738 |
+
7 280 0.011040672412564997
|
739 |
+
7 281 0.020086900776408578
|
740 |
+
7 282 0.01859106773986751
|
741 |
+
7 283 0.0165253935465489
|
742 |
+
7 294 0.024004558729254222
|
743 |
+
7 295 0.024075788873851416
|
744 |
+
7 296 0.02443193959683738
|
745 |
+
7 297 0.025357931476600898
|
746 |
+
7 298 0.026283923356364414
|
747 |
+
7 299 0.023933328584657028
|
748 |
+
7 300 0.022722416126504736
|
749 |
+
7 301 0.02514424104280932
|
750 |
+
7 302 0.01738015528171522
|
751 |
+
7 303 0.020941662511574897
|
752 |
+
7 304 0.007835315905691288
|
753 |
+
7 305 0.017380155281715225
|
754 |
+
7 306 0.011396823135550965
|
755 |
+
7 307 0.0036327373744568705
|
756 |
+
7 308 0.0012821426027494836
|
757 |
+
7 309 0.002777975639290548
|
758 |
+
7 310 0.011966664292328516
|
759 |
+
7 311 0.005342260844789515
|
760 |
+
7 312 0.0038464278082484507
|
761 |
+
7 313 0.0014958330365410642
|
762 |
+
7 314 0.0007835315905691288
|
763 |
+
7 315 0.008191466628677256
|
764 |
+
7 316 0.022651185981907542
|
765 |
+
7 317 0.00035615072298596765
|
766 |
+
7 321 0.02101289265617209
|
767 |
+
7 322 0.01225158487071729
|
768 |
+
7 323 0.007764085761094094
|
769 |
+
7 324 0.002564285205498967
|
770 |
+
7 325 0.01994444048721419
|
771 |
+
7 326 0.008690077640857611
|
772 |
+
7 327 0.0024218249163045803
|
773 |
+
7 328 0.0165253935465489
|
774 |
+
7 329 0.006980554170524965
|
775 |
+
7 330 0.028064676971294254
|
776 |
+
7 331 0.021084122800769284
|
777 |
+
7 332 0.0019232139041242254
|
778 |
+
7 333 0.00021369043379158061
|
779 |
+
7 334 0.010969442267967804
|
780 |
+
7 335 0.0024930550609017737
|
781 |
+
7 336 0.008690077640857611
|
782 |
+
7 337 0.003988888097442838
|
783 |
+
7 338 0.00028492057838877413
|
784 |
+
7 340 0.019588289764228224
|
785 |
+
7 341 0.0242182491630458
|
786 |
+
7 342 0.021867654391338417
|
787 |
+
7 343 0.014103568630244322
|
788 |
+
7 344 0.018662297884464708
|
789 |
+
7 345 0.014673409787021868
|
790 |
+
7 346 0.006125792435358643
|
791 |
+
7 347 0.009758529809815513
|
792 |
+
7 348 0.0017095234703326447
|
793 |
+
7 349 0.0031341263622765153
|
794 |
+
7 350 0.004772419688011967
|
795 |
+
7 351 0.0006410713013747418
|
796 |
+
7 352 0.0008547617351663223
|
797 |
+
7 353 0.00042738086758316123
|
798 |
+
7 354 0.001068452168957903
|
799 |
+
7 355 0.0009972220243607095
|
800 |
+
8 317 1.0
|
801 |
+
9 11 0.0002498906728306366
|
802 |
+
9 13 0.0002498906728306366
|
803 |
+
9 14 0.0009995626913225464
|
804 |
+
9 15 0.0022490160554757294
|
805 |
+
9 16 0.0029986880739676387
|
806 |
+
9 17 0.002249016055475729
|
807 |
+
9 18 0.007746610857749733
|
808 |
+
9 19 0.00949584556756419
|
809 |
+
9 20 0.0013743987005685012
|
810 |
+
9 21 0.00437308677453614
|
811 |
+
9 22 0.0009995626913225461
|
812 |
+
9 23 0.00018741800462297744
|
813 |
+
9 48 0.0004997813456612732
|
814 |
+
9 59 0.0002498906728306366
|
815 |
+
9 62 0.0014368713687761604
|
816 |
+
9 63 0.000874617354907228
|
817 |
+
9 64 6.247266820765915e-05
|
818 |
+
9 65 6.247266820765915e-05
|
819 |
+
9 66 0.0024989067283063657
|
820 |
+
9 67 0.000437308677453614
|
821 |
+
9 68 0.0006871993502842506
|
822 |
+
9 69 0.0029986880739676387
|
823 |
+
9 71 0.0004997813456612732
|
824 |
+
9 74 0.015555694383707127
|
825 |
+
9 75 0.017867183107390515
|
826 |
+
9 76 0.017242456425313923
|
827 |
+
9 77 0.00868370088086462
|
828 |
+
9 83 6.247266820765915e-05
|
829 |
+
9 87 0.0004997813456612732
|
830 |
+
9 93 0.0033110514150059348
|
831 |
+
9 127 0.0006247266820765914
|
832 |
+
9 132 0.004810395451989753
|
833 |
+
9 133 0.0006247266820765914
|
834 |
+
9 135 0.0001249453364153183
|
835 |
+
9 136 0.0004997813456612732
|
836 |
+
9 137 0.015555694383707127
|
837 |
+
9 138 0.007246829512088461
|
838 |
+
9 139 0.005997376147935278
|
839 |
+
9 140 0.008683700880864622
|
840 |
+
9 141 0.005997376147935278
|
841 |
+
9 142 0.0025613793965140247
|
842 |
+
9 143 0.015743112388330104
|
843 |
+
9 144 0.009558318235771848
|
844 |
+
9 145 0.0032485787467982754
|
845 |
+
9 146 0.0015618167051914785
|
846 |
+
9 147 0.006122321484350596
|
847 |
+
9 148 0.0025613793965140247
|
848 |
+
9 149 0.0071843568438808006
|
849 |
+
9 150 0.01243206097332417
|
850 |
+
9 151 0.013993877678515648
|
851 |
+
9 152 0.007809083525957393
|
852 |
+
9 157 0.0001249453364153183
|
853 |
+
9 158 0.0023114887236833884
|
854 |
+
9 160 0.0019991253826450927
|
855 |
+
9 161 0.0002498906728306366
|
856 |
+
9 162 0.0005622540138689324
|
857 |
+
9 163 0.0021240707190604106
|
858 |
+
9 164 0.0029362154057599797
|
859 |
+
9 165 0.002561379396514025
|
860 |
+
9 166 0.0007496720184919098
|
861 |
+
9 167 0.0007496720184919097
|
862 |
+
9 168 0.002124070719060411
|
863 |
+
9 169 0.0003123633410382957
|
864 |
+
9 170 0.0006871993502842506
|
865 |
+
9 171 0.002249016055475729
|
866 |
+
9 174 0.0028737427375523207
|
867 |
+
9 175 0.0018741800462297744
|
868 |
+
9 176 0.009433372899356529
|
869 |
+
9 177 0.006247266820765914
|
870 |
+
9 181 0.00018741800462297744
|
871 |
+
9 182 0.0009995626913225464
|
872 |
+
9 183 0.004248141438120822
|
873 |
+
9 185 0.019179109139751356
|
874 |
+
9 186 0.01661772974323733
|
875 |
+
9 187 0.019054163803336036
|
876 |
+
9 194 0.0015618167051914785
|
877 |
+
9 195 0.0001249453364153183
|
878 |
+
9 196 0.0004997813456612732
|
879 |
+
9 197 0.0014993440369838195
|
880 |
+
9 198 0.0003748360092459549
|
881 |
+
9 199 0.0001249453364153183
|
882 |
+
9 202 6.247266820765915e-05
|
883 |
+
9 206 0.013181732991816079
|
884 |
+
9 207 6.247266820765915e-05
|
885 |
+
9 212 0.0018741800462297742
|
886 |
+
9 213 0.0002498906728306366
|
887 |
+
9 218 0.0003123633410382957
|
888 |
+
9 219 0.0006871993502842506
|
889 |
+
9 220 0.014868495033422876
|
890 |
+
9 225 0.0006247266820765914
|
891 |
+
9 227 0.0006871993502842506
|
892 |
+
9 228 0.021802961204473042
|
893 |
+
9 230 0.0002498906728306366
|
894 |
+
9 246 0.020803398513150495
|
895 |
+
9 247 0.017304929093521583
|
896 |
+
9 258 0.0004997813456612732
|
897 |
+
9 259 0.0027487974011370024
|
898 |
+
9 260 0.0017492347098144558
|
899 |
+
9 261 0.002623852064721684
|
900 |
+
9 262 0.01974136315362029
|
901 |
+
9 263 0.01655525707502967
|
902 |
+
9 268 0.007746610857749734
|
903 |
+
9 269 0.02167801586805772
|
904 |
+
9 270 0.019054163803336036
|
905 |
+
9 271 0.011932279627662898
|
906 |
+
9 274 0.0066221028300118695
|
907 |
+
9 275 0.0007496720184919098
|
908 |
+
9 276 0.016742675079652648
|
909 |
+
9 277 0.02205285187730368
|
910 |
+
9 288 0.022427687886549634
|
911 |
+
9 289 0.0003123633410382957
|
912 |
+
9 290 0.00730930218029612
|
913 |
+
9 291 0.005685012806896982
|
914 |
+
9 292 0.0057474854751046415
|
915 |
+
9 293 0.008933591553695257
|
916 |
+
9 356 0.0014993440369838195
|
917 |
+
9 357 0.0014993440369838193
|
918 |
+
9 358 0.00668457549821953
|
919 |
+
9 359 0.004685450115574436
|
920 |
+
9 360 0.0007496720184919098
|
921 |
+
9 361 0.0007496720184919098
|
922 |
+
9 362 0.0024989067283063657
|
923 |
+
9 363 0.0038733054288748667
|
924 |
+
9 364 0.0014368713687761604
|
925 |
+
9 365 0.004498032110951459
|
926 |
+
9 366 0.009933154245017804
|
927 |
+
9 367 0.010245517586056099
|
928 |
+
9 368 0.015993003061160742
|
929 |
+
9 369 0.015993003061160742
|
930 |
+
9 370 0.021115761854188793
|
931 |
+
9 371 0.01693009308427563
|
932 |
+
9 372 0.0009995626913225464
|
933 |
+
9 373 0.0037483600924595483
|
934 |
+
9 374 0.008996064221902918
|
935 |
+
9 375 0.012432060973324168
|
936 |
+
9 376 0.004498032110951458
|
937 |
+
9 377 0.0031861060785906164
|
938 |
+
9 378 0.017554819766352217
|
939 |
+
9 379 0.01749234709814456
|
940 |
+
9 380 0.01649278440682201
|
941 |
+
9 381 0.008308864871618667
|
942 |
+
9 382 0.006434684825388891
|
943 |
+
9 383 0.016055475729368402
|
944 |
+
9 384 0.012557006309739488
|
945 |
+
9 385 0.01018304491784844
|
946 |
+
9 386 0.015180858374461174
|
947 |
+
9 387 0.01155744361841694
|
948 |
+
9 388 0.009058536890110576
|
949 |
+
9 389 0.0028112700693446614
|
950 |
+
9 391 0.00018741800462297744
|
951 |
+
9 392 0.0005622540138689324
|
952 |
+
9 394 0.0018117073780221152
|
953 |
+
9 395 0.0004997813456612732
|
954 |
+
9 399 0.01611794839757606
|
955 |
+
9 402 0.0008746173549072279
|
956 |
+
9 470 0.0007496720184919098
|
957 |
+
9 471 0.0004997813456612732
|
958 |
+
9 478 0.0007496720184919098
|
959 |
+
9 479 0.0004997813456612732
|
960 |
+
9 480 0.0026863247329293434
|
961 |
+
9 481 0.002623852064721684
|
962 |
+
9 483 0.0001249453364153183
|
963 |
+
9 484 0.0001249453364153183
|
964 |
+
9 485 0.0014993440369838195
|
965 |
+
9 486 0.0004997813456612732
|
966 |
+
9 488 0.008996064221902916
|
967 |
+
9 489 0.006059848816142937
|
968 |
+
9 490 0.006497157493596552
|
969 |
+
9 491 0.0001249453364153183
|
970 |
+
9 492 0.0003748360092459549
|
971 |
+
9 493 0.001311926032360842
|
972 |
+
9 494 0.000437308677453614
|
973 |
+
9 495 0.0017492347098144558
|
974 |
+
9 496 0.002623852064721684
|
975 |
+
9 497 0.0027487974011370024
|
976 |
+
9 498 0.0006247266820765914
|
977 |
+
9 509 0.0020615980508527517
|
978 |
+
9 510 0.0003748360092459549
|
979 |
+
9 579 0.0019991253826450927
|
980 |
+
10 74 0.0005264345341054373
|
981 |
+
10 75 0.0021809430698653833
|
982 |
+
10 76 0.000752049334436339
|
983 |
+
10 137 0.000827254267879973
|
984 |
+
10 143 0.0006016394675490712
|
985 |
+
10 150 0.0003008197337745356
|
986 |
+
10 151 0.0006016394675490712
|
987 |
+
10 185 0.004361886139730767
|
988 |
+
10 186 0.0010528690682108748
|
989 |
+
10 187 0.003910656539068963
|
990 |
+
10 206 0.0001504098668872678
|
991 |
+
10 220 0.0003008197337745356
|
992 |
+
10 228 0.0030834022711889904
|
993 |
+
10 246 0.003985861472512596
|
994 |
+
10 247 0.0012784838685417762
|
995 |
+
10 262 0.003910656539068963
|
996 |
+
10 263 0.0011280740016545085
|
997 |
+
10 269 0.0032338121380762574
|
998 |
+
10 270 0.002857787470858088
|
999 |
+
10 271 0.0003008197337745356
|
1000 |
+
10 276 0.000902459201323607
|
1001 |
+
10 277 0.00556516507482891
|
1002 |
+
10 288 0.0027825825374144545
|
1003 |
+
10 356 0.020305332029781156
|
1004 |
+
10 357 0.019703692562232082
|
1005 |
+
10 358 0.02549447243739189
|
1006 |
+
10 359 0.023764758968188315
|
1007 |
+
10 360 0.02587049710461006
|
1008 |
+
10 361 0.022486275099646538
|
1009 |
+
10 362 0.022411070166202904
|
1010 |
+
10 363 0.02278709483342107
|
1011 |
+
10 364 0.026321726705271865
|
1012 |
+
10 365 0.02007971722945025
|
1013 |
+
10 366 0.016093855756937656
|
1014 |
+
10 367 0.022260660299315636
|
1015 |
+
10 368 0.011882379484094157
|
1016 |
+
10 369 0.009400616680454237
|
1017 |
+
10 370 0.00962623148078514
|
1018 |
+
10 371 0.011431149883432353
|
1019 |
+
10 372 0.021583815898322933
|
1020 |
+
10 373 0.024742423102955553
|
1021 |
+
10 374 0.01947807776190118
|
1022 |
+
10 375 0.01789877415958487
|
1023 |
+
10 376 0.023388734300970146
|
1024 |
+
10 377 0.023689554034744677
|
1025 |
+
10 378 0.009400616680454237
|
1026 |
+
10 379 0.005865984808603443
|
1027 |
+
10 380 0.01135594494998872
|
1028 |
+
10 381 0.022486275099646538
|
1029 |
+
10 382 0.015341806422501316
|
1030 |
+
10 383 0.01135594494998872
|
1031 |
+
10 384 0.01158155975031962
|
1032 |
+
10 385 0.019703692562232082
|
1033 |
+
10 386 0.01504098668872678
|
1034 |
+
10 387 0.018124388959915774
|
1035 |
+
10 388 0.010077461081446944
|
1036 |
+
10 389 0.02293750470030834
|
1037 |
+
10 390 0.01383770775362864
|
1038 |
+
10 391 0.017372339625479433
|
1039 |
+
10 392 0.019703692562232086
|
1040 |
+
10 393 0.011882379484094157
|
1041 |
+
10 394 0.024667218169511923
|
1042 |
+
10 395 0.024667218169511916
|
1043 |
+
10 396 0.012333609084755958
|
1044 |
+
10 397 0.011506354816875987
|
1045 |
+
10 398 0.013236068286079568
|
1046 |
+
10 399 0.0070692637437015865
|
1047 |
+
10 400 0.01940287282845755
|
1048 |
+
10 401 0.016093855756937656
|
1049 |
+
10 402 0.020530946830112053
|
1050 |
+
10 403 0.008197337745356097
|
1051 |
+
10 404 0.01759795442581033
|
1052 |
+
10 405 0.021508610964879295
|
1053 |
+
10 406 0.008197337745356095
|
1054 |
+
10 407 0.013988117620515906
|
1055 |
+
10 408 0.008949387079792434
|
1056 |
+
10 409 0.006467624276152515
|
1057 |
+
10 410 0.005264345341054373
|
1058 |
+
10 411 0.005565165074828909
|
1059 |
+
10 412 0.003835451605625329
|
1060 |
+
10 413 0.002105738136421749
|
1061 |
+
10 414 0.0012784838685417764
|
1062 |
+
10 415 0.002556967737083553
|
1063 |
+
10 417 7.52049334436339e-05
|
1064 |
+
10 420 0.0020305332029781154
|
1065 |
+
10 421 0.0006016394675490712
|
1066 |
+
10 422 0.0006016394675490712
|
1067 |
+
10 427 7.52049334436339e-05
|
1068 |
+
10 430 0.004737910806948936
|
1069 |
+
10 431 0.002331352936752651
|
1070 |
+
10 432 0.0001504098668872678
|
1071 |
+
10 440 0.0010528690682108748
|
1072 |
+
10 441 0.0021057381364217496
|
1073 |
+
10 446 7.52049334436339e-05
|
1074 |
+
10 452 0.004512296006618034
|
1075 |
+
10 453 0.003609836805294428
|
1076 |
+
10 454 0.0006016394675490712
|
1077 |
+
10 456 0.0006016394675490712
|
1078 |
+
10 457 0.0004512296006618035
|
1079 |
+
11 356 0.011297349184080336
|
1080 |
+
11 357 0.011888060252528984
|
1081 |
+
11 358 0.004430333013364838
|
1082 |
+
11 359 0.004430333013364838
|
1083 |
+
11 360 0.009229860444510078
|
1084 |
+
11 361 0.011371188067636416
|
1085 |
+
11 362 0.0038396219449161927
|
1086 |
+
11 363 0.002805877575131064
|
1087 |
+
11 364 0.005759432917374288
|
1088 |
+
11 365 0.0014767776711216124
|
1089 |
+
11 366 0.0003691944177804031
|
1090 |
+
11 367 0.0014029387875655322
|
1091 |
+
11 372 0.011371188067636418
|
1092 |
+
11 373 0.004504171896920917
|
1093 |
+
11 374 0.0012552610204533705
|
1094 |
+
11 375 0.0011075832533412094
|
1095 |
+
11 376 0.005316399616037805
|
1096 |
+
11 377 0.005685594033818208
|
1097 |
+
11 381 0.001772133205345935
|
1098 |
+
11 382 0.0003691944177804031
|
1099 |
+
11 385 0.00118142213689729
|
1100 |
+
11 386 0.0005168721848925644
|
1101 |
+
11 387 0.0011075832533412094
|
1102 |
+
11 388 7.383888355608063e-05
|
1103 |
+
11 389 0.0031012331093553864
|
1104 |
+
11 390 0.019345787491693123
|
1105 |
+
11 391 0.010928154766299934
|
1106 |
+
11 392 0.01299564350587019
|
1107 |
+
11 393 0.02082256516281474
|
1108 |
+
11 394 0.0057594329173742895
|
1109 |
+
11 395 0.00945137709517832
|
1110 |
+
11 396 0.017352137635678947
|
1111 |
+
11 397 0.02001033744369785
|
1112 |
+
11 398 0.018238204238351912
|
1113 |
+
11 400 0.01794284870412759
|
1114 |
+
11 401 0.019124270841024884
|
1115 |
+
11 402 0.016170715498781657
|
1116 |
+
11 403 0.022816215018828915
|
1117 |
+
11 404 0.01727829875212287
|
1118 |
+
11 405 0.014546260060547885
|
1119 |
+
11 406 0.0239976371557262
|
1120 |
+
11 407 0.022963892785941076
|
1121 |
+
11 408 0.02695119249796943
|
1122 |
+
11 409 0.023776120505057962
|
1123 |
+
11 410 0.019493465258805284
|
1124 |
+
11 411 0.023849959388614037
|
1125 |
+
11 412 0.026581998080189025
|
1126 |
+
11 413 0.020601048512146496
|
1127 |
+
11 414 0.019493465258805288
|
1128 |
+
11 415 0.02163479288193162
|
1129 |
+
11 416 0.004873366314701322
|
1130 |
+
11 417 0.007900760540500627
|
1131 |
+
11 418 0.0042088163626965965
|
1132 |
+
11 419 0.0016982943217898545
|
1133 |
+
11 420 0.018238204238351912
|
1134 |
+
11 421 0.012035738019641142
|
1135 |
+
11 422 0.012331093553865465
|
1136 |
+
11 423 0.0055379162667060465
|
1137 |
+
11 424 0.004061138595584434
|
1138 |
+
11 425 0.0016982943217898542
|
1139 |
+
11 426 0.0008122277191168869
|
1140 |
+
11 427 0.00834379384183711
|
1141 |
+
11 428 0.0005168721848925643
|
1142 |
+
11 429 0.0015506165546776932
|
1143 |
+
11 430 0.023406926087277558
|
1144 |
+
11 431 0.019124270841024884
|
1145 |
+
11 432 0.016392232149449903
|
1146 |
+
11 433 0.005907110684486449
|
1147 |
+
11 434 0.0019198109724580966
|
1148 |
+
11 435 0.015432326663220851
|
1149 |
+
11 436 0.006940855054271579
|
1150 |
+
11 437 0.0013290999040094513
|
1151 |
+
11 438 0.013364837923650594
|
1152 |
+
11 439 0.00694085505427158
|
1153 |
+
11 440 0.02126559846415122
|
1154 |
+
11 441 0.02355460385438972
|
1155 |
+
11 442 0.002732038691574983
|
1156 |
+
11 444 7.383888355608063e-05
|
1157 |
+
11 446 0.010854315882743852
|
1158 |
+
11 447 0.0031012331093553864
|
1159 |
+
11 448 0.007753082773388465
|
1160 |
+
11 449 0.0018459720889020155
|
1161 |
+
11 450 0.00044303330133648377
|
1162 |
+
11 451 0.00044303330133648377
|
1163 |
+
11 452 0.023776120505057962
|
1164 |
+
11 453 0.02229934283393635
|
1165 |
+
11 454 0.02126559846415122
|
1166 |
+
11 455 0.013290999040094512
|
1167 |
+
11 456 0.018385882005464073
|
1168 |
+
11 457 0.015580004430333012
|
1169 |
+
11 458 0.010189765930739126
|
1170 |
+
11 459 0.012035738019641142
|
1171 |
+
11 460 0.0034704275271357893
|
1172 |
+
11 461 0.004578010780476998
|
1173 |
+
11 462 0.005907110684486449
|
1174 |
+
11 463 0.000590711068448645
|
1175 |
+
11 464 0.000590711068448645
|
1176 |
+
11 465 0.0002953555342243225
|
1177 |
+
11 466 0.0019936498560141768
|
1178 |
+
11 467 0.0013290999040094513
|
1179 |
+
12 444 1.0
|
1180 |
+
13 16 0.0014635288607891346
|
1181 |
+
13 17 0.002575810794988877
|
1182 |
+
13 18 0.005737033134293408
|
1183 |
+
13 19 0.001990399250673223
|
1184 |
+
13 20 0.007785973539398196
|
1185 |
+
13 21 0.008664090855871677
|
1186 |
+
13 22 0.002985598876009834
|
1187 |
+
13 23 0.002224563868399485
|
1188 |
+
13 63 5.854115443156538e-05
|
1189 |
+
13 66 0.0018147757873785268
|
1190 |
+
13 67 0.0006439526987472192
|
1191 |
+
13 68 0.0002927057721578269
|
1192 |
+
13 69 0.0008195761620419153
|
1193 |
+
13 70 0.0007024938531787846
|
1194 |
+
13 71 0.0033953869570307925
|
1195 |
+
13 72 0.0024001873316941806
|
1196 |
+
13 73 0.00023416461772626153
|
1197 |
+
13 74 0.009308043554618896
|
1198 |
+
13 75 0.007551808921671934
|
1199 |
+
13 76 0.01890879288139562
|
1200 |
+
13 77 0.013230300901533777
|
1201 |
+
13 80 0.0013464465519260039
|
1202 |
+
13 81 0.0002927057721578269
|
1203 |
+
13 82 0.0016976934785153963
|
1204 |
+
13 83 0.0040978808102095764
|
1205 |
+
13 93 0.00017562346329469617
|
1206 |
+
13 100 0.00017562346329469617
|
1207 |
+
13 102 0.00011708230886313077
|
1208 |
+
13 103 0.00035124692658939234
|
1209 |
+
13 137 0.00011708230886313077
|
1210 |
+
13 141 0.020021074815595362
|
1211 |
+
13 142 0.016625687858564567
|
1212 |
+
13 143 0.0016391523240838306
|
1213 |
+
13 144 0.0005268703898840885
|
1214 |
+
13 145 0.0002927057721578269
|
1215 |
+
13 146 0.002868516567146704
|
1216 |
+
13 147 0.006673691605198454
|
1217 |
+
13 148 0.008839714319166374
|
1218 |
+
13 149 0.0002927057721578269
|
1219 |
+
13 150 0.0002927057721578269
|
1220 |
+
13 151 0.0012293642430628731
|
1221 |
+
13 152 0.0011122819341997424
|
1222 |
+
13 157 0.0008781173164734808
|
1223 |
+
13 158 0.0004097880810209577
|
1224 |
+
13 160 0.02681184872965695
|
1225 |
+
13 161 0.023592085235920848
|
1226 |
+
13 162 0.03096827069429809
|
1227 |
+
13 163 0.02476290832455216
|
1228 |
+
13 178 0.0002927057721578269
|
1229 |
+
13 179 5.854115443156538e-05
|
1230 |
+
13 180 0.0009366584709050461
|
1231 |
+
13 181 0.00444912773679897
|
1232 |
+
13 182 0.013464465519260038
|
1233 |
+
13 183 0.0167427701674277
|
1234 |
+
13 184 0.00017562346329469617
|
1235 |
+
13 185 5.854115443156538e-05
|
1236 |
+
13 186 0.0002927057721578269
|
1237 |
+
13 187 0.0008195761620419153
|
1238 |
+
13 196 0.017503805175038047
|
1239 |
+
13 197 0.023416461772626154
|
1240 |
+
13 198 0.023416461772626154
|
1241 |
+
13 199 0.02921203606135113
|
1242 |
+
13 201 0.0018733169418100922
|
1243 |
+
13 202 0.006439526987472192
|
1244 |
+
13 206 0.015162158997775435
|
1245 |
+
13 207 0.0006439526987472192
|
1246 |
+
13 218 0.0007610350076103501
|
1247 |
+
13 219 0.00046832923545252306
|
1248 |
+
13 220 0.006673691605198454
|
1249 |
+
13 227 0.00011708230886313077
|
1250 |
+
13 228 0.0009951996253366115
|
1251 |
+
13 246 0.0106544901065449
|
1252 |
+
13 247 0.014576747453459781
|
1253 |
+
13 262 0.00011708230886313077
|
1254 |
+
13 268 0.0033368458025992264
|
1255 |
+
13 269 0.010420325488818641
|
1256 |
+
13 270 0.0035710104203254887
|
1257 |
+
13 271 0.002985598876009834
|
1258 |
+
13 275 0.009834913944502985
|
1259 |
+
13 276 0.02142606252195293
|
1260 |
+
13 277 0.01164968973188151
|
1261 |
+
13 278 0.00035124692658939234
|
1262 |
+
13 288 0.004741833508956796
|
1263 |
+
13 289 0.014693829762322912
|
1264 |
+
13 290 0.02207001522070015
|
1265 |
+
13 291 0.017913593256059006
|
1266 |
+
13 292 0.011005737033134292
|
1267 |
+
13 293 0.010478866643250203
|
1268 |
+
13 358 0.0003512469265893923
|
1269 |
+
13 363 5.854115443156538e-05
|
1270 |
+
13 365 0.00035124692658939234
|
1271 |
+
13 366 0.0007024938531787846
|
1272 |
+
13 367 0.00017562346329469617
|
1273 |
+
13 368 0.00017562346329469617
|
1274 |
+
13 369 0.0009951996253366115
|
1275 |
+
13 370 0.005151621589977754
|
1276 |
+
13 371 0.005385786207704015
|
1277 |
+
13 374 0.0016976934785153963
|
1278 |
+
13 375 0.0017562346329469615
|
1279 |
+
13 376 0.0004097880810209577
|
1280 |
+
13 377 0.0003512469265893923
|
1281 |
+
13 378 0.00046832923545252306
|
1282 |
+
13 379 0.0015220700152206996
|
1283 |
+
13 381 0.0014635288607891346
|
1284 |
+
13 382 0.0009951996253366115
|
1285 |
+
13 383 0.00532724505327245
|
1286 |
+
13 384 0.0037466338836201845
|
1287 |
+
13 386 0.0011708230886313077
|
1288 |
+
13 387 0.00011708230886313077
|
1289 |
+
13 388 0.0012293642430628731
|
1290 |
+
13 389 0.0002927057721578269
|
1291 |
+
13 394 0.00017562346329469617
|
1292 |
+
13 399 0.0033953869570307925
|
1293 |
+
13 468 5.854115443156538e-05
|
1294 |
+
13 469 0.0011122819341997424
|
1295 |
+
13 470 0.0027514342582835734
|
1296 |
+
13 471 0.0012879053974944384
|
1297 |
+
13 474 5.854115443156538e-05
|
1298 |
+
13 475 0.0002927057721578269
|
1299 |
+
13 476 0.0002927057721578269
|
1300 |
+
13 477 0.0018147757873785268
|
1301 |
+
13 478 0.0020489404051047887
|
1302 |
+
13 479 0.0011122819341997424
|
1303 |
+
13 480 0.004332045427935838
|
1304 |
+
13 481 0.006556609296335323
|
1305 |
+
13 483 0.00046832923545252306
|
1306 |
+
13 484 0.012352183585060298
|
1307 |
+
13 485 0.014869453225617611
|
1308 |
+
13 486 0.005912656597588104
|
1309 |
+
13 487 0.004214963119072708
|
1310 |
+
13 488 0.01164968973188151
|
1311 |
+
13 489 0.015806111696522657
|
1312 |
+
13 490 0.008312843929282283
|
1313 |
+
13 491 0.009834913944502985
|
1314 |
+
13 492 0.006146821215314366
|
1315 |
+
13 493 0.015513405924364829
|
1316 |
+
13 494 0.02007961597002693
|
1317 |
+
13 495 0.0024001873316941806
|
1318 |
+
13 496 0.008956796628029503
|
1319 |
+
13 497 0.004741833508956796
|
1320 |
+
13 498 0.003512469265893923
|
1321 |
+
13 499 0.002517269640557311
|
1322 |
+
13 501 0.0005854115443156538
|
1323 |
+
13 502 0.0004097880810209577
|
1324 |
+
13 504 0.001990399250673223
|
1325 |
+
13 505 0.00040978808102095764
|
1326 |
+
13 509 0.010478866643250205
|
1327 |
+
13 510 0.02207001522070015
|
1328 |
+
13 513 0.0012293642430628731
|
1329 |
+
13 579 0.021660227139679192
|
1330 |
+
13 580 0.0002927057721578269
|
1331 |
+
13 581 0.00011708230886313077
|
1332 |
+
13 582 0.0012879053974944388
|
1333 |
+
13 583 0.0018147757873785272
|
1334 |
+
13 584 5.854115443156538e-05
|
1335 |
+
13 585 0.00011708230886313077
|
1336 |
+
13 586 0.0011122819341997422
|
1337 |
+
13 587 0.0008195761620419154
|
1338 |
+
13 589 0.0007610350076103501
|
1339 |
+
13 590 0.003395386957030792
|
1340 |
+
13 591 0.0026928931038520073
|
1341 |
+
13 592 0.009834913944502985
|
1342 |
+
13 593 0.009834913944502985
|
1343 |
+
13 594 0.00011708230886313077
|
1344 |
+
13 595 0.0013464465519260039
|
1345 |
+
13 596 0.0015806111696522653
|
1346 |
+
13 597 0.0002927057721578269
|
1347 |
+
13 598 0.00023416461772626153
|
1348 |
+
13 599 0.0009951996253366115
|
1349 |
+
13 600 0.0002927057721578269
|
1350 |
+
13 601 0.0012293642430628731
|
1351 |
+
13 602 0.00046832923545252306
|
1352 |
+
13 603 0.00011708230886313077
|
1353 |
+
13 604 0.003980798501346446
|
1354 |
+
13 605 0.013523006673691603
|
1355 |
+
13 606 0.011591148577449948
|
1356 |
+
13 607 0.006263903524177495
|
1357 |
+
13 608 0.014693829762322912
|
1358 |
+
13 610 0.0003512469265893923
|
1359 |
+
13 611 0.0012293642430628734
|
1360 |
+
13 612 5.854115443156538e-05
|
1361 |
+
13 613 0.005327245053272449
|
1362 |
+
13 614 0.0019318580962416575
|
1363 |
+
13 615 0.006615150450766888
|
1364 |
+
13 616 0.0026928931038520073
|
1365 |
+
13 617 0.0002927057721578269
|
1366 |
+
13 627 0.005268703898840884
|
1367 |
+
13 630 0.00011708230886313077
|
1368 |
+
13 696 0.00023416461772626153
|
1369 |
+
13 769 0.00076103500761035
|
1370 |
+
13 770 0.004683292354525231
|
1371 |
+
13 771 0.0011122819341997424
|
1372 |
+
13 772 5.854115443156538e-05
|
1373 |
+
13 774 0.00076103500761035
|
1374 |
+
13 775 0.003512469265893923
|
1375 |
+
13 776 0.008020138157124457
|
1376 |
+
14 74 0.0005157677571470676
|
1377 |
+
14 75 0.0005157677571470676
|
1378 |
+
14 76 0.004273504273504274
|
1379 |
+
14 77 0.0008104921898025347
|
1380 |
+
14 141 0.002799882110226938
|
1381 |
+
14 142 0.0003684055408193339
|
1382 |
+
14 160 0.001326259946949602
|
1383 |
+
14 161 0.0005894488653109342
|
1384 |
+
14 162 0.004420866489832007
|
1385 |
+
14 163 0.0050103153551429415
|
1386 |
+
14 196 7.368110816386678e-05
|
1387 |
+
14 197 0.0014736221632773356
|
1388 |
+
14 198 0.0030209254347185383
|
1389 |
+
14 199 0.0009578544061302684
|
1390 |
+
14 206 7.368110816386678e-05
|
1391 |
+
14 246 0.0013262599469496023
|
1392 |
+
14 247 0.0061155319776009425
|
1393 |
+
14 269 7.368110816386678e-05
|
1394 |
+
14 276 0.0034630120837017397
|
1395 |
+
14 277 0.0008841732979664015
|
1396 |
+
14 290 0.001399941055113469
|
1397 |
+
14 291 0.0052313586796345415
|
1398 |
+
14 292 0.0058944886531093425
|
1399 |
+
14 293 0.008989095195991748
|
1400 |
+
14 468 0.0199675803124079
|
1401 |
+
14 469 0.02460949012673151
|
1402 |
+
14 470 0.021220159151193633
|
1403 |
+
14 471 0.02586206896551724
|
1404 |
+
14 472 0.020704391394046565
|
1405 |
+
14 473 0.017978190391983492
|
1406 |
+
14 474 0.020114942528735632
|
1407 |
+
14 475 0.02586206896551724
|
1408 |
+
14 476 0.02291482463896257
|
1409 |
+
14 477 0.02475685234305924
|
1410 |
+
14 478 0.021293840259357502
|
1411 |
+
14 479 0.026009431181844976
|
1412 |
+
14 480 0.019451812555260833
|
1413 |
+
14 481 0.014294134983790155
|
1414 |
+
14 482 0.01422045387562629
|
1415 |
+
14 483 0.02726201002063071
|
1416 |
+
14 484 0.02026230474506337
|
1417 |
+
14 485 0.015694076038903628
|
1418 |
+
14 486 0.02726201002063071
|
1419 |
+
14 487 0.02733569112879458
|
1420 |
+
14 488 0.01215738284703802
|
1421 |
+
14 489 0.009652225169466549
|
1422 |
+
14 490 0.015767757147067494
|
1423 |
+
14 491 0.02460949012673151
|
1424 |
+
14 492 0.020114942528735635
|
1425 |
+
14 493 0.013704686118479222
|
1426 |
+
14 494 0.01333628057765989
|
1427 |
+
14 495 0.022988505747126436
|
1428 |
+
14 496 0.018272914824638966
|
1429 |
+
14 497 0.020851753610374304
|
1430 |
+
14 498 0.016578249336870028
|
1431 |
+
14 499 0.025567344532861774
|
1432 |
+
14 500 0.007515473032714411
|
1433 |
+
14 501 0.019157088122605366
|
1434 |
+
14 502 0.015104627173592693
|
1435 |
+
14 503 0.00987326849395815
|
1436 |
+
14 504 0.021293840259357502
|
1437 |
+
14 505 0.020999115826702035
|
1438 |
+
14 506 0.013262599469496024
|
1439 |
+
14 507 0.013483642793987621
|
1440 |
+
14 508 0.010389036251105217
|
1441 |
+
14 509 0.011715296198054817
|
1442 |
+
14 510 0.010167992926613616
|
1443 |
+
14 511 0.011199528440907752
|
1444 |
+
14 512 0.009357500736811082
|
1445 |
+
14 513 0.020335985853227233
|
1446 |
+
14 514 0.010683760683760684
|
1447 |
+
14 515 0.01215738284703802
|
1448 |
+
14 516 0.016357206012378427
|
1449 |
+
14 517 0.004052460949012673
|
1450 |
+
14 518 0.006704980842911877
|
1451 |
+
14 519 0.004273504273504274
|
1452 |
+
14 520 0.0036103743000294726
|
1453 |
+
14 521 0.004494547597995874
|
1454 |
+
14 522 0.003020925434718538
|
1455 |
+
14 523 0.002136752136752137
|
1456 |
+
14 524 0.0037577365163572064
|
1457 |
+
14 525 0.0005894488653109342
|
1458 |
+
14 526 0.0008104921898025347
|
1459 |
+
14 531 0.0002947244326554671
|
1460 |
+
14 541 0.0016209843796050694
|
1461 |
+
14 542 0.0006631299734748011
|
1462 |
+
14 551 0.0019157088122605363
|
1463 |
+
14 552 0.0009578544061302684
|
1464 |
+
14 563 0.005010315355142941
|
1465 |
+
14 564 0.004715590922487474
|
1466 |
+
14 565 0.0010315355142941351
|
1467 |
+
14 567 0.000663129973474801
|
1468 |
+
14 568 0.00022104332449160037
|
1469 |
+
14 579 0.006115531977600943
|
1470 |
+
15 468 0.01103996467211305
|
1471 |
+
15 469 0.010230367262824759
|
1472 |
+
15 470 0.0023551924633841174
|
1473 |
+
15 471 0.004121586810922205
|
1474 |
+
15 472 0.009199970560094207
|
1475 |
+
15 473 0.011334363730036065
|
1476 |
+
15 474 0.004047987046441452
|
1477 |
+
15 475 0.0027967910502686394
|
1478 |
+
15 476 0.0059615809229410476
|
1479 |
+
15 477 0.0014719952896150733
|
1480 |
+
15 478 0.0003679988224037683
|
1481 |
+
15 479 0.0016191948185765807
|
1482 |
+
15 482 0.011187164201074557
|
1483 |
+
15 483 0.0056671818650180315
|
1484 |
+
15 484 0.0014719952896150733
|
1485 |
+
15 485 0.0003679988224037683
|
1486 |
+
15 486 0.004563185397806727
|
1487 |
+
15 487 0.0073599764480753675
|
1488 |
+
15 491 0.002134393169941856
|
1489 |
+
15 492 0.0003679988224037683
|
1490 |
+
15 495 0.0011775962316920587
|
1491 |
+
15 496 0.0005151983513652757
|
1492 |
+
15 497 0.0005151983513652757
|
1493 |
+
15 498 7.359976448075367e-05
|
1494 |
+
15 499 0.0032383896371531613
|
1495 |
+
15 500 0.019945536174284243
|
1496 |
+
15 501 0.01781114300434239
|
1497 |
+
15 502 0.014204754544785456
|
1498 |
+
15 503 0.02524471921689851
|
1499 |
+
15 504 0.005446382571575771
|
1500 |
+
15 505 0.010524766320747773
|
1501 |
+
15 506 0.01832634135570766
|
1502 |
+
15 507 0.01884153970707294
|
1503 |
+
15 508 0.018473540884669168
|
1504 |
+
15 511 0.01781114300434239
|
1505 |
+
15 512 0.019356738058438214
|
1506 |
+
15 513 0.012143961139324354
|
1507 |
+
15 514 0.020755133583572533
|
1508 |
+
15 515 0.01862074041363068
|
1509 |
+
15 516 0.015014351954073748
|
1510 |
+
15 517 0.024361522043129462
|
1511 |
+
15 518 0.02333112534039891
|
1512 |
+
15 519 0.027011113564436594
|
1513 |
+
15 520 0.02465592110105248
|
1514 |
+
15 521 0.024067122985206444
|
1515 |
+
15 522 0.024508721572090966
|
1516 |
+
15 523 0.023478324869360415
|
1517 |
+
15 524 0.025980716861706044
|
1518 |
+
15 525 0.018031942297784646
|
1519 |
+
15 526 0.020607934054611025
|
1520 |
+
15 527 0.0012511959961728123
|
1521 |
+
15 528 0.0059615809229410476
|
1522 |
+
15 529 0.0025023919923456246
|
1523 |
+
15 530 0.0009567969382497977
|
1524 |
+
15 531 0.018179141826746157
|
1525 |
+
15 532 0.006255979980864061
|
1526 |
+
15 533 0.011187164201074557
|
1527 |
+
15 534 0.005225583278133511
|
1528 |
+
15 535 0.004710384926768235
|
1529 |
+
15 536 0.0016927945830573343
|
1530 |
+
15 537 0.0007359976448075366
|
1531 |
+
15 538 0.013247957606535658
|
1532 |
+
15 540 7.359976448075367e-05
|
1533 |
+
15 541 0.02340472510487966
|
1534 |
+
15 542 0.02031353499668801
|
1535 |
+
15 543 0.010745565614190034
|
1536 |
+
15 544 0.0032383896371531613
|
1537 |
+
15 545 0.0003679988224037683
|
1538 |
+
15 546 0.015529550305439023
|
1539 |
+
15 547 0.005593582100537278
|
1540 |
+
15 548 0.001103996467211305
|
1541 |
+
15 549 0.019356738058438214
|
1542 |
+
15 550 0.009126370795613454
|
1543 |
+
15 551 0.025465518510340766
|
1544 |
+
15 552 0.022374328402149115
|
1545 |
+
15 553 0.0029439905792301465
|
1546 |
+
15 557 0.011923161845882095
|
1547 |
+
15 558 0.0029439905792301465
|
1548 |
+
15 559 0.00942076985353647
|
1549 |
+
15 560 0.003679988224037683
|
1550 |
+
15 561 0.0002943990579230147
|
1551 |
+
15 563 0.019356738058438214
|
1552 |
+
15 564 0.024582321336571723
|
1553 |
+
15 565 0.02244792816662987
|
1554 |
+
15 566 0.015382350776477514
|
1555 |
+
15 567 0.019503937587399718
|
1556 |
+
15 568 0.015161551483035255
|
1557 |
+
15 569 0.0059615809229410476
|
1558 |
+
15 570 0.01023036726282476
|
1559 |
+
15 571 0.0030175903437109006
|
1560 |
+
15 572 0.003459188930595423
|
1561 |
+
15 573 0.005519982336056524
|
1562 |
+
15 574 0.0008095974092882903
|
1563 |
+
15 575 0.0008095974092882903
|
1564 |
+
15 576 7.359976448075367e-05
|
1565 |
+
15 577 0.0013247957606535659
|
1566 |
+
15 578 0.0008095974092882903
|
1567 |
+
16 556 1.0
|
1568 |
+
17 17 0.0004919184820801125
|
1569 |
+
17 18 0.0006324666198172875
|
1570 |
+
17 20 0.005762473647224175
|
1571 |
+
17 21 0.0021082220660576245
|
1572 |
+
17 22 0.0014757554462403375
|
1573 |
+
17 23 0.0024595924104005625
|
1574 |
+
17 70 0.000140548137737175
|
1575 |
+
17 71 0.000983836964160225
|
1576 |
+
17 72 0.0023190442726633872
|
1577 |
+
17 73 0.0004919184820801125
|
1578 |
+
17 76 0.000140548137737175
|
1579 |
+
17 77 0.0006324666198172875
|
1580 |
+
17 80 0.008151791988756148
|
1581 |
+
17 81 0.006676036542515813
|
1582 |
+
17 82 0.016303583977512297
|
1583 |
+
17 83 0.012297962052002813
|
1584 |
+
17 96 0.002178496134926213
|
1585 |
+
17 97 0.0007027406886858749
|
1586 |
+
17 98 7.02740688685875e-05
|
1587 |
+
17 99 0.0011243851018974
|
1588 |
+
17 100 0.009065354884047786
|
1589 |
+
17 101 0.007308503162333099
|
1590 |
+
17 102 0.015038650737877725
|
1591 |
+
17 103 0.017919887561489812
|
1592 |
+
17 141 0.0018271257905832748
|
1593 |
+
17 142 0.003794799718903725
|
1594 |
+
17 148 0.0013352073085031624
|
1595 |
+
17 153 0.001546029515108925
|
1596 |
+
17 154 0.0024595924104005625
|
1597 |
+
17 160 0.0134926212227688
|
1598 |
+
17 161 0.01883345045678145
|
1599 |
+
17 162 0.012438510189739986
|
1600 |
+
17 163 0.005200281096275476
|
1601 |
+
17 178 0.0007730147575544624
|
1602 |
+
17 179 0.0007730147575544624
|
1603 |
+
17 180 0.003021784961349263
|
1604 |
+
17 181 0.00758959943780745
|
1605 |
+
17 182 0.01377371749824315
|
1606 |
+
17 183 0.007238229093464512
|
1607 |
+
17 184 0.0026001405481377374
|
1608 |
+
17 196 0.0123682361208714
|
1609 |
+
17 197 0.007449051300070275
|
1610 |
+
17 198 0.0071679550245959235
|
1611 |
+
17 199 0.0202389318341532
|
1612 |
+
17 200 0.0004919184820801125
|
1613 |
+
17 201 0.027406886858749122
|
1614 |
+
17 202 0.020028109627547436
|
1615 |
+
17 206 0.0019676739283204497
|
1616 |
+
17 207 0.0004919184820801125
|
1617 |
+
17 218 0.000140548137737175
|
1618 |
+
17 220 0.00028109627547435
|
1619 |
+
17 247 7.02740688685875e-05
|
1620 |
+
17 256 0.000140548137737175
|
1621 |
+
17 257 0.0004919184820801125
|
1622 |
+
17 269 0.00035137034434293746
|
1623 |
+
17 275 0.005270555165144062
|
1624 |
+
17 276 0.0010541110330288123
|
1625 |
+
17 277 7.02740688685875e-05
|
1626 |
+
17 278 0.02508784258608574
|
1627 |
+
17 289 0.019465917076598734
|
1628 |
+
17 290 0.0044975404075896
|
1629 |
+
17 291 0.001546029515108925
|
1630 |
+
17 292 0.0002108222066057625
|
1631 |
+
17 293 7.02740688685875e-05
|
1632 |
+
17 484 0.0009135628952916374
|
1633 |
+
17 485 0.0007730147575544624
|
1634 |
+
17 489 0.000421644413211525
|
1635 |
+
17 491 0.0004919184820801125
|
1636 |
+
17 492 7.02740688685875e-05
|
1637 |
+
17 493 0.0013352073085031622
|
1638 |
+
17 494 0.002951510892480675
|
1639 |
+
17 509 0.0002108222066057625
|
1640 |
+
17 510 0.0033731553056922
|
1641 |
+
17 579 0.0027406886858749122
|
1642 |
+
17 580 0.003162333099086437
|
1643 |
+
17 581 0.0023190442726633877
|
1644 |
+
17 582 0.009978917779339425
|
1645 |
+
17 583 0.009065354884047788
|
1646 |
+
17 584 0.0010541110330288125
|
1647 |
+
17 585 0.0016865776528460997
|
1648 |
+
17 586 0.004356992269852425
|
1649 |
+
17 587 0.003513703443429375
|
1650 |
+
17 588 0.0016865776528461
|
1651 |
+
17 589 0.004567814476458187
|
1652 |
+
17 590 0.009065354884047786
|
1653 |
+
17 591 0.006886858749121575
|
1654 |
+
17 592 0.013914265635980324
|
1655 |
+
17 593 0.016795502459592413
|
1656 |
+
17 594 0.021503865073787775
|
1657 |
+
17 595 0.028742094167252288
|
1658 |
+
17 596 0.02178496134926212
|
1659 |
+
17 597 0.02059030217849614
|
1660 |
+
17 598 0.0026001405481377374
|
1661 |
+
17 599 0.004427266338721012
|
1662 |
+
17 600 0.015038650737877721
|
1663 |
+
17 601 0.015390021082220663
|
1664 |
+
17 602 0.008081517919887564
|
1665 |
+
17 603 0.007308503162333099
|
1666 |
+
17 604 0.021995783555867888
|
1667 |
+
17 605 0.02312016865776529
|
1668 |
+
17 606 0.019465917076598734
|
1669 |
+
17 607 0.025720309205903027
|
1670 |
+
17 608 0.019465917076598737
|
1671 |
+
17 609 0.0123682361208714
|
1672 |
+
17 610 0.013070976809557275
|
1673 |
+
17 611 0.02009838369641603
|
1674 |
+
17 612 0.019184820801124384
|
1675 |
+
17 613 0.012719606465214337
|
1676 |
+
17 614 0.0134926212227688
|
1677 |
+
17 615 0.0179901616303584
|
1678 |
+
17 616 0.015319747013352071
|
1679 |
+
17 617 0.005621925509486999
|
1680 |
+
17 618 0.00028109627547435
|
1681 |
+
17 619 0.0022487702037948
|
1682 |
+
17 620 0.0026704146170063252
|
1683 |
+
17 621 0.0002108222066057625
|
1684 |
+
17 622 0.007589599437807451
|
1685 |
+
17 623 0.0018271257905832748
|
1686 |
+
17 624 0.00084328882642305
|
1687 |
+
17 625 0.0007027406886858749
|
1688 |
+
17 626 0.0009135628952916376
|
1689 |
+
17 627 0.01981728742094167
|
1690 |
+
17 628 0.0002108222066057625
|
1691 |
+
17 629 0.0004919184820801124
|
1692 |
+
17 630 0.003021784961349262
|
1693 |
+
17 631 0.0006324666198172875
|
1694 |
+
17 632 7.02740688685875e-05
|
1695 |
+
17 633 7.02740688685875e-05
|
1696 |
+
17 696 0.021152494729444835
|
1697 |
+
17 769 0.0179901616303584
|
1698 |
+
17 770 0.020941672522839076
|
1699 |
+
17 771 0.011243851018974
|
1700 |
+
17 772 0.005411103302881238
|
1701 |
+
17 773 0.0028109627547434997
|
1702 |
+
17 774 0.0036542515811665496
|
1703 |
+
17 775 0.00871398453970485
|
1704 |
+
17 776 0.012157413914265636
|
1705 |
+
17 777 0.00035137034434293746
|
1706 |
+
18 82 0.0006012777151446825
|
1707 |
+
18 83 0.00015031942878617063
|
1708 |
+
18 103 0.0004509582863585119
|
1709 |
+
18 142 7.515971439308531e-05
|
1710 |
+
18 160 0.001428034573468621
|
1711 |
+
18 161 0.0071401728673431055
|
1712 |
+
18 162 0.0018789928598271326
|
1713 |
+
18 182 0.0005261180007515971
|
1714 |
+
18 196 0.0018789928598271326
|
1715 |
+
18 197 0.0009019165727170237
|
1716 |
+
18 198 0.0008267568583239384
|
1717 |
+
18 199 0.006238256294626081
|
1718 |
+
18 201 0.003006388575723412
|
1719 |
+
18 202 0.0018038331454340473
|
1720 |
+
18 275 0.00015031942878617063
|
1721 |
+
18 278 0.002179631717399474
|
1722 |
+
18 289 0.003607666290868095
|
1723 |
+
18 580 0.020142803457346866
|
1724 |
+
18 581 0.01698609545283728
|
1725 |
+
18 582 0.03013904547162721
|
1726 |
+
18 583 0.02705749718151071
|
1727 |
+
18 584 0.017737692596768138
|
1728 |
+
18 585 0.016910935738444193
|
1729 |
+
18 586 0.021270199173243146
|
1730 |
+
18 587 0.018489289740698987
|
1731 |
+
18 588 0.01924088688462984
|
1732 |
+
18 589 0.016910935738444197
|
1733 |
+
18 590 0.016234498308906428
|
1734 |
+
18 591 0.017061255167230362
|
1735 |
+
18 592 0.015182262307403233
|
1736 |
+
18 593 0.010672679443818113
|
1737 |
+
18 594 0.004509582863585118
|
1738 |
+
18 595 0.005712138293874483
|
1739 |
+
18 596 0.01570838030815483
|
1740 |
+
18 597 0.011424276587748966
|
1741 |
+
18 598 0.018714768883878245
|
1742 |
+
18 599 0.01969184517098835
|
1743 |
+
18 600 0.020593761743705377
|
1744 |
+
18 601 0.023900789177001128
|
1745 |
+
18 602 0.027583615182262308
|
1746 |
+
18 603 0.0266065388951522
|
1747 |
+
18 604 0.007591131153701616
|
1748 |
+
18 605 0.007666290868094702
|
1749 |
+
18 606 0.012701991732431419
|
1750 |
+
18 607 0.020668921458098462
|
1751 |
+
18 608 0.016309658023299513
|
1752 |
+
18 609 0.015031942878617064
|
1753 |
+
18 610 0.017061255167230362
|
1754 |
+
18 611 0.009845922585494176
|
1755 |
+
18 612 0.009845922585494176
|
1756 |
+
18 613 0.02435174746335964
|
1757 |
+
18 614 0.02450206689214581
|
1758 |
+
18 615 0.02720781661029688
|
1759 |
+
18 616 0.016459977452085682
|
1760 |
+
18 617 0.021119879744456973
|
1761 |
+
18 618 0.012251033446072902
|
1762 |
+
18 619 0.020744081172491546
|
1763 |
+
18 620 0.02247275460353251
|
1764 |
+
18 621 0.008643367155204812
|
1765 |
+
18 622 0.025779782036828264
|
1766 |
+
18 623 0.02006764374295378
|
1767 |
+
18 624 0.011724915445321307
|
1768 |
+
18 625 0.01089815858699737
|
1769 |
+
18 626 0.011800075159714395
|
1770 |
+
18 627 0.006914693724163849
|
1771 |
+
18 628 0.011499436302142051
|
1772 |
+
18 629 0.011273957158962795
|
1773 |
+
18 630 0.0214956783164224
|
1774 |
+
18 631 0.012251033446072907
|
1775 |
+
18 632 0.013077790304396842
|
1776 |
+
18 633 0.014656144306651634
|
1777 |
+
18 634 0.00496054114994363
|
1778 |
+
18 635 0.006914693724163849
|
1779 |
+
18 636 0.004735062006764375
|
1780 |
+
18 637 0.004359263434798948
|
1781 |
+
18 638 0.004208944006012777
|
1782 |
+
18 639 0.003757985719654265
|
1783 |
+
18 640 0.0021796317173994736
|
1784 |
+
18 641 0.003908305148440436
|
1785 |
+
18 642 0.0010522360015031943
|
1786 |
+
18 643 0.0006012777151446825
|
1787 |
+
18 648 0.0005261180007515971
|
1788 |
+
18 658 0.0020293122886133035
|
1789 |
+
18 659 0.0011273957158962795
|
1790 |
+
18 668 0.000751597143930853
|
1791 |
+
18 669 0.0011273957158962795
|
1792 |
+
18 680 0.002931228861330327
|
1793 |
+
18 681 0.0057872980082675695
|
1794 |
+
18 682 0.001428034573468621
|
1795 |
+
18 684 0.0006012777151446825
|
1796 |
+
18 685 0.0006012777151446825
|
1797 |
+
18 696 0.003757985719654265
|
1798 |
+
18 769 0.0009019165727170238
|
1799 |
+
18 770 0.002931228861330327
|
1800 |
+
18 771 0.00015031942878617063
|
1801 |
+
18 775 0.00022547914317925594
|
1802 |
+
18 776 0.001202555430289365
|
1803 |
+
19 580 0.012027744982290436
|
1804 |
+
19 581 0.009961629279811098
|
1805 |
+
19 582 0.0059031877213695395
|
1806 |
+
19 583 0.004574970484061393
|
1807 |
+
19 584 0.009223730814639905
|
1808 |
+
19 585 0.011289846517119244
|
1809 |
+
19 586 0.0038370720188902006
|
1810 |
+
19 587 0.0028040141676505316
|
1811 |
+
19 588 0.0057556080283353
|
1812 |
+
19 589 0.0014757969303423849
|
1813 |
+
19 590 0.0003689492325855962
|
1814 |
+
19 591 0.0008116883116883117
|
1815 |
+
19 598 0.011806375442739079
|
1816 |
+
19 599 0.004501180637544274
|
1817 |
+
19 600 0.0012544273907910272
|
1818 |
+
19 601 0.0012544273907910272
|
1819 |
+
19 602 0.005165289256198347
|
1820 |
+
19 603 0.005165289256198347
|
1821 |
+
19 609 0.0005165289256198347
|
1822 |
+
19 610 0.0008116883116883117
|
1823 |
+
19 613 0.0016971664698937428
|
1824 |
+
19 614 0.0010330578512396697
|
1825 |
+
19 615 0.0011068476977567888
|
1826 |
+
19 616 0.0003689492325855962
|
1827 |
+
19 617 0.0028040141676505316
|
1828 |
+
19 618 0.021325265643447465
|
1829 |
+
19 619 0.012322904368358915
|
1830 |
+
19 620 0.016528925619834708
|
1831 |
+
19 621 0.022432113341204252
|
1832 |
+
19 622 0.0042060212514757975
|
1833 |
+
19 623 0.009518890200708384
|
1834 |
+
19 624 0.021989374262101534
|
1835 |
+
19 625 0.015053128689492327
|
1836 |
+
19 626 0.01977567886658796
|
1837 |
+
19 628 0.017635773317591502
|
1838 |
+
19 629 0.019406729634002362
|
1839 |
+
19 630 0.010256788665879575
|
1840 |
+
19 631 0.01977567886658796
|
1841 |
+
19 632 0.018299881936245575
|
1842 |
+
19 633 0.014684179456906728
|
1843 |
+
19 634 0.027007083825265645
|
1844 |
+
19 635 0.02317001180637544
|
1845 |
+
19 636 0.026638134592680048
|
1846 |
+
19 637 0.02368654073199528
|
1847 |
+
19 638 0.02435064935064935
|
1848 |
+
19 639 0.02457201889020071
|
1849 |
+
19 640 0.021915584415584416
|
1850 |
+
19 641 0.0256788665879575
|
1851 |
+
19 642 0.018816410861865408
|
1852 |
+
19 643 0.01682408500590319
|
1853 |
+
19 644 0.002656434474616293
|
1854 |
+
19 645 0.009445100354191263
|
1855 |
+
19 646 0.0031729634002361272
|
1856 |
+
19 647 0.0025826446280991736
|
1857 |
+
19 648 0.016602715466351833
|
1858 |
+
19 649 0.006419716646989375
|
1859 |
+
19 650 0.010478158205430934
|
1860 |
+
19 651 0.004870129870129871
|
1861 |
+
19 652 0.003246753246753247
|
1862 |
+
19 653 0.0014757969303423849
|
1863 |
+
19 654 0.0013282172373081465
|
1864 |
+
19 655 0.010847107438016527
|
1865 |
+
19 656 0.0005903187721369539
|
1866 |
+
19 657 0.0005165289256198347
|
1867 |
+
19 658 0.02169421487603306
|
1868 |
+
19 659 0.019406729634002362
|
1869 |
+
19 660 0.007747933884297522
|
1870 |
+
19 661 0.001844746162927981
|
1871 |
+
19 662 0.00014757969303423848
|
1872 |
+
19 663 0.012101534828807558
|
1873 |
+
19 664 0.0038370720188902014
|
1874 |
+
19 665 0.0007378984651711924
|
1875 |
+
19 666 0.0157172373081464
|
1876 |
+
19 667 0.006050767414403779
|
1877 |
+
19 668 0.02221074380165289
|
1878 |
+
19 669 0.021103896103896107
|
1879 |
+
19 670 0.0028040141676505316
|
1880 |
+
19 674 0.006419716646989373
|
1881 |
+
19 675 0.0014020070838252656
|
1882 |
+
19 676 0.010109208972845335
|
1883 |
+
19 677 0.0030253837072018895
|
1884 |
+
19 678 7.378984651711924e-05
|
1885 |
+
19 679 0.00014757969303423848
|
1886 |
+
19 680 0.02206316410861866
|
1887 |
+
19 681 0.022432113341204252
|
1888 |
+
19 682 0.0256788665879575
|
1889 |
+
19 683 0.01977567886658796
|
1890 |
+
19 684 0.022358323494687134
|
1891 |
+
19 685 0.02088252656434475
|
1892 |
+
19 686 0.012470484061393153
|
1893 |
+
19 687 0.01586481700118064
|
1894 |
+
19 688 0.004427390791027155
|
1895 |
+
19 689 0.006198347107438017
|
1896 |
+
19 690 0.00974025974025974
|
1897 |
+
19 691 0.000885478158205431
|
1898 |
+
19 692 0.0003689492325855962
|
1899 |
+
19 693 0.00014757969303423848
|
1900 |
+
19 694 0.002877804014167651
|
1901 |
+
19 695 0.0016233766233766235
|
1902 |
+
20 673 1.0
|
data/body_models/SMPLX_to_J14.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5df844ddea85b0a400a2e8dbe63d09d19f2b1b7ec0e0e952daeae08f83d82d61
|
3 |
+
size 4692193
|
data/body_models/SMPL_NEUTRAL.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4924f235e63f7c5d5b690acedf736419c2edb846a2d69fc0956169615fa75688
|
3 |
+
size 247186228
|
data/body_models/all_means.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:010c2178eff5fd58d07bab3717002e959fe62541aaaef778b09414ec0237690d
|
3 |
+
size 4758
|
data/body_models/downsample_mat_smplx.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b67d12e8e9af767d9856fea8cb3366bfa8025fdf17cd4e25fc8b10f9a45eca9e
|
3 |
+
size 18310685
|
data/body_models/joints_regressor_cmr.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a408b885040d714c94b41f64b2ec329d20dce673ae330d04a07a4b02dae7a13d
|
3 |
+
size 661568
|
data/body_models/smpl/SMPL_FEMALE.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a583c1b98e4afc19042641f1bae5cd8a1f712a6724886291a7627ec07acd408d
|
3 |
+
size 39056454
|
data/body_models/smpl/SMPL_MALE.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:0e8c0bbbbc635dcb166ed29c303fb4bef16ea5f623e5a89263495a9e403575bd
|
3 |
+
size 39056404
|
data/body_models/smpl/SMPL_NEUTRAL.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:4924f235e63f7c5d5b690acedf736419c2edb846a2d69fc0956169615fa75688
|
3 |
+
size 247186228
|
data/body_models/smpl/index.html
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
|
2 |
+
<html>
|
3 |
+
<head>
|
4 |
+
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
5 |
+
<title>Directory listing for /body_models/smpl/</title>
|
6 |
+
</head>
|
7 |
+
<body>
|
8 |
+
<h1>Directory listing for /body_models/smpl/</h1>
|
9 |
+
<hr>
|
10 |
+
<ul>
|
11 |
+
<li><a href="SMPL_FEMALE.pkl">SMPL_FEMALE.pkl</a></li>
|
12 |
+
<li><a href="SMPL_MALE.pkl">SMPL_MALE.pkl</a></li>
|
13 |
+
<li><a href="SMPL_NEUTRAL.pkl">SMPL_NEUTRAL.pkl</a></li>
|
14 |
+
</ul>
|
15 |
+
<hr>
|
16 |
+
</body>
|
17 |
+
</html>
|
data/body_models/smpl_mean_params.npz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:6fd6dd687800da946d0a0492383f973b92ec20f166a0b829775882868c35fcdd
|
3 |
+
size 1310
|
data/body_models/smplx/MANO_SMPLX_vertex_ids.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e5abe70b6574de25470475091e8008314a5b90127eb48c3e63bfa0adf8c04dcf
|
3 |
+
size 13535
|
data/body_models/smplx/SMPL-X__FLAME_vertex_ids.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:7e70cdc3659aae699b9732e8dd4af49106310c69b90dc83d9f73e96dbf871e49
|
3 |
+
size 40312
|
data/body_models/smplx/SMPLX_FEMALE.npz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b2a3686c9d6d218ff6822fba411c607a3c8125a70af340f384ce68bebecabe0e
|
3 |
+
size 108794146
|
data/body_models/smplx/SMPLX_FEMALE.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f3ac7af258fd217ab480b839c011545e5826cfa333ab34b3c98244ee3039bddd
|
3 |
+
size 544434140
|
data/body_models/smplx/SMPLX_MALE.npz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ab318e3f37d2bfaae26abf4e6fab445c2a610e1d63714794d60379cc263bc2a5
|
3 |
+
size 108753445
|
data/body_models/smplx/SMPLX_MALE.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:af7ebc82e44cf098598685474c0592049ddfaca8e850feb0c2b88343f9aacee3
|
3 |
+
size 544477159
|
data/body_models/smplx/SMPLX_NEUTRAL.npz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:376021446ddc86e99acacd795182bbef903e61d33b76b9d8b359c2b0865bd992
|
3 |
+
size 108752058
|
data/body_models/smplx/SMPLX_NEUTRAL.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:381c808965deb4f5e845f8c3eddb0cd69930cc72e5774ce4f34c4ce3cf058361
|
3 |
+
size 544173380
|
data/body_models/smplx/SMPLX_to_J14.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:be01f37aa99e794ace8f52abe7b31df302fe54c68e75062ea0431a6c2f5e084f
|
3 |
+
size 1173328
|
data/body_models/smplx/SMPLX_to_J14.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5df844ddea85b0a400a2e8dbe63d09d19f2b1b7ec0e0e952daeae08f83d82d61
|
3 |
+
size 4692193
|
data/body_models/smplx/smplx_kid_template.npy
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:bdce4f5886b9ddcb6da3ee0f70ae636b1aa1292f2b379c4c3149fce8abc0a604
|
3 |
+
size 251528
|
data/body_models/smplx2smpl.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c1d912d121ad98132e4492d8e7a0f1a8cf4412811e14a7ef8cb337bb48eef99e
|
3 |
+
size 578019251
|
datasets/AGORA_MM.py
ADDED
@@ -0,0 +1,974 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import os.path as osp
|
3 |
+
from glob import glob
|
4 |
+
import numpy as np
|
5 |
+
from config.config import cfg
|
6 |
+
import copy
|
7 |
+
import json
|
8 |
+
import pickle
|
9 |
+
import cv2
|
10 |
+
import torch
|
11 |
+
from pycocotools.coco import COCO
|
12 |
+
from util.human_models import smpl_x
|
13 |
+
from util.preprocessing import load_img, sanitize_bbox, process_bbox, load_ply, load_obj
|
14 |
+
from util.transforms import rigid_align, rigid_align_batch
|
15 |
+
import tqdm
|
16 |
+
import random
|
17 |
+
from util.formatting import DefaultFormatBundle
|
18 |
+
from detrsmpl.data.datasets.pipelines.transforms import Normalize
|
19 |
+
import time
|
20 |
+
from util.preprocessing import (
|
21 |
+
load_img, process_bbox, augmentation_instance_sample
|
22 |
+
,process_human_model_output_batch_simplify,process_db_coord_batch_no_valid)
|
23 |
+
# from util.human_models import smpl_x
|
24 |
+
from .humandata import HumanDataset
|
25 |
+
import csv
|
26 |
+
KPS2D_KEYS = [
|
27 |
+
'keypoints2d_ori', 'keypoints2d_smplx', 'keypoints2d_smpl',
|
28 |
+
'keypoints2d_original','keypoints2d_gta'
|
29 |
+
]
|
30 |
+
KPS3D_KEYS = [
|
31 |
+
'keypoints3d_cam', 'keypoints3d', 'keypoints3d_smplx', 'keypoints3d_smpl',
|
32 |
+
'keypoints3d_original', 'keypoints3d_gta'
|
33 |
+
]
|
34 |
+
class AGORA_MM(HumanDataset):
|
35 |
+
def __init__(self, transform, data_split):
|
36 |
+
super(AGORA_MM, self).__init__(transform, data_split)
|
37 |
+
self.img_shape = [2160,3840]
|
38 |
+
pre_prc_file_train = 'spec_train_smpl.npz'
|
39 |
+
pre_prc_file_test = 'spec_test_smpl.npz'
|
40 |
+
self.save_idx = 0
|
41 |
+
if self.data_split == 'train':
|
42 |
+
filename = getattr(cfg, 'filename', pre_prc_file_train)
|
43 |
+
else:
|
44 |
+
self.test_set = 'val'
|
45 |
+
|
46 |
+
self.img_dir = './data/datasets/agora'
|
47 |
+
|
48 |
+
|
49 |
+
if data_split == 'train':
|
50 |
+
if self.img_shape == [2160,3840]:
|
51 |
+
self.annot_path = 'data/preprocessed_npz/multihuman_data/agora_train_3840_w_occ_multi_2010.npz'
|
52 |
+
self.annot_path_cache = 'data/preprocessed_npz/cache/agora_train_3840_w_occ_cache_2010.npz'
|
53 |
+
elif self.img_shape == [720,1280]:
|
54 |
+
self.annot_path = 'data/preprocessed_npz/multihuman_data/agora_train_1280_multi_1010.npz'
|
55 |
+
self.annot_path_cache = 'data/preprocessed_npz/cache/agora_train_cache_1280_1010.npz'
|
56 |
+
|
57 |
+
elif data_split == 'test':
|
58 |
+
if self.img_shape == [2160,3840]:
|
59 |
+
self.annot_path = 'data/preprocessed_npz/multihuman_data/agora_validation_multi_3840_1010.npz'
|
60 |
+
self.annot_path_cache = 'data/preprocessed_npz/cache/agora_validation_cache_3840_1010_occ_cache_balance.npz'
|
61 |
+
elif self.img_shape == [720,1280]:
|
62 |
+
self.annot_path = 'data/preprocessed_npz/multihuman_data/agora_validation_1280_1010_occ.npz'
|
63 |
+
self.annot_path_cache = 'data/preprocessed_npz/cache/agora_validation_cache_1280_1010_occ.npz'
|
64 |
+
|
65 |
+
self.use_cache = getattr(cfg, 'use_cache', False)
|
66 |
+
self.cam_param = {}
|
67 |
+
|
68 |
+
# load data or cache
|
69 |
+
if self.use_cache and osp.isfile(self.annot_path_cache):
|
70 |
+
print(f'[{self.__class__.__name__}] loading cache from {self.annot_path_cache}')
|
71 |
+
self.datalist = self.load_cache(self.annot_path_cache)
|
72 |
+
else:
|
73 |
+
if self.use_cache:
|
74 |
+
print(f'[{self.__class__.__name__}] Cache not found, generating cache...')
|
75 |
+
self.datalist = self.load_data(
|
76 |
+
train_sample_interval=getattr(cfg, f'{self.__class__.__name__}_train_sample_interval', 1))
|
77 |
+
if self.use_cache:
|
78 |
+
self.save_cache(self.annot_path_cache, self.datalist)
|
79 |
+
|
80 |
+
|
81 |
+
def load_data(self, train_sample_interval=1):
    """Load a HumanData-style ``.npz`` annotation archive into a per-image datalist.

    Reads ``self.annot_path``, decompresses keypoints when the archive is
    compressed, remaps 2D/3D keypoints through ``self.SMPLX_137_MAPPING``,
    renames SMPL/SMPL-X parameter keys to the internal scheme, filters
    instances whose body bbox fails :func:`process_bbox`, and collects one
    dict per image frame.

    Args:
        train_sample_interval (int): when ``self.data_split == 'train'``,
            keep only every ``train_sample_interval``-th frame (1 = keep all).

    Returns:
        list[dict]: one entry per image with body/hand/face bboxes (xyxy),
        2D/3D keypoints, SMPL-X parameters and per-instance meta
        (gender, occlusion, is_kid).

    Raises:
        KeyError: if the archive holds none of smplx/smpl/smplh parameters
            (except for the special-cased SHAPY dataset).
    """
    content = np.load(self.annot_path, allow_pickle=True)

    # Frame ranges group consecutive instance rows into one image; when the
    # archive lacks them, fall back to one instance per frame.
    try:
        frame_range = content['frame_range']
    except KeyError:
        frame_range = \
            np.array([[i, i + 1] for i in range(self.num_data)])

    num_examples = len(frame_range)

    if 'meta' in content:
        meta = content['meta'].item()
        print('meta keys:', meta.keys())
    else:
        meta = None
        print(
            'No meta info provided! Please give height and width manually')

    print(
        f'Start loading humandata {self.annot_path} into memory...\nDataset includes: {content.files}'
    )
    tic = time.time()
    image_path = content['image_path']

    # Per-image (height, width); None means callers fall back to self.img_shape.
    if meta is not None and 'height' in meta:
        height = np.array(meta['height'])
        width = np.array(meta['width'])
        image_shape = np.stack([height, width], axis=-1)
    else:
        image_shape = None

    if meta is not None and 'gender' in meta and len(meta['gender']) != 0:
        gender = meta['gender']
    else:
        gender = None

    if meta is not None and 'is_kid' in meta and len(meta['is_kid']) != 0:
        is_kid = meta['is_kid']
    else:
        is_kid = None

    bbox_xywh = content['bbox_xywh']

    # Pick whichever body-model parameter set the archive provides and
    # remember its flavour so downstream conversion can adapt.
    if 'smplx' in content:
        smplx = content['smplx'].item()
        as_smplx = 'smplx'
    elif 'smpl' in content:
        smplx = content['smpl'].item()
        as_smplx = 'smpl'
    elif 'smplh' in content:
        smplx = content['smplh'].item()
        as_smplx = 'smplh'
    # TODO: temp solution, should be more general. But SHAPY is very special
    elif self.__class__.__name__ == 'SHAPY':
        # NOTE(review): 'as_smplx' is left unset on this path; appending to
        # datalist below would raise NameError — presumably SHAPY never
        # reaches that point. TODO confirm.
        smplx = {}
    else:
        raise KeyError('No SMPL for SMPLX available, please check keys:\n'
                       f'{content.files}')

    print('Smplx param', smplx.keys())

    # Hand/face bboxes are optional; zero confidence marks them invalid below.
    if 'lhand_bbox_xywh' in content and 'rhand_bbox_xywh' in content:
        lhand_bbox_xywh = content['lhand_bbox_xywh']
        rhand_bbox_xywh = content['rhand_bbox_xywh']
    else:
        lhand_bbox_xywh = np.zeros_like(bbox_xywh)
        rhand_bbox_xywh = np.zeros_like(bbox_xywh)

    if 'face_bbox_xywh' in content:
        face_bbox_xywh = content['face_bbox_xywh']
    else:
        face_bbox_xywh = np.zeros_like(bbox_xywh)

    decompressed = False
    if content['__keypoints_compressed__']:
        decompressed_kps = self.decompress_keypoints(content)
        decompressed = True

    keypoints3d = None
    valid_kps3d = False
    keypoints3d_mask = None
    valid_kps3d_mask = False

    # processing keypoints: take the first available 3D key, remapped to
    # the 137-joint SMPL-X layout.
    for kps3d_key in KPS3D_KEYS:
        if kps3d_key in content:
            keypoints3d = decompressed_kps[kps3d_key][:, self.SMPLX_137_MAPPING, :] if decompressed \
                else content[kps3d_key][:, self.SMPLX_137_MAPPING, :]
            valid_kps3d = True
            if keypoints3d.shape[-1] == 4:
                valid_kps3d_mask = True
            break
    if self.keypoints2d is not None:
        keypoints2d = decompressed_kps[self.keypoints2d][:, self.SMPLX_137_MAPPING, :] if decompressed \
            else content[self.keypoints2d][:, self.SMPLX_137_MAPPING, :]
    else:
        for kps2d_key in KPS2D_KEYS:
            if kps2d_key in content:
                keypoints2d = decompressed_kps[kps2d_key][:, self.SMPLX_137_MAPPING, :] if decompressed \
                    else content[kps2d_key][:, self.SMPLX_137_MAPPING, :]

    # NOTE(review): 'keypoints2d' is unbound if no 2D key matched — an
    # archive without 2D keypoints would raise NameError here. TODO confirm
    # that all supported archives carry one of KPS2D_KEYS.
    if keypoints2d.shape[-1] == 3:
        valid_kps3d_mask = True
    occlusion = content['meta'][()]['occ'] if 'occ' in content['meta'][()] and len(content['meta'][()]['occ'])>0 else None

    print('Done. Time: {:.2f}s'.format(time.time() - tic))

    datalist = []
    # num_examples

    # processing each image, filter according to bbox valid
    for i in tqdm.tqdm(range(int(num_examples))):
        if self.data_split == 'train' and i % train_sample_interval != 0:
            continue
        frame_start, frame_end = frame_range[i]
        img_path = osp.join(self.img_dir, image_path[frame_start])
        # im_shape = cv2.imread(img_path).shape[:2]
        img_shape = image_shape[
            frame_start] if image_shape is not None else self.img_shape

        bbox_list = bbox_xywh[frame_start:frame_end, :4]

        valid_idx = []
        body_bbox_list = []

        if hasattr(cfg, 'bbox_ratio'):
            bbox_ratio = cfg.bbox_ratio * 0.833  # preprocess body bbox is giving 1.2 box padding
        else:
            bbox_ratio = 1.25

        for bbox_i, bbox in enumerate(bbox_list):
            bbox = process_bbox(bbox,
                                img_width=img_shape[1],
                                img_height=img_shape[0],
                                ratio=bbox_ratio)
            if bbox is None:
                continue
            else:
                valid_idx.append(frame_start + bbox_i)
                bbox[2:] += bbox[:2]  # xywh -> xyxy
                body_bbox_list.append(bbox)
        if len(valid_idx) == 0:
            continue
        valid_num = len(valid_idx)
        # hand/face bbox
        lhand_bbox_list = []
        rhand_bbox_list = []
        face_bbox_list = []

        for bbox_i in valid_idx:
            lhand_bbox = lhand_bbox_xywh[bbox_i]
            rhand_bbox = rhand_bbox_xywh[bbox_i]
            face_bbox = face_bbox_xywh[bbox_i]
            if lhand_bbox[-1] > 0:  # conf > 0
                lhand_bbox = lhand_bbox[:4]
                if hasattr(cfg, 'bbox_ratio'):
                    lhand_bbox = process_bbox(lhand_bbox,
                                              img_width=img_shape[1],
                                              img_height=img_shape[0],
                                              ratio=bbox_ratio)
                    if lhand_bbox is not None:
                        lhand_bbox[2:] += lhand_bbox[:2]  # xywh -> xyxy
            else:
                lhand_bbox = None
            if rhand_bbox[-1] > 0:
                rhand_bbox = rhand_bbox[:4]
                if hasattr(cfg, 'bbox_ratio'):
                    rhand_bbox = process_bbox(rhand_bbox,
                                              img_width=img_shape[1],
                                              img_height=img_shape[0],
                                              ratio=bbox_ratio)
                    if rhand_bbox is not None:
                        rhand_bbox[2:] += rhand_bbox[:2]  # xywh -> xyxy
            else:
                rhand_bbox = None
            if face_bbox[-1] > 0:
                face_bbox = face_bbox[:4]
                if hasattr(cfg, 'bbox_ratio'):
                    face_bbox = process_bbox(face_bbox,
                                             img_width=img_shape[1],
                                             img_height=img_shape[0],
                                             ratio=bbox_ratio)
                    if face_bbox is not None:
                        face_bbox[2:] += face_bbox[:2]  # xywh -> xyxy
            else:
                face_bbox = None
            lhand_bbox_list.append(lhand_bbox)
            rhand_bbox_list.append(rhand_bbox)
            face_bbox_list.append(face_bbox)

        joint_img = keypoints2d[valid_idx]

        if valid_kps3d:
            joint_cam = keypoints3d[valid_idx]
        else:
            joint_cam = None

        # Eye poses are not supervised; drop every variant key if present.
        if 'leye_pose_0' in smplx.keys():
            smplx.pop('leye_pose_0')
        if 'leye_pose_1' in smplx.keys():
            smplx.pop('leye_pose_1')
        if 'leye_pose' in smplx.keys():
            smplx.pop('leye_pose')
        if 'reye_pose_0' in smplx.keys():
            smplx.pop('reye_pose_0')
        if 'reye_pose_1' in smplx.keys():
            smplx.pop('reye_pose_1')
        if 'reye_pose' in smplx.keys():
            smplx.pop('reye_pose')

        occlusion_frame = occlusion[valid_idx] \
            if occlusion is not None else np.array([1]*(valid_num))

        smplx_param = {k: v[valid_idx] for k, v in smplx.items()}
        gender_ = gender[valid_idx] \
            if gender is not None else np.array(['neutral']*(valid_num))

        is_kid_ = is_kid[valid_idx] \
            if is_kid is not None else np.array([1]*(valid_num))
        lhand_bbox_valid = lhand_bbox_xywh[valid_idx,4]
        rhand_bbox_valid = rhand_bbox_xywh[valid_idx,4]
        face_bbox_valid = face_bbox_xywh[valid_idx,4]

        # Rename HumanData keys to the internal SMPL-X parameter scheme.
        smplx_param['root_pose'] = smplx_param.pop('global_orient', None)
        smplx_param['shape'] = smplx_param.pop('betas', None)
        smplx_param['trans'] = smplx_param.pop('transl', np.zeros(3))
        smplx_param['lhand_pose'] = smplx_param.pop('left_hand_pose', None)
        smplx_param['rhand_pose'] = smplx_param.pop(
            'right_hand_pose', None)
        smplx_param['expr'] = smplx_param.pop('expression', None)

        # TODO do not fix betas, give up shape supervision
        if 'betas_neutral' in smplx_param and self.data_split == 'train':
            smplx_param['shape'] = smplx_param.pop('betas_neutral')
            # smplx_param['shape'] = np.zeros(10, dtype=np.float32)

        # np.bool8 was removed in NumPy 2.0; np.bool_ is the supported
        # spelling on all NumPy versions.
        if smplx_param['lhand_pose'] is None or self.body_only == True:
            smplx_param['lhand_valid'] = np.zeros(valid_num, dtype=np.bool_)
        else:
            smplx_param['lhand_valid'] = lhand_bbox_valid.astype(np.bool_)

        if smplx_param['rhand_pose'] is None or self.body_only == True:
            smplx_param['rhand_valid'] = np.zeros(valid_num, dtype=np.bool_)
        else:
            smplx_param['rhand_valid'] = rhand_bbox_valid.astype(np.bool_)

        if smplx_param['expr'] is None or self.body_only == True:
            smplx_param['face_valid'] = np.zeros(valid_num, dtype=np.bool_)
        else:
            smplx_param['face_valid'] = face_bbox_valid.astype(np.bool_)

        # Skip frames with NaN 3D joints entirely.
        if joint_cam is not None and np.any(np.isnan(joint_cam)):
            continue

        datalist.append({
            'img_path': img_path,
            'img_shape': img_shape,
            'bbox': body_bbox_list,
            'lhand_bbox': lhand_bbox_list,
            'rhand_bbox': rhand_bbox_list,
            'face_bbox': face_bbox_list,
            'joint_img': joint_img,
            'joint_cam': joint_cam,
            'smplx_param': smplx_param,
            'as_smplx': as_smplx,
            'gender': gender_,
            'occlusion': occlusion_frame,
            'is_kid': is_kid_,
        })

    # save memory
    del content, image_path, bbox_xywh, lhand_bbox_xywh, rhand_bbox_xywh, face_bbox_xywh, keypoints3d, keypoints2d

    if self.data_split == 'train':
        print(f'[{self.__class__.__name__} train] original size:',
              int(num_examples), '. Sample interval:',
              train_sample_interval, '. Sampled size:', len(datalist))

    if getattr(cfg, 'data_strategy',
               None) == 'balance' and self.data_split == 'train':
        print(
            f'[{self.__class__.__name__}] Using [balance] strategy with datalist shuffled...'
        )
        random.shuffle(datalist)

    return datalist
def __getitem__(self, idx):
    """Return one normalized, formatted sample for training or evaluation.

    Deep-copies entry ``idx`` of ``self.datalist`` (as built by
    ``load_data``), loads and augments the image, processes 2D/3D keypoints
    and SMPL-X parameters for every person in the frame, and packs inputs,
    targets and meta info into a single dict passed through
    ``self.normalize`` and ``self.format``.

    The train branch additionally filters instances by body-bbox validity
    and applies occlusion / kid masking; the test branch keeps all
    instances and adds ``ann_idx`` and ``bb2img_trans`` for evaluation.
    """
    try:
        data = copy.deepcopy(self.datalist[idx])
    except Exception as e:
        # NOTE(review): hard-exits the worker process on any lookup error
        # instead of raising — presumably intentional fail-fast; confirm.
        print(f'[{self.__class__.__name__}] Error loading data {idx}')
        print(e)
        exit(0)

    img_path, img_shape, bbox = \
        data['img_path'], data['img_shape'], data['bbox']
    as_smplx = data['as_smplx']
    # Map gender strings to integer codes in-place on the copied array.
    gender = data['gender'].copy()
    for gender_str, gender_num in {
        'neutral': -1, 'male': 0, 'female': 1}.items():
        gender[gender==gender_str]=gender_num
    gender = gender.astype(int)

    # Whole-image bbox drives the crop/augmentation below.
    img_whole_bbox = np.array([0, 0, img_shape[1], img_shape[0]])
    img = load_img(img_path, order='BGR')

    num_person = len(data['bbox'])
    data_name = self.__class__.__name__
    img, img2bb_trans, bb2img_trans, rot, do_flip = \
        augmentation_instance_sample(img, img_whole_bbox, self.data_split,data,data_name)
    cropped_img_shape=img.shape[:2]

    num_person = len(data['bbox'])
    if self.data_split == 'train':
        joint_cam = data['joint_cam']  # num, 137,4
        if joint_cam is not None:
            dummy_cord = False
            joint_cam[:,:,:3] = \
                joint_cam[:,:,:3] - joint_cam[:, self.joint_set['root_joint_idx'], None, :3]  # root-relative
        else:
            # dummy cord as joint_cam
            dummy_cord = True
            joint_cam = np.zeros(
                (num_person, self.joint_set['joint_num'], 4),
                dtype=np.float32)

        joint_img = data['joint_img']
        # do rotation on keypoints
        joint_img_aug, joint_cam_wo_ra, joint_cam_ra, joint_trunc = \
            process_db_coord_batch_no_valid(
                joint_img, joint_cam, do_flip, img_shape,
                self.joint_set['flip_pairs'], img2bb_trans, rot,
                self.joint_set['joints_name'], smpl_x.joints_name,
                cropped_img_shape)
        # Zero the confidence channel of truncated joints.
        joint_img_aug[:,:,2:] = joint_img_aug[:,:,2:] * joint_trunc

        # smplx coordinates and parameters
        smplx_param = data['smplx_param']
        smplx_pose, smplx_shape, smplx_expr, smplx_pose_valid, \
            smplx_joint_valid, smplx_expr_valid, smplx_shape_valid = \
            process_human_model_output_batch_simplify(
                smplx_param, do_flip, rot, as_smplx)
        # if cam not provided, we take joint_img as smplx joint 2d,
        # which is commonly the case for our processed humandata
        # change smplx_shape if use_betas_neutral
        # processing follows that in process_human_model_output
        if self.use_betas_neutral:
            smplx_shape = smplx_param['betas_neutral'].reshape(
                num_person, -1)
            # Betas with any |component| > 3 are treated as outliers and zeroed.
            smplx_shape[(np.abs(smplx_shape) > 3).any(axis=1)] = 0.
            smplx_shape = smplx_shape.reshape(num_person, -1)
        # SMPLX joint coordinate validity
        # for name in ('L_Big_toe', 'L_Small_toe', 'L_Heel', 'R_Big_toe', 'R_Small_toe', 'R_Heel'):
        #     smplx_joint_valid[smpl_x.joints_name.index(name)] = 0
        smplx_joint_valid = smplx_joint_valid[:, :, None]

        # Per-instance bbox accumulators for body/hands/face.
        lhand_bbox_center_list = []
        lhand_bbox_valid_list = []
        lhand_bbox_size_list = []
        lhand_bbox_list = []
        face_bbox_center_list = []
        face_bbox_size_list = []
        face_bbox_valid_list = []
        face_bbox_list = []
        rhand_bbox_center_list = []
        rhand_bbox_valid_list = []
        rhand_bbox_size_list = []
        rhand_bbox_list = []
        body_bbox_center_list = []
        body_bbox_size_list = []
        body_bbox_valid_list = []
        body_bbox_list = []

        for i in range(num_person):
            body_bbox, body_bbox_valid = self.process_hand_face_bbox(
                data['bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)

            lhand_bbox, lhand_bbox_valid = self.process_hand_face_bbox(
                data['lhand_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)
            # Part bbox is only valid when its SMPL-X params are too.
            lhand_bbox_valid *= smplx_param['lhand_valid'][i]

            rhand_bbox, rhand_bbox_valid = self.process_hand_face_bbox(
                data['rhand_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)
            rhand_bbox_valid *= smplx_param['rhand_valid'][i]

            face_bbox, face_bbox_valid = self.process_hand_face_bbox(
                data['face_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)
            face_bbox_valid *= smplx_param['face_valid'][i]

            # Horizontal flip swaps left/right hands.
            if do_flip:
                lhand_bbox, rhand_bbox = rhand_bbox, lhand_bbox
                lhand_bbox_valid, rhand_bbox_valid = rhand_bbox_valid, lhand_bbox_valid

            body_bbox_list.append(body_bbox)
            lhand_bbox_list.append(lhand_bbox)
            rhand_bbox_list.append(rhand_bbox)
            face_bbox_list.append(face_bbox)

            # Bboxes are (2, 2) corner pairs here: center and size from corners.
            lhand_bbox_center = (lhand_bbox[0] + lhand_bbox[1]) / 2.
            rhand_bbox_center = (rhand_bbox[0] + rhand_bbox[1]) / 2.
            face_bbox_center = (face_bbox[0] + face_bbox[1]) / 2.
            body_bbox_center = (body_bbox[0] + body_bbox[1]) / 2.
            lhand_bbox_size = lhand_bbox[1] - lhand_bbox[0]
            rhand_bbox_size = rhand_bbox[1] - rhand_bbox[0]

            face_bbox_size = face_bbox[1] - face_bbox[0]
            body_bbox_size = body_bbox[1] - body_bbox[0]
            lhand_bbox_center_list.append(lhand_bbox_center)
            lhand_bbox_valid_list.append(lhand_bbox_valid)
            lhand_bbox_size_list.append(lhand_bbox_size)
            face_bbox_center_list.append(face_bbox_center)
            face_bbox_size_list.append(face_bbox_size)
            face_bbox_valid_list.append(face_bbox_valid)
            rhand_bbox_center_list.append(rhand_bbox_center)
            rhand_bbox_valid_list.append(rhand_bbox_valid)
            rhand_bbox_size_list.append(rhand_bbox_size)
            body_bbox_center_list.append(body_bbox_center)
            body_bbox_size_list.append(body_bbox_size)
            body_bbox_valid_list.append(body_bbox_valid)

        body_bbox = np.stack(body_bbox_list, axis=0)
        lhand_bbox = np.stack(lhand_bbox_list, axis=0)
        rhand_bbox = np.stack(rhand_bbox_list, axis=0)
        face_bbox = np.stack(face_bbox_list, axis=0)
        lhand_bbox_center = np.stack(lhand_bbox_center_list, axis=0)
        lhand_bbox_valid = np.stack(lhand_bbox_valid_list, axis=0)
        lhand_bbox_size = np.stack(lhand_bbox_size_list, axis=0)
        face_bbox_center = np.stack(face_bbox_center_list, axis=0)
        face_bbox_size = np.stack(face_bbox_size_list, axis=0)
        face_bbox_valid = np.stack(face_bbox_valid_list, axis=0)
        body_bbox_center = np.stack(body_bbox_center_list, axis=0)
        body_bbox_size = np.stack(body_bbox_size_list, axis=0)
        body_bbox_valid = np.stack(body_bbox_valid_list, axis=0)
        rhand_bbox_center = np.stack(rhand_bbox_center_list, axis=0)
        rhand_bbox_valid = np.stack(rhand_bbox_valid_list, axis=0)
        rhand_bbox_size = np.stack(rhand_bbox_size_list, axis=0)

        if 'occlusion' in data:
            occlusion = data['occlusion']
            # Instances with occlusion value >= 97 are masked out of every
            # supervision target. NOTE(review): presumably AGORA's percent-
            # occluded annotation — confirm threshold semantics.
            occ_mask = occlusion<97

            joint_img_aug[:,:,2] = joint_img_aug[:,:,2]*occ_mask[:,None]
            joint_cam_wo_ra[:,:,3] = joint_cam_wo_ra[:,:,3]*occ_mask[:,None]
            joint_trunc = joint_trunc*occ_mask[:,None,None]
            smplx_pose_valid = smplx_pose_valid*occ_mask[:,None]
            smplx_joint_valid = smplx_joint_valid*occ_mask[:,None,None]
            smplx_expr_valid = smplx_expr_valid*occ_mask
            smplx_shape_valid = smplx_shape_valid*occ_mask
            rhand_bbox_valid = rhand_bbox_valid*occ_mask
            lhand_bbox_valid = lhand_bbox_valid*occ_mask
            face_bbox_valid = face_bbox_valid*occ_mask

        if 'is_kid' in data:
            # Kid shapes use the kid template; drop shape supervision for them.
            is_kid = data['is_kid'].copy()
            smplx_shape_valid = smplx_shape_valid * (is_kid==0)

        inputs = {'img': img}

        joint_img_aug[:,:,2] = joint_img_aug[:,:,2] * body_bbox_valid[:,None]

        is_3D = float(False) if dummy_cord else float(True)

        # Only instances with a valid body bbox are kept in the targets.
        targets = {
            # keypoints2d, [0,img_w],[0,img_h] -> [0,1] -> [0,output_hm_shape]
            'joint_img': joint_img_aug[body_bbox_valid>0],
            # joint_cam, kp3d wo ra # raw kps3d probably without ra
            'joint_cam': joint_cam_wo_ra[body_bbox_valid>0],
            # kps3d with body, face, hand ra
            'smplx_joint_cam': joint_cam_ra[body_bbox_valid>0],
            'smplx_pose': smplx_pose[body_bbox_valid>0],
            'smplx_shape': smplx_shape[body_bbox_valid>0],
            'smplx_expr': smplx_expr[body_bbox_valid>0],
            'lhand_bbox_center': lhand_bbox_center[body_bbox_valid>0],
            'lhand_bbox_size': lhand_bbox_size[body_bbox_valid>0],
            'rhand_bbox_center': rhand_bbox_center[body_bbox_valid>0],
            'rhand_bbox_size': rhand_bbox_size[body_bbox_valid>0],
            'face_bbox_center': face_bbox_center[body_bbox_valid>0],
            'face_bbox_size': face_bbox_size[body_bbox_valid>0],
            'body_bbox_center': body_bbox_center[body_bbox_valid>0],
            'body_bbox_size': body_bbox_size[body_bbox_valid>0],
            'body_bbox': body_bbox.reshape(-1,4)[body_bbox_valid>0],
            'lhand_bbox': lhand_bbox.reshape(-1,4)[body_bbox_valid>0],
            'rhand_bbox': rhand_bbox.reshape(-1,4)[body_bbox_valid>0],
            'face_bbox': face_bbox.reshape(-1,4)[body_bbox_valid>0],
            'gender': gender[body_bbox_valid>0]}

        meta_info = {
            'joint_trunc': joint_trunc[body_bbox_valid>0],
            'smplx_pose_valid': smplx_pose_valid[body_bbox_valid>0],
            'smplx_shape_valid': smplx_shape_valid[body_bbox_valid>0],
            'smplx_expr_valid': smplx_expr_valid[body_bbox_valid>0],
            'is_3D': is_3D,
            'lhand_bbox_valid': lhand_bbox_valid[body_bbox_valid>0],
            'rhand_bbox_valid': rhand_bbox_valid[body_bbox_valid>0],
            'face_bbox_valid': face_bbox_valid[body_bbox_valid>0],
            'body_bbox_valid': body_bbox_valid[body_bbox_valid>0],
            'img_shape': np.array(img.shape[:2]),
            'ori_shape':data['img_shape'],
            'idx': idx
        }
        result = {**inputs, **targets, **meta_info}

        result = self.normalize(result)
        result = self.format(result)
        return result

    if self.data_split == 'test':
        self.cam_param = {}
        joint_cam = data['joint_cam']

        if joint_cam is not None:
            dummy_cord = False
            joint_cam[:,:,:3] = joint_cam[:,:,:3] - joint_cam[
                :, self.joint_set['root_joint_idx'], None, :3]  # root-relative
        else:
            # dummy cord as joint_cam
            dummy_cord = True
            joint_cam = np.zeros(
                (num_person, self.joint_set['joint_num'], 3),
                dtype=np.float32)

        joint_img = data['joint_img']

        joint_img_aug, joint_cam_wo_ra, joint_cam_ra, joint_trunc = \
            process_db_coord_batch_no_valid(
                joint_img, joint_cam, do_flip, img_shape,
                self.joint_set['flip_pairs'], img2bb_trans, rot,
                self.joint_set['joints_name'], smpl_x.joints_name,
                cropped_img_shape)

        # smplx coordinates and parameters
        smplx_param = data['smplx_param']
        # smplx_cam_trans = np.array(
        #     smplx_param['trans']) if 'trans' in smplx_param else None
        # TODO: remove this, seperate smpl and smplx
        smplx_pose, smplx_shape, smplx_expr, smplx_pose_valid, \
            smplx_joint_valid, smplx_expr_valid, smplx_shape_valid = \
            process_human_model_output_batch_simplify(
                smplx_param, do_flip, rot, as_smplx)

        # if cam not provided, we take joint_img as smplx joint 2d,
        # which is commonly the case for our processed humandata
        if self.use_betas_neutral:
            smplx_shape = smplx_param['betas_neutral'].reshape(
                num_person, -1)
            smplx_shape[(np.abs(smplx_shape) > 3).any(axis=1)] = 0.
            smplx_shape = smplx_shape.reshape(num_person, -1)

        smplx_joint_valid = smplx_joint_valid[:, :, None]

        lhand_bbox_center_list = []
        lhand_bbox_valid_list = []
        lhand_bbox_size_list = []
        lhand_bbox_list = []
        face_bbox_center_list = []
        face_bbox_size_list = []
        face_bbox_valid_list = []
        face_bbox_list = []
        rhand_bbox_center_list = []
        rhand_bbox_valid_list = []
        rhand_bbox_size_list = []
        rhand_bbox_list = []
        body_bbox_center_list = []
        body_bbox_size_list = []
        body_bbox_valid_list = []
        body_bbox_list = []

        for i in range(num_person):
            lhand_bbox, lhand_bbox_valid = self.process_hand_face_bbox(
                data['lhand_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)
            rhand_bbox, rhand_bbox_valid = self.process_hand_face_bbox(
                data['rhand_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)
            face_bbox, face_bbox_valid = self.process_hand_face_bbox(
                data['face_bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)

            body_bbox, body_bbox_valid = self.process_hand_face_bbox(
                data['bbox'][i], do_flip, img_shape, img2bb_trans,
                cropped_img_shape)

            if do_flip:
                lhand_bbox, rhand_bbox = rhand_bbox, lhand_bbox
                lhand_bbox_valid, rhand_bbox_valid = rhand_bbox_valid, lhand_bbox_valid

            body_bbox_list.append(body_bbox)
            lhand_bbox_list.append(lhand_bbox)
            rhand_bbox_list.append(rhand_bbox)
            face_bbox_list.append(face_bbox)

            lhand_bbox_center = (lhand_bbox[0] + lhand_bbox[1]) / 2.
            rhand_bbox_center = (rhand_bbox[0] + rhand_bbox[1]) / 2.
            face_bbox_center = (face_bbox[0] + face_bbox[1]) / 2.
            body_bbox_center = (body_bbox[0] + body_bbox[1]) / 2.
            lhand_bbox_size = lhand_bbox[1] - lhand_bbox[0]
            rhand_bbox_size = rhand_bbox[1] - rhand_bbox[0]

            face_bbox_size = face_bbox[1] - face_bbox[0]
            body_bbox_size = body_bbox[1] - body_bbox[0]
            lhand_bbox_center_list.append(lhand_bbox_center)
            lhand_bbox_valid_list.append(lhand_bbox_valid)
            lhand_bbox_size_list.append(lhand_bbox_size)
            face_bbox_center_list.append(face_bbox_center)
            face_bbox_size_list.append(face_bbox_size)
            face_bbox_valid_list.append(face_bbox_valid)
            rhand_bbox_center_list.append(rhand_bbox_center)
            rhand_bbox_valid_list.append(rhand_bbox_valid)
            rhand_bbox_size_list.append(rhand_bbox_size)
            body_bbox_center_list.append(body_bbox_center)
            body_bbox_size_list.append(body_bbox_size)
            body_bbox_valid_list.append(body_bbox_valid)

        body_bbox = np.stack(body_bbox_list, axis=0)
        lhand_bbox = np.stack(lhand_bbox_list, axis=0)
        rhand_bbox = np.stack(rhand_bbox_list, axis=0)
        face_bbox = np.stack(face_bbox_list, axis=0)
        lhand_bbox_center = np.stack(lhand_bbox_center_list, axis=0)
        lhand_bbox_valid = np.stack(lhand_bbox_valid_list, axis=0)
        lhand_bbox_size = np.stack(lhand_bbox_size_list, axis=0)
        face_bbox_center = np.stack(face_bbox_center_list, axis=0)
        face_bbox_size = np.stack(face_bbox_size_list, axis=0)
        face_bbox_valid = np.stack(face_bbox_valid_list, axis=0)
        body_bbox_center = np.stack(body_bbox_center_list, axis=0)
        body_bbox_size = np.stack(body_bbox_size_list, axis=0)
        body_bbox_valid = np.stack(body_bbox_valid_list, axis=0)
        rhand_bbox_center = np.stack(rhand_bbox_center_list, axis=0)
        rhand_bbox_valid = np.stack(rhand_bbox_valid_list, axis=0)
        rhand_bbox_size = np.stack(rhand_bbox_size_list, axis=0)

        inputs = {'img': img}

        # Test targets keep every instance (no body-bbox filtering).
        targets = {
            # keypoints2d, [0,img_w],[0,img_h] -> [0,1] -> [0,output_hm_shape]
            'joint_img': joint_img_aug,
            # projected smplx if valid cam_param, else same as keypoints2d
            # joint_cam, kp3d wo ra # raw kps3d probably without ra
            'joint_cam': joint_cam_wo_ra,
            'ann_idx': idx,
            # kps3d with body, face, hand ra
            'smplx_joint_cam': joint_cam_ra,
            'smplx_pose': smplx_pose,
            'smplx_shape': smplx_shape,
            'smplx_expr': smplx_expr,
            'lhand_bbox_center': lhand_bbox_center,
            'lhand_bbox_size': lhand_bbox_size,
            'rhand_bbox_center': rhand_bbox_center,
            'rhand_bbox_size': rhand_bbox_size,
            'face_bbox_center': face_bbox_center,
            'face_bbox_size': face_bbox_size,
            'body_bbox_center': body_bbox_center,
            'body_bbox_size': body_bbox_size,
            'body_bbox': body_bbox.reshape(-1,4),
            'lhand_bbox': lhand_bbox.reshape(-1,4),
            'rhand_bbox': rhand_bbox.reshape(-1,4),
            'face_bbox': face_bbox.reshape(-1,4),
            'gender': gender,
            'bb2img_trans': bb2img_trans,
        }

        # body_only differs only in casting smplx_shape_valid to a scalar float.
        if self.body_only:
            meta_info = {
                'joint_trunc': joint_trunc,
                'smplx_pose_valid': smplx_pose_valid,
                'smplx_shape_valid': float(smplx_shape_valid),
                'smplx_expr_valid': smplx_expr_valid,
                'is_3D': float(False) if dummy_cord else float(True),
                'lhand_bbox_valid': lhand_bbox_valid,
                'rhand_bbox_valid': rhand_bbox_valid,
                'face_bbox_valid': face_bbox_valid,
                'body_bbox_valid': body_bbox_valid,
                'img_shape': np.array(img.shape[:2]),
                'ori_shape':data['img_shape'],
                'idx': idx
            }
        else:
            meta_info = {
                'joint_trunc': joint_trunc,
                'smplx_pose_valid': smplx_pose_valid,
                'smplx_shape_valid': smplx_shape_valid,
                'smplx_expr_valid': smplx_expr_valid,
                'is_3D': float(False) if dummy_cord else float(True),
                'lhand_bbox_valid': lhand_bbox_valid,
                'rhand_bbox_valid': rhand_bbox_valid,
                'face_bbox_valid': face_bbox_valid,
                'body_bbox_valid': body_bbox_valid,
                'img_shape': np.array(img.shape[:2]),
                'ori_shape':data['img_shape'],
                'idx': idx
            }

        result = {**inputs, **targets, **meta_info}
        result = self.normalize(result)
        result = self.format(result)
        return result
def evaluate(self, outs, cur_sample_idx):
|
809 |
+
annots = self.datalist
|
810 |
+
sample_num = len(outs)
|
811 |
+
eval_result = {
|
812 |
+
'pa_mpvpe_all': [],
|
813 |
+
'pa_mpvpe_l_hand': [],
|
814 |
+
'pa_mpvpe_r_hand': [],
|
815 |
+
'pa_mpvpe_hand': [],
|
816 |
+
'pa_mpvpe_face': [],
|
817 |
+
'mpvpe_all': [],
|
818 |
+
'mpvpe_l_hand': [],
|
819 |
+
'mpvpe_r_hand': [],
|
820 |
+
'mpvpe_hand': [],
|
821 |
+
'mpvpe_face': []
|
822 |
+
}
|
823 |
+
|
824 |
+
vis = getattr(cfg, 'vis', False)
|
825 |
+
vis_save_dir = cfg.vis_dir
|
826 |
+
|
827 |
+
csv_file = f'{cfg.result_dir}/agora_smplx_error.csv'
|
828 |
+
file = open(csv_file, 'a', newline='')
|
829 |
+
for n in range(sample_num):
|
830 |
+
annot = annots[cur_sample_idx + n]
|
831 |
+
out = outs[n]
|
832 |
+
mesh_gt = out['smplx_mesh_cam_target']
|
833 |
+
mesh_out = out['smplx_mesh_cam']
|
834 |
+
|
835 |
+
# print('zzz',mesh_gt.shape,mesh_out.shape)
|
836 |
+
# from pytorch3d.io import save_obj
|
837 |
+
# for m_i,(mesh_gt_i,mesh_out_i) in enumerate(zip(mesh_gt,mesh_out)):
|
838 |
+
# save_obj('temp_gt_%d.obj'%m_i,verts=torch.Tensor(mesh_gt_i),faces=torch.tensor([]))
|
839 |
+
# save_obj('temp_pred_%d.obj'%m_i,verts=torch.Tensor(mesh_out_i),faces=torch.tensor([]))
|
840 |
+
|
841 |
+
ann_idx = out['gt_ann_idx']
|
842 |
+
img_path = []
|
843 |
+
for ann_id in ann_idx:
|
844 |
+
img_path.append(annots[ann_id]['img_path'])
|
845 |
+
eval_result['img_path'] = img_path
|
846 |
+
eval_result['ann_idx'] = ann_idx
|
847 |
+
# MPVPE from all vertices
|
848 |
+
mesh_out_align = \
|
849 |
+
mesh_out - np.dot(
|
850 |
+
smpl_x.J_regressor, mesh_out).transpose(1,0,2)[:, smpl_x.J_regressor_idx['pelvis'], None, :] + \
|
851 |
+
np.dot(smpl_x.J_regressor, mesh_gt).transpose(1,0,2)[:, smpl_x.J_regressor_idx['pelvis'], None, :]
|
852 |
+
|
853 |
+
eval_result['mpvpe_all'].extend(
|
854 |
+
np.sqrt(np.sum(
|
855 |
+
(mesh_out_align - mesh_gt)**2, -1)).mean(-1) * 1000)
|
856 |
+
mesh_out_align = rigid_align_batch(mesh_out, mesh_gt)
|
857 |
+
eval_result['pa_mpvpe_all'].extend(
|
858 |
+
np.sqrt(np.sum(
|
859 |
+
(mesh_out_align - mesh_gt)**2, -1)).mean(-1) * 1000)
|
860 |
+
|
861 |
+
# MPVPE from hand vertices
|
862 |
+
mesh_gt_lhand = mesh_gt[:, smpl_x.hand_vertex_idx['left_hand'], :]
|
863 |
+
mesh_out_lhand = mesh_out[:, smpl_x.hand_vertex_idx['left_hand'], :]
|
864 |
+
mesh_gt_rhand = mesh_gt[:, smpl_x.hand_vertex_idx['right_hand'], :]
|
865 |
+
mesh_out_rhand = mesh_out[:, smpl_x.hand_vertex_idx['right_hand'], :]
|
866 |
+
mesh_out_lhand_align = \
|
867 |
+
mesh_out_lhand - \
|
868 |
+
np.dot(smpl_x.J_regressor, mesh_out).transpose(1,0,2)[:, smpl_x.J_regressor_idx['lwrist'], None, :] + \
|
869 |
+
np.dot(smpl_x.J_regressor, mesh_gt).transpose(1,0,2)[:, smpl_x.J_regressor_idx['lwrist'], None, :]
|
870 |
+
|
871 |
+
mesh_out_rhand_align = \
|
872 |
+
mesh_out_rhand - \
|
873 |
+
np.dot(smpl_x.J_regressor, mesh_out).transpose(1,0,2)[:, smpl_x.J_regressor_idx['rwrist'], None, :] + \
|
874 |
+
np.dot(smpl_x.J_regressor, mesh_gt).transpose(1,0,2)[:, smpl_x.J_regressor_idx['rwrist'], None, :]
|
875 |
+
|
876 |
+
eval_result['mpvpe_l_hand'].extend(
|
877 |
+
np.sqrt(np.sum(
|
878 |
+
(mesh_out_lhand_align - mesh_gt_lhand)**2, -1)).mean(-1) *
|
879 |
+
1000)
|
880 |
+
eval_result['mpvpe_r_hand'].extend(
|
881 |
+
np.sqrt(np.sum(
|
882 |
+
(mesh_out_rhand_align - mesh_gt_rhand)**2, -1)).mean(-1) *
|
883 |
+
1000)
|
884 |
+
eval_result['mpvpe_hand'].extend(
|
885 |
+
(np.sqrt(np.sum(
|
886 |
+
(mesh_out_lhand_align - mesh_gt_lhand)**2, -1)).mean(-1) *
|
887 |
+
1000 +
|
888 |
+
np.sqrt(np.sum(
|
889 |
+
(mesh_out_rhand_align - mesh_gt_rhand)**2, -1)).mean(-1) *
|
890 |
+
1000) / 2.)
|
891 |
+
mesh_out_lhand_align = rigid_align_batch(mesh_out_lhand, mesh_gt_lhand)
|
892 |
+
mesh_out_rhand_align = rigid_align_batch(mesh_out_rhand, mesh_gt_rhand)
|
893 |
+
eval_result['pa_mpvpe_l_hand'].extend(
|
894 |
+
np.sqrt(np.sum(
|
895 |
+
(mesh_out_lhand_align - mesh_gt_lhand)**2, -1)).mean(-1) *
|
896 |
+
1000)
|
897 |
+
eval_result['pa_mpvpe_r_hand'].extend(
|
898 |
+
np.sqrt(np.sum(
|
899 |
+
(mesh_out_rhand_align - mesh_gt_rhand)**2, -1)).mean(-1) *
|
900 |
+
1000)
|
901 |
+
eval_result['pa_mpvpe_hand'].extend(
|
902 |
+
(np.sqrt(np.sum(
|
903 |
+
(mesh_out_lhand_align - mesh_gt_lhand)**2, -1)).mean(-1) *
|
904 |
+
1000 +
|
905 |
+
np.sqrt(np.sum(
|
906 |
+
(mesh_out_rhand_align - mesh_gt_rhand)**2, -1)).mean(-1) *
|
907 |
+
1000) / 2.)
|
908 |
+
|
909 |
+
|
910 |
+
save_error=True
|
911 |
+
if save_error:
|
912 |
+
writer = csv.writer(file)
|
913 |
+
new_line = [ann_idx[n],img_path[n], eval_result['mpvpe_all'][-1], eval_result['pa_mpvpe_all'][-1]]
|
914 |
+
writer.writerow(new_line)
|
915 |
+
self.save_idx += 1
|
916 |
+
|
917 |
+
|
918 |
+
return eval_result
|
919 |
+
|
920 |
+
|
921 |
+
def print_eval_result(self, eval_result):
|
922 |
+
|
923 |
+
print('AGORA test results are dumped at: ' +
|
924 |
+
osp.join(cfg.result_dir, 'predictions'))
|
925 |
+
|
926 |
+
if self.data_split == 'test' and self.test_set == 'test': # do not print. just submit the results to the official evaluation server
|
927 |
+
return
|
928 |
+
|
929 |
+
print('======AGORA-val======')
|
930 |
+
print('PA MPVPE (All): %.2f mm' % np.mean(eval_result['pa_mpvpe_all']))
|
931 |
+
print('PA MPVPE (L-Hands): %.2f mm' %
|
932 |
+
np.mean(eval_result['pa_mpvpe_l_hand']))
|
933 |
+
print('PA MPVPE (R-Hands): %.2f mm' %
|
934 |
+
np.mean(eval_result['pa_mpvpe_r_hand']))
|
935 |
+
print('PA MPVPE (Hands): %.2f mm' %
|
936 |
+
np.mean(eval_result['pa_mpvpe_hand']))
|
937 |
+
print('PA MPVPE (Face): %.2f mm' %
|
938 |
+
np.mean(eval_result['pa_mpvpe_face']))
|
939 |
+
print()
|
940 |
+
|
941 |
+
print('MPVPE (All): %.2f mm' % np.mean(eval_result['mpvpe_all']))
|
942 |
+
print('MPVPE (L-Hands): %.2f mm' %
|
943 |
+
np.mean(eval_result['mpvpe_l_hand']))
|
944 |
+
print('MPVPE (R-Hands): %.2f mm' %
|
945 |
+
np.mean(eval_result['mpvpe_r_hand']))
|
946 |
+
print('MPVPE (Hands): %.2f mm' % np.mean(eval_result['mpvpe_hand']))
|
947 |
+
print('MPVPE (Face): %.2f mm' % np.mean(eval_result['mpvpe_face']))
|
948 |
+
|
949 |
+
out_file = osp.join(cfg.result_dir,'agora_val.txt')
|
950 |
+
if os.path.exists(out_file):
|
951 |
+
f = open(out_file, 'a+')
|
952 |
+
else:
|
953 |
+
f = open(out_file, 'w', encoding="utf-8")
|
954 |
+
|
955 |
+
f.write('\n')
|
956 |
+
f.write(f'{cfg.exp_name}\n')
|
957 |
+
f.write(f'AGORA-val dataset: \n')
|
958 |
+
f.write('PA MPVPE (All): %.2f mm\n' %
|
959 |
+
np.mean(eval_result['pa_mpvpe_all']))
|
960 |
+
f.write('PA MPVPE (L-Hands): %.2f mm\n' %
|
961 |
+
np.mean(eval_result['pa_mpvpe_l_hand']))
|
962 |
+
f.write('PA MPVPE (R-Hands): %.2f mm\n' %
|
963 |
+
np.mean(eval_result['pa_mpvpe_r_hand']))
|
964 |
+
f.write('PA MPVPE (Hands): %.2f mm\n' %
|
965 |
+
np.mean(eval_result['pa_mpvpe_hand']))
|
966 |
+
f.write('PA MPVPE (Face): %.2f mm\n' %
|
967 |
+
np.mean(eval_result['pa_mpvpe_face']))
|
968 |
+
f.write('MPVPE (All): %.2f mm\n' % np.mean(eval_result['mpvpe_all']))
|
969 |
+
f.write('MPVPE (L-Hands): %.2f mm\n' %
|
970 |
+
np.mean(eval_result['mpvpe_l_hand']))
|
971 |
+
f.write('MPVPE (R-Hands): %.2f mm\n' %
|
972 |
+
np.mean(eval_result['mpvpe_r_hand']))
|
973 |
+
f.write('MPVPE (Hands): %.2f mm\n' % np.mean(eval_result['mpvpe_hand']))
|
974 |
+
f.write('MPVPE (Face): %.2f mm\n' % np.mean(eval_result['mpvpe_face']))
|
datasets/ARCTIC.py
ADDED
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import os.path as osp
|
3 |
+
from glob import glob
|
4 |
+
import numpy as np
|
5 |
+
from config.config import cfg
|
6 |
+
|
7 |
+
import csv
|
8 |
+
|
9 |
+
from util.human_models import smpl_x
|
10 |
+
|
11 |
+
from util.transforms import rigid_align_batch
|
12 |
+
|
13 |
+
from humandata import HumanDataset
|
14 |
+
|
15 |
+
class ARCTIC(HumanDataset):
    """ARCTIC dataset wrapper: loads preprocessed annotations and evaluates
    SMPL-X vertex errors (all / hands / face) against ground truth."""

    def __init__(self, transform, data_split):
        """Set up annotation paths for the split and load (or build) the cache.

        Args:
            transform: image/label transform pipeline passed to HumanDataset.
            data_split: 'train' or 'test'.
        """
        super(ARCTIC, self).__init__(transform, data_split)

        self.img_dir = 'data/osx_data/ARCTIC'

        if data_split == 'train':
            self.annot_path = 'data/preprocessed_npz/multihuman_data/p1_train_multi.npz'
            self.annot_path_cache = 'data/preprocessed_npz/cache/p1_train_cache_sample1000_080824.npz'
            self.sample_interval = 1000
        elif data_split == 'test':
            self.annot_path = 'data/preprocessed_npz_old/multihuman_data/p1_val_multi.npz'
            self.annot_path_cache = 'data/preprocessed_npz_old/cache/p1_val_cache_30.npz'
            self.sample_interval = 30

        self.use_cache = getattr(cfg, 'use_cache', False)
        self.img_shape = None  # (h, w); not known up front for ARCTIC images
        self.cam_param = {}
        # NOTE(review): this forces caching on and makes the cfg-derived
        # value above dead — looks like a debug leftover; kept to preserve
        # behaviour, but confirm whether cfg.use_cache should win.
        self.use_cache = True

        # Load the datalist: from cache when present, otherwise parse the
        # annotation npz (sub-sampled) and write the cache for next time.
        if self.use_cache and osp.isfile(self.annot_path_cache):
            print(
                f'[{self.__class__.__name__}] loading cache from {self.annot_path_cache}'
            )
            self.datalist = self.load_cache(self.annot_path_cache)
        else:
            if self.use_cache:
                print(
                    f'[{self.__class__.__name__}] Cache not found, generating cache...'
                )
            self.datalist = self.load_data(train_sample_interval=getattr(
                cfg, f'{self.__class__.__name__}_train_sample_interval', self.sample_interval))
            if self.use_cache:
                self.save_cache(self.annot_path_cache, self.datalist)

    def evaluate(self, outs, cur_sample_idx):
        """Compute MPVPE / PA-MPVPE (all, hands, face) for one batch.

        Args:
            outs: list of per-sample prediction dicts with 'smplx_mesh_cam',
                'smplx_mesh_cam_target' and 'gt_ann_idx'.
            cur_sample_idx: offset of outs[0] inside self.datalist.

        Returns:
            dict mapping metric name -> list of scalar errors in mm.
        """
        annots = self.datalist
        sample_num = len(outs)
        eval_result = {
            'pa_mpvpe_all': [],
            'pa_mpvpe_l_hand': [],
            'pa_mpvpe_r_hand': [],
            'pa_mpvpe_hand': [],
            'pa_mpvpe_face': [],
            'mpvpe_all': [],
            'mpvpe_l_hand': [],
            'mpvpe_r_hand': [],
            'mpvpe_hand': [],
            'mpvpe_face': []
        }

        vis = getattr(cfg, 'vis', False)
        vis_save_dir = cfg.vis_dir

        def _joint(mesh, name):
            # Regress one named joint, shaped (N, 1, 3), as the translation
            # reference when aligning prediction to GT.
            return np.dot(smpl_x.J_regressor, mesh).transpose(1, 0, 2)[
                :, smpl_x.J_regressor_idx[name], None, :]

        def _v2v(pred, gt):
            # Scalar mean vertex-to-vertex Euclidean distance, in mm.
            return np.sqrt(np.sum((pred - gt) ** 2, -1)).mean() * 1000

        csv_file = f'{cfg.result_dir}/arctic_smplx_error.csv'
        # Context manager: the original opened the CSV on every call and
        # never closed it, leaking one handle per evaluate() invocation.
        with open(csv_file, 'a', newline='') as file:
            writer = csv.writer(file)
            for n in range(sample_num):
                out = outs[n]
                mesh_gt = out['smplx_mesh_cam_target']
                mesh_out = out['smplx_mesh_cam']
                ann_idx = out['gt_ann_idx']
                img_path = [annots[ann_id]['img_path'] for ann_id in ann_idx]
                # Overwritten each iteration; only the last sample's list
                # survives (kept identical to the original behaviour).
                eval_result['img_path'] = img_path

                # All vertices: pelvis-aligned MPVPE, then Procrustes PA-MPVPE.
                mesh_out_align = (mesh_out - _joint(mesh_out, 'pelvis')
                                  + _joint(mesh_gt, 'pelvis'))
                eval_result['mpvpe_all'].append(_v2v(mesh_out_align, mesh_gt))
                mesh_out_align = rigid_align_batch(mesh_out, mesh_gt)
                eval_result['pa_mpvpe_all'].append(_v2v(mesh_out_align, mesh_gt))

                # Hand vertices, wrist-aligned.
                mesh_gt_lhand = mesh_gt[:, smpl_x.hand_vertex_idx['left_hand'], :]
                mesh_out_lhand = mesh_out[:, smpl_x.hand_vertex_idx['left_hand'], :]
                mesh_gt_rhand = mesh_gt[:, smpl_x.hand_vertex_idx['right_hand'], :]
                mesh_out_rhand = mesh_out[:, smpl_x.hand_vertex_idx['right_hand'], :]
                mesh_out_lhand_align = (mesh_out_lhand - _joint(mesh_out, 'lwrist')
                                        + _joint(mesh_gt, 'lwrist'))
                mesh_out_rhand_align = (mesh_out_rhand - _joint(mesh_out, 'rwrist')
                                        + _joint(mesh_gt, 'rwrist'))

                lhand_err = _v2v(mesh_out_lhand_align, mesh_gt_lhand)
                rhand_err = _v2v(mesh_out_rhand_align, mesh_gt_rhand)
                eval_result['mpvpe_l_hand'].append(lhand_err)
                eval_result['mpvpe_r_hand'].append(rhand_err)
                eval_result['mpvpe_hand'].append((lhand_err + rhand_err) / 2.)

                # PA-MPVPE: each hand Procrustes-aligned independently.
                mesh_out_lhand_align = rigid_align_batch(mesh_out_lhand, mesh_gt_lhand)
                mesh_out_rhand_align = rigid_align_batch(mesh_out_rhand, mesh_gt_rhand)
                lhand_err = _v2v(mesh_out_lhand_align, mesh_gt_lhand)
                rhand_err = _v2v(mesh_out_rhand_align, mesh_gt_rhand)
                eval_result['pa_mpvpe_l_hand'].append(lhand_err)
                eval_result['pa_mpvpe_r_hand'].append(rhand_err)
                eval_result['pa_mpvpe_hand'].append((lhand_err + rhand_err) / 2.)

                # Face vertices, neck-aligned.
                mesh_gt_face = mesh_gt[:, smpl_x.face_vertex_idx, :]
                mesh_out_face = mesh_out[:, smpl_x.face_vertex_idx, :]
                mesh_out_face_align = (mesh_out_face - _joint(mesh_out, 'neck')
                                       + _joint(mesh_gt, 'neck'))
                eval_result['mpvpe_face'].append(_v2v(mesh_out_face_align, mesh_gt_face))
                mesh_out_face_align = rigid_align_batch(mesh_out_face, mesh_gt_face)
                eval_result['pa_mpvpe_face'].append(_v2v(mesh_out_face_align, mesh_gt_face))

                # NOTE(review): img_path[n] mixes the batch index n with the
                # per-sample annotation list; the indices are unrelated.
                # Kept as in the original — confirm whether img_path[-1]
                # (or a per-annotation row) was intended.
                writer.writerow([ann_idx[n], img_path[n],
                                 eval_result['mpvpe_all'][-1],
                                 eval_result['pa_mpvpe_all'][-1]])
        return eval_result

    def print_eval_result(self, eval_result):
        """Print ARCTIC-val metrics and append them to <result_dir>/arctic_val.txt."""
        # (label, eval_result key) tables shared by stdout and file output.
        pa_metrics = [
            ('PA MPVPE (All)', 'pa_mpvpe_all'),
            ('PA MPVPE (L-Hands)', 'pa_mpvpe_l_hand'),
            ('PA MPVPE (R-Hands)', 'pa_mpvpe_r_hand'),
            ('PA MPVPE (Hands)', 'pa_mpvpe_hand'),
            ('PA MPVPE (Face)', 'pa_mpvpe_face'),
        ]
        metrics = [
            ('MPVPE (All)', 'mpvpe_all'),
            ('MPVPE (L-Hands)', 'mpvpe_l_hand'),
            ('MPVPE (R-Hands)', 'mpvpe_r_hand'),
            ('MPVPE (Hands)', 'mpvpe_hand'),
            ('MPVPE (Face)', 'mpvpe_face'),
        ]

        print('======ARCTIC-val======')
        for label, key in pa_metrics:
            print('%s: %.2f mm' % (label, np.mean(eval_result[key])))
        print()
        for label, key in metrics:
            print('%s: %.2f mm' % (label, np.mean(eval_result[key])))

        out_file = osp.join(cfg.result_dir, 'arctic_val.txt')
        # 'a' creates the file when missing, so the original exists()-branch
        # was redundant; the context manager also closes the handle the
        # original leaked.
        with open(out_file, 'a', encoding="utf-8") as f:
            f.write('\n')
            f.write(f'{cfg.exp_name}\n')
            f.write(f'ARCTIC-val dataset: \n')
            for label, key in pa_metrics + metrics:
                f.write('%s: %.2f mm\n' % (label, np.mean(eval_result[key])))
|
datasets/BEDLAM.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os.path as osp
|
2 |
+
from config.config import cfg
|
3 |
+
from humandata import HumanDataset
|
4 |
+
|
5 |
+
|
6 |
+
class BEDLAM(HumanDataset):
    """BEDLAM training split backed by a preprocessed multi-human npz.

    On construction the annotation list is restored from a cache file when
    caching is enabled and the cache exists; otherwise the raw annotation
    npz is parsed (optionally sub-sampled) and, if caching is enabled, the
    result is written back as a cache for subsequent runs.
    """

    def __init__(self, transform, data_split):
        super(BEDLAM, self).__init__(transform, data_split)

        self.img_dir = './data/datasets/bedlam/train_images/'
        self.annot_path = 'data/preprocessed_npz/multihuman_data/bedlam_train_multi_0915.npz'
        self.annot_path_cache = 'data/preprocessed_npz/cache/bedlam_train_cache_080824.npz'
        self.use_cache = getattr(cfg, 'use_cache', False)

        # Image shape is not known up front; camera params filled elsewhere.
        self.img_shape = None  # (h, w)
        self.cam_param = {}

        # Fast path: restore the datalist from the cache file.
        if self.use_cache and osp.isfile(self.annot_path_cache):
            print(
                f'[{self.__class__.__name__}] loading cache from {self.annot_path_cache}'
            )
            self.datalist = self.load_cache(self.annot_path_cache)
            return

        # Slow path: parse annotations, then persist a cache if requested.
        if self.use_cache:
            print(
                f'[{self.__class__.__name__}] Cache not found, generating cache...'
            )
        interval = getattr(
            cfg, f'{self.__class__.__name__}_train_sample_interval', 5)
        self.datalist = self.load_data(train_sample_interval=interval)
        if self.use_cache:
            self.save_cache(self.annot_path_cache, self.datalist)
|