Spaces: vztu / Runtime error

nanushio committed on
Commit feb2918
1 Parent(s): 2665db2

+ [MAJOR] [ROOT] [CREATE] 1. fork repo from COVER github

Files changed (49)
  1. .gitignore +164 -0
  2. LICENSE +21 -0
  3. README copy.md +163 -0
  4. _config.yaml +25 -0
  5. cover.yml +236 -0
  6. cover/__init__.py +2 -0
  7. cover/datasets/__init__.py +3 -0
  8. cover/datasets/basic_datasets.py +812 -0
  9. cover/datasets/cover_datasets.py +442 -0
  10. cover/models/__init__.py +17 -0
  11. cover/models/backbone_get_attention.py +990 -0
  12. cover/models/backbone_v0_1.py +862 -0
  13. cover/models/clip_model.py +640 -0
  14. cover/models/clipiqa_arch.py +165 -0
  15. cover/models/constants.py +8 -0
  16. cover/models/conv_backbone.py +651 -0
  17. cover/models/evaluator.py +374 -0
  18. cover/models/head.py +101 -0
  19. cover/models/swin_backbone.py +1097 -0
  20. cover/models/xclip_backbone.py +902 -0
  21. cover/version.py +16 -0
  22. demo/video_1.mp4 +0 -0
  23. demo/video_2.mp4 +0 -0
  24. evaluate_a_set_of_videos.py +119 -0
  25. evaluate_one_dataset.py +190 -0
  26. evaluate_one_video.py +105 -0
  27. examplar_data_labels/CVD2014/labels.txt +234 -0
  28. examplar_data_labels/DIVIDE_MaxWell/train_labels.txt +0 -0
  29. examplar_data_labels/DIVIDE_MaxWell/val_labels.txt +909 -0
  30. examplar_data_labels/KoNViD/labels.txt +1200 -0
  31. examplar_data_labels/KoNiQ10k/test_labels.txt +2015 -0
  32. examplar_data_labels/KoNiQ10k/training_labels.txt +0 -0
  33. examplar_data_labels/KoNiQ10k/validation_labels.txt +1000 -0
  34. examplar_data_labels/LIVE_Qualcomm/labels.txt +208 -0
  35. examplar_data_labels/LIVE_Qualcomm/mp4labels.txt +208 -0
  36. examplar_data_labels/LIVE_VQA/labels.txt +148 -0
  37. examplar_data_labels/LIVE_VQA/names.txt +150 -0
  38. examplar_data_labels/LIVE_VQA/scores.txt +150 -0
  39. examplar_data_labels/LIVE_VQC/labels.txt +585 -0
  40. examplar_data_labels/LSVQ/labels.txt +0 -0
  41. examplar_data_labels/LSVQ/labels_1080p.txt +0 -0
  42. examplar_data_labels/LSVQ/labels_test.txt +0 -0
  43. examplar_data_labels/PIPAL/labels.txt +0 -0
  44. examplar_data_labels/PIPAL_NTIRE22/labels.txt +1650 -0
  45. examplar_data_labels/YouTubeUGC/labels.txt +1147 -0
  46. examplar_data_labels/train_labels.txt +0 -0
  47. requirements.txt +15 -0
  48. setup.py +53 -0
  49. train_one_dataset.py +616 -0
.gitignore ADDED
@@ -0,0 +1,164 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ pip-wheel-metadata/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ .idea/
+
+ # VSCode
+ .vscode/
+
+ # JupyterLab
+ .jupyterlab/
+
+ # Data directories (you might want to keep them in version control, depending on your project)
+ #/data
+ #/models
+
+ # Log files
+ *.log
+
+ # Others
+ .DS_Store
+ .pretrained_weights/*
+ .datasets/
+ datasets/KoNViD
+ datasets/LIVE_VQC
+ datasets/YouTubeUGC
+ *.pth
+ *.swp
+ *.bak
+ *.tmp
+ *.temp
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Zhengzhong Tu
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README copy.md ADDED
@@ -0,0 +1,163 @@
+ # COVER
+
+ Official code for the [CVPR Workshop 2024] paper *"COVER: A Comprehensive Video Quality Evaluator"*.
+ Official code, demo, and weights for the [Comprehensive Video Quality Evaluator (COVER)].
+
+ # Todo:: update date, hugging face model below
+ - xx xxx, 2024: We upload the weights of [COVER](https://github.com/vztu/COVER/release/Model/COVER.pth) and [COVER++](TobeContinue) to Hugging Face models.
+ - xx xxx, 2024: We upload the code of [COVER](https://github.com/vztu/COVER).
+ - 12 Apr, 2024: COVER has been accepted by the CVPR 2024 Workshop.
+
+
+ # Todo:: update [visitors](link) below
+ ![visitors](https://visitor-badge.laobi.icu/badge?page_id=teowu/TobeContinue) [![](https://img.shields.io/github/stars/vztu/COVER)](https://github.com/vztu/COVER)
+ [![State-of-the-Art](https://cdn.rawgit.com/sindresorhus/awesome/d7305f38d29fed78fa85652e3a63e154dd8e8829/media/badge.svg)](https://github.com/QualityAssessment/COVER)
+ <a href="https://colab.research.google.com/github/taskswithcode/COVER/blob/master/TWCCOVER.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="google colab logo"></a>
+
+
+ # Todo:: update predicted score for YT-UGC challenge dataset specified by AIS
+ **COVER** pseudo-labelled quality scores of [YT-UGC](https://www.deepmind.com/open-source/kinetics): [CSV](https://github.com/QualityAssessment/COVER/raw/master/cover_predictions/kinetics_400_1.csv)
+
+
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/disentangling-aesthetic-and-technical-effects/video-quality-assessment-on-youtube-ugc)](https://paperswithcode.com/sota/video-quality-assessment-on-youtube-ugc?p=disentangling-aesthetic-and-technical-effects)
+
+
+ ## Introduction
+ # Todo:: Add Introduction here
+
+ ### The proposed COVER
+
+ *This inspires us to*
+
+ ![Fig](figs/approach.png)
+
+ ## Install
+
+ The repository can be installed via the following commands:
+ ```shell
+ git clone https://github.com/vztu/COVER
+ cd COVER
+ pip install -e .
+ mkdir pretrained_weights
+ cd pretrained_weights
+ wget https://github.com/vztu/COVER/release/Model/COVER.pth
+ cd ..
+ ```
+
+
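+ After installation, a quick import check confirms that the editable install and the downloaded weights are in place. The snippet below is only a minimal sketch that assumes the package layout of this commit (a `cover` package with `datasets` and `models` submodules); adjust the weight path if you stored it elsewhere.
+
+ ```python
+ import os
+
+ import cover                         # installed above via `pip install -e .`
+ from cover import datasets, models   # dataset and model definitions from this repo
+
+ print("cover package loaded from:", os.path.dirname(cover.__file__))
+ print("pretrained weights present:", os.path.isfile("pretrained_weights/COVER.pth"))
+ ```
+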
+ ## Evaluation: Judge the Quality of Any Video
+
+ ### Try on Demos
+ You can run a single command to judge the quality of the demo videos, in comparison with videos in VQA datasets.
+
+ ```shell
+ python evaluate_one_video.py -v ./demo/video_1.mp4
+ ```
+
+ or
+
+ ```shell
+ python evaluate_one_video.py -v ./demo/video_2.mp4
+ ```
+
+ Or choose any video you like to predict its quality:
+
+
+ ```shell
+ python evaluate_one_video.py -v $YOUR_SPECIFIED_VIDEO_PATH$
+ ```
+
+ ### Outputs
+
+ #### ITU-Standardized Overall Video Quality Score
+
+ The script can directly score the video's overall quality (considering all perspectives).
+
+ ```shell
+ python evaluate_one_video.py -v $YOUR_SPECIFIED_VIDEO_PATH$
+ ```
+
+ The final output score is averaged across all perspectives.
+
+
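+ As a rough illustration of what "averaged across all perspectives" means, the sketch below fuses per-branch scores (semantic, technical, and aesthetic, the three branches configured in `cover.yml`) into one overall score. The variable names and the plain arithmetic mean are assumptions for illustration only; see `evaluate_one_video.py` for the actual output logic.
+
+ ```python
+ # Hypothetical per-branch predictions for one video (not real outputs).
+ branch_scores = {
+     "semantic": 0.41,
+     "technical": -0.12,
+     "aesthetic": 0.27,
+ }
+
+ # The final score reported above is the average over all perspectives.
+ overall = sum(branch_scores.values()) / len(branch_scores)
+ print(f"overall quality score: {overall:.4f}")
+ ```
+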
+ ## Evaluate on an Existing Video Dataset
+
+
+ ```shell
+ python evaluate_one_dataset.py -in $YOUR_SPECIFIED_DIR$ -out $OUTPUT_CSV_PATH$
+ ```
+
+ ## Evaluate on a Set of Unlabelled Videos
+
+
+ ```shell
+ python evaluate_a_set_of_videos.py -in $YOUR_SPECIFIED_DIR$ -out $OUTPUT_CSV_PATH$
+ ```
+
+ The results are stored as `.csv` files under `cover_predictions` in your `OUTPUT_CSV_PATH`.
+
+ Please feel free to use COVER to pseudo-label your non-quality video datasets.
+
+
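+ If you prefer to stay with the single-video entry point, a small driver can loop over a folder and call `evaluate_one_video.py` per file. This is only a sketch (it assumes the `-v` flag shown above and a few common video extensions); for large folders, the dedicated `evaluate_a_set_of_videos.py` script above is the intended route.
+
+ ```python
+ import pathlib
+ import subprocess
+ import sys
+
+ video_dir = pathlib.Path(sys.argv[1] if len(sys.argv) > 1 else "./demo")
+ exts = {".mp4", ".mkv", ".avi", ".mov", ".webm"}
+
+ for video in sorted(p for p in video_dir.iterdir() if p.suffix.lower() in exts):
+     print(f"== scoring {video.name} ==")
+     # Reuses the documented single-video interface.
+     subprocess.run([sys.executable, "evaluate_one_video.py", "-v", str(video)], check=True)
+ ```
+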
+ ## Data Preparation
+
+ We have already converted the labels for the most popular datasets you will need for blind video quality assessment,
+ and the download links for the **videos** are as follows:
+
+ :book: LSVQ: [Github](https://github.com/baidut/PatchVQ)
+
+ :book: KoNViD-1k: [Official Site](http://database.mmsp-kn.de/konvid-1k-database.html)
+
+ :book: LIVE-VQC: [Official Site](http://live.ece.utexas.edu/research/LIVEVQC)
+
+ :book: YouTube-UGC: [Official Site](https://media.withyoutube.com)
+
+ *(Please contact the original authors if the download links are unavailable.)*
+
+ After downloading, kindly put them under `../datasets` (or anywhere else), but remember to change the corresponding `data_prefix` entries in the [config file](cover.yml).
+
+ # Training: Adapt COVER to your video quality dataset!
+
+ Now you can employ ***head-only/end-to-end transfer*** of COVER to get dataset-specific VQA prediction heads.
+
+ We still recommend **head-only** transfer. As evaluated in the paper, it performs very similarly to *end-to-end transfer* (usually a 1%~2% difference) but requires **much less** GPU memory:
+
+ ```shell
+ python transfer_learning.py -t $YOUR_SPECIFIED_DATASET_NAME$
+ ```
+
+ For the existing public datasets, type the following commands for the respective ones:
+
+ - `python transfer_learning.py -t val-kv1k` for KoNViD-1k.
+ - `python transfer_learning.py -t val-ytugc` for YouTube-UGC.
+ - `python transfer_learning.py -t val-cvd2014` for CVD2014.
+ - `python transfer_learning.py -t val-livevqc` for LIVE-VQC.
+
+
+ As the backbone is not updated here, the checkpoint saving process only stores the regression heads, with a file size of only `398KB` (compared with the `200+MB` full model). To use such a checkpoint, load the official weights [COVER.pth](https://github.com/vztu/COVER/release/Model/COVER.pth) and replace their head weights with the fine-tuned ones, as sketched below.
+
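+ The merge itself is a small state-dict operation. The snippet below is only a sketch of the idea: it assumes both checkpoints are plain `torch.save` state dicts and that the regression-head parameters are the keys containing `head`, which may differ from the actual key layout in this repository.
+
+ ```python
+ import torch
+
+ # Assumed file names: the official full model and a head-only transfer checkpoint.
+ full_sd = torch.load("pretrained_weights/COVER.pth", map_location="cpu")
+ head_sd = torch.load("head_only_checkpoint.pth", map_location="cpu")
+
+ # Overwrite only the regression-head entries; the backbone weights stay official.
+ merged = dict(full_sd)
+ merged.update({k: v for k, v in head_sd.items() if "head" in k})
+
+ torch.save(merged, "pretrained_weights/COVER_custom_head.pth")
+ print(f"replaced {sum('head' in k for k in head_sd)} head tensors")
+ ```
+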
+ We also support ***end-to-end*** fine-tuning (by changing `num_epochs: 0` to `num_epochs: 15` in `./cover.yml`). It requires more GPU memory and more storage for the saved weights (full parameters), but yields the best accuracy.
+
+ The authors' fine-tuning curves are available here for reference: [Official Curves](https://wandb.ai/timothyhwu/COVER).
+
+
+ ## Visualization
+
+ ### WandB Training and Evaluation Curves
+
+ You can monitor your results on WandB!
+
+ ## Acknowledgement
+
+ Thanks to every participant of the subjective studies!
+
+ ## Citation
+
+ Should you find our work interesting and want to cite it, please feel free to add these to your references!
+
+
+ # Todo, add bibtex of cover below
+ ```bibtex
+ %cover
+
+ ```
_config.yaml ADDED
@@ -0,0 +1,25 @@
+ theme: minima
+
+
+ encoding: "utf-8"
+ markdown_ext: "markdown,mkdown,mkdn,mkd,md"
+
+
+ # Conversion
+ markdown: kramdown
+ highlighter: rouge
+ lsi: false
+ excerpt_separator: "\n\n"
+ incremental: false
+
+
+ # Markdown Processing
+ kramdown:
+   input: GFM
+   hard_wrap: false
+   auto_ids: true
+   footnote_nr: 1
+   entity_output: as_char
+   toc_levels: 1..6
+   smart_quotes: lsquo,rsquo,ldquo,rdquo
+   enable_coderay: false
cover.yml ADDED
@@ -0,0 +1,236 @@
+ name: COVER
+ num_epochs: 0
+ l_num_epochs: 10
+ warmup_epochs: 2.5
+ ema: true
+ save_model: true
+ batch_size: 8
+ num_workers: 6
+ split_seed: 42
+
+ wandb:
+   project_name: COVER
+
+ data:
+   val-livevqc:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.598
+       phase: test
+       anno_file: ./examplar_data_labels/LIVE_VQC/labels.txt
+       data_prefix: ./datasets/LIVE_VQC/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+   val-kv1k:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.540
+       phase: test
+       anno_file: ./examplar_data_labels/KoNViD/labels.txt
+       data_prefix: ./datasets/KoNViD/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+   val-ltest:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.603
+       phase: test
+       anno_file: ./examplar_data_labels/LSVQ/labels_test.txt
+       data_prefix: ./datasets/LSVQ/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+   val-l1080p:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.620
+       phase: test
+       anno_file: ./examplar_data_labels/LSVQ/labels_1080p.txt
+       data_prefix: ./datasets/LSVQ/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+   val-cvd2014:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.576
+       phase: test
+       anno_file: ./examplar_data_labels/CVD2014/labels.txt
+       data_prefix: ./datasets/CVD2014/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+   val-ytugc:
+     type: ViewDecompositionDataset
+     args:
+       weight: 0.443
+       phase: test
+       anno_file: ./examplar_data_labels/YouTubeUGC/labels.txt
+       data_prefix: ./dataset/YouTubeUGC/ # revert before submit
+       sample_types:
+         semantic:
+           size_h: 512
+           size_w: 512
+           clip_len: 20
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+         technical:
+           fragments_h: 7
+           fragments_w: 7
+           fsize_h: 32
+           fsize_w: 32
+           aligned: 40
+           clip_len: 40
+           t_frag: 20
+           frame_interval: 2
+           num_clips: 1
+         aesthetic:
+           size_h: 224
+           size_w: 224
+           clip_len: 40
+           frame_interval: 2
+           t_frag: 20
+           num_clips: 1
+
+ model:
+   type: COVER
+   args:
+     backbone:
+       technical:
+         type: swin_tiny_grpb
+         checkpoint: true
+         pretrained:
+       aesthetic:
+         type: conv_tiny
+       semantic:
+         type: clip_iqa+
+     backbone_preserve_keys: technical,aesthetic,semantic
+     divide_head: true
+     vqa_head:
+       in_channels: 768
+       hidden_channels: 64
+
+ optimizer:
+   lr: !!float 1e-3
+   backbone_lr_mult: !!float 1e-1
+   wd: 0.05
+
+ test_load_path: ./pretrained_weights/COVER.pth # revert before submit
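
For reference, the config above can be inspected programmatically before training or evaluation. A minimal sketch, assuming `pyyaml` is installed and the file sits at the repository root:

```python
import yaml

with open("cover.yml") as f:
    opt = yaml.safe_load(f)

print("end-to-end epochs:", opt["num_epochs"], "| head-only epochs:", opt["l_num_epochs"])
print("branches:", opt["model"]["args"]["backbone_preserve_keys"])

# Each entry under `data` maps to a ViewDecompositionDataset; its `args` dict
# is what the dataset constructor receives.
for name, d in opt["data"].items():
    print(name, "->", d["args"]["anno_file"])
```

Note that `!!float 1e-3` parses to a Python float, so `opt["optimizer"]["lr"]` is numeric.
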
cover/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .datasets import *
+ from .models import *
cover/datasets/__init__.py ADDED
@@ -0,0 +1,3 @@
+ ## API for COVER and its variants
+ from .basic_datasets import *
+ from .cover_datasets import *
cover/datasets/basic_datasets.py ADDED
@@ -0,0 +1,812 @@
1
+ import os.path as osp
2
+ import random
3
+
4
+ import cv2
5
+ import decord
6
+ import numpy as np
7
+ import skvideo.io
8
+ import torch
9
+ import torchvision
10
+ from decord import VideoReader, cpu, gpu
11
+ from tqdm import tqdm
12
+
13
+ random.seed(42)
14
+
15
+ decord.bridge.set_bridge("torch")
16
+
17
+
18
+ def get_spatial_fragments(
19
+ video,
20
+ fragments_h=7,
21
+ fragments_w=7,
22
+ fsize_h=32,
23
+ fsize_w=32,
24
+ aligned=32,
25
+ nfrags=1,
26
+ random=False,
27
+ fallback_type="upsample",
28
+ ):
29
+ size_h = fragments_h * fsize_h
30
+ size_w = fragments_w * fsize_w
31
+
32
+ ## situation for images
33
+ if video.shape[1] == 1:
34
+ aligned = 1
35
+
36
+ dur_t, res_h, res_w = video.shape[-3:]
37
+ ratio = min(res_h / size_h, res_w / size_w)
38
+ if fallback_type == "upsample" and ratio < 1:
39
+
40
+ ovideo = video
41
+ video = torch.nn.functional.interpolate(
42
+ video / 255.0, scale_factor=1 / ratio, mode="bilinear"
43
+ )
44
+ video = (video * 255.0).type_as(ovideo)
45
+
46
+ assert dur_t % aligned == 0, "Please provide match vclip and align index"
47
+ size = size_h, size_w
48
+
49
+ ## make sure that sampling will not run out of the picture
50
+ hgrids = torch.LongTensor(
51
+ [min(res_h // fragments_h * i, res_h - fsize_h) for i in range(fragments_h)]
52
+ )
53
+ wgrids = torch.LongTensor(
54
+ [min(res_w // fragments_w * i, res_w - fsize_w) for i in range(fragments_w)]
55
+ )
56
+ hlength, wlength = res_h // fragments_h, res_w // fragments_w
57
+
58
+ if random:
59
+ print("This part is deprecated. Please remind that.")
60
+ if res_h > fsize_h:
61
+ rnd_h = torch.randint(
62
+ res_h - fsize_h, (len(hgrids), len(wgrids), dur_t // aligned)
63
+ )
64
+ else:
65
+ rnd_h = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
66
+ if res_w > fsize_w:
67
+ rnd_w = torch.randint(
68
+ res_w - fsize_w, (len(hgrids), len(wgrids), dur_t // aligned)
69
+ )
70
+ else:
71
+ rnd_w = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
72
+ else:
73
+ if hlength > fsize_h:
74
+ rnd_h = torch.randint(
75
+ hlength - fsize_h, (len(hgrids), len(wgrids), dur_t // aligned)
76
+ )
77
+ else:
78
+ rnd_h = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
79
+ if wlength > fsize_w:
80
+ rnd_w = torch.randint(
81
+ wlength - fsize_w, (len(hgrids), len(wgrids), dur_t // aligned)
82
+ )
83
+ else:
84
+ rnd_w = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
85
+
86
+ target_video = torch.zeros(video.shape[:-2] + size).to(video.device)
87
+ # target_videos = []
88
+
89
+ for i, hs in enumerate(hgrids):
90
+ for j, ws in enumerate(wgrids):
91
+ for t in range(dur_t // aligned):
92
+ t_s, t_e = t * aligned, (t + 1) * aligned
93
+ h_s, h_e = i * fsize_h, (i + 1) * fsize_h
94
+ w_s, w_e = j * fsize_w, (j + 1) * fsize_w
95
+ if random:
96
+ h_so, h_eo = rnd_h[i][j][t], rnd_h[i][j][t] + fsize_h
97
+ w_so, w_eo = rnd_w[i][j][t], rnd_w[i][j][t] + fsize_w
98
+ else:
99
+ h_so, h_eo = hs + rnd_h[i][j][t], hs + rnd_h[i][j][t] + fsize_h
100
+ w_so, w_eo = ws + rnd_w[i][j][t], ws + rnd_w[i][j][t] + fsize_w
101
+ target_video[:, t_s:t_e, h_s:h_e, w_s:w_e] = video[
102
+ :, t_s:t_e, h_so:h_eo, w_so:w_eo
103
+ ]
104
+ # target_videos.append(video[:,t_s:t_e,h_so:h_eo,w_so:w_eo])
105
+ # target_video = torch.stack(target_videos, 0).reshape((dur_t // aligned, fragments, fragments,) + target_videos[0].shape).permute(3,0,4,1,5,2,6)
106
+ # target_video = target_video.reshape((-1, dur_t,) + size) ## Splicing Fragments
107
+ return target_video
108
+
109
+
110
+ class FragmentSampleFrames:
111
+ def __init__(self, fsize_t, fragments_t, frame_interval=1, num_clips=1):
112
+
113
+ self.fragments_t = fragments_t
114
+ self.fsize_t = fsize_t
115
+ self.size_t = fragments_t * fsize_t
116
+ self.frame_interval = frame_interval
117
+ self.num_clips = num_clips
118
+
119
+ def get_frame_indices(self, num_frames):
120
+
121
+ tgrids = np.array(
122
+ [num_frames // self.fragments_t * i for i in range(self.fragments_t)],
123
+ dtype=np.int32,
124
+ )
125
+ tlength = num_frames // self.fragments_t
126
+
127
+ if tlength > self.fsize_t * self.frame_interval:
128
+ rnd_t = np.random.randint(
129
+ 0, tlength - self.fsize_t * self.frame_interval, size=len(tgrids)
130
+ )
131
+ else:
132
+ rnd_t = np.zeros(len(tgrids), dtype=np.int32)
133
+
134
+ ranges_t = (
135
+ np.arange(self.fsize_t)[None, :] * self.frame_interval
136
+ + rnd_t[:, None]
137
+ + tgrids[:, None]
138
+ )
139
+ return np.concatenate(ranges_t)
140
+
141
+ def __call__(self, total_frames, train=False, start_index=0):
142
+ frame_inds = []
143
+ for i in range(self.num_clips):
144
+ frame_inds += [self.get_frame_indices(total_frames)]
145
+ frame_inds = np.concatenate(frame_inds)
146
+ frame_inds = np.mod(frame_inds + start_index, total_frames)
147
+ return frame_inds
148
+
149
+
150
+ class SampleFrames:
151
+ def __init__(self, clip_len, frame_interval=1, num_clips=1):
152
+
153
+ self.clip_len = clip_len
154
+ self.frame_interval = frame_interval
155
+ self.num_clips = num_clips
156
+
157
+ def _get_train_clips(self, num_frames):
158
+ """Get clip offsets in train mode.
159
+
160
+ It will calculate the average interval for selected frames,
161
+ and randomly shift them within offsets between [0, avg_interval].
162
+ If the total number of frames is smaller than clips num or origin
163
+ frames length, it will return all zero indices.
164
+
165
+ Args:
166
+ num_frames (int): Total number of frame in the video.
167
+
168
+ Returns:
169
+ np.ndarray: Sampled frame indices in train mode.
170
+ """
171
+ ori_clip_len = self.clip_len * self.frame_interval
172
+ avg_interval = (num_frames - ori_clip_len + 1) // self.num_clips
173
+
174
+ if avg_interval > 0:
175
+ base_offsets = np.arange(self.num_clips) * avg_interval
176
+ clip_offsets = base_offsets + np.random.randint(
177
+ avg_interval, size=self.num_clips
178
+ )
179
+ elif num_frames > max(self.num_clips, ori_clip_len):
180
+ clip_offsets = np.sort(
181
+ np.random.randint(num_frames - ori_clip_len + 1, size=self.num_clips)
182
+ )
183
+ elif avg_interval == 0:
184
+ ratio = (num_frames - ori_clip_len + 1.0) / self.num_clips
185
+ clip_offsets = np.around(np.arange(self.num_clips) * ratio)
186
+ else:
187
+ clip_offsets = np.zeros((self.num_clips,), dtype=np.int32)
188
+ return clip_offsets
189
+
190
+ def _get_test_clips(self, num_frames, start_index=0):
191
+ """Get clip offsets in test mode.
192
+
193
+ Calculate the average interval for selected frames, and shift them
194
+ fixedly by avg_interval/2.
195
+
196
+ Args:
197
+ num_frames (int): Total number of frame in the video.
198
+
199
+ Returns:
200
+ np.ndarray: Sampled frame indices in test mode.
201
+ """
202
+ ori_clip_len = self.clip_len * self.frame_interval
203
+ avg_interval = (num_frames - ori_clip_len + 1) / float(self.num_clips)
204
+ if num_frames > ori_clip_len - 1:
205
+ base_offsets = np.arange(self.num_clips) * avg_interval
206
+ clip_offsets = (base_offsets + avg_interval / 2.0).astype(np.int32)
207
+ else:
208
+ clip_offsets = np.zeros((self.num_clips,), dtype=np.int32)
209
+ return clip_offsets
210
+
211
+ def __call__(self, total_frames, train=False, start_index=0):
212
+ """Perform the SampleFrames loading.
213
+
214
+ Args:
215
+ results (dict): The resulting dict to be modified and passed
216
+ to the next transform in pipeline.
217
+ """
218
+ if train:
219
+ clip_offsets = self._get_train_clips(total_frames)
220
+ else:
221
+ clip_offsets = self._get_test_clips(total_frames)
222
+ frame_inds = (
223
+ clip_offsets[:, None]
224
+ + np.arange(self.clip_len)[None, :] * self.frame_interval
225
+ )
226
+ frame_inds = np.concatenate(frame_inds)
227
+
228
+ frame_inds = frame_inds.reshape((-1, self.clip_len))
229
+ frame_inds = np.mod(frame_inds, total_frames)
230
+ frame_inds = np.concatenate(frame_inds) + start_index
231
+ return frame_inds.astype(np.int32)
232
+
233
+
234
+ class FastVQAPlusPlusDataset(torch.utils.data.Dataset):
235
+ def __init__(
236
+ self,
237
+ ann_file,
238
+ data_prefix,
239
+ frame_interval=2,
240
+ aligned=32,
241
+ fragments=(8, 8, 8),
242
+ fsize=(4, 32, 32),
243
+ num_clips=1,
244
+ nfrags=1,
245
+ cache_in_memory=False,
246
+ phase="test",
247
+ fallback_type="oversample",
248
+ ):
249
+ """
250
+ Fragments.
251
+ args:
252
+ fragments: G_f as in the paper.
253
+ fsize: S_f as in the paper.
254
+ nfrags: number of samples (spatially) as in the paper.
255
+ num_clips: number of samples (temporally) as in the paper.
256
+ """
257
+ self.ann_file = ann_file
258
+ self.data_prefix = data_prefix
259
+ self.frame_interval = frame_interval
260
+ self.num_clips = num_clips
261
+ self.fragments = fragments
262
+ self.fsize = fsize
263
+ self.nfrags = nfrags
264
+ self.clip_len = fragments[0] * fsize[0]
265
+ self.aligned = aligned
266
+ self.fallback_type = fallback_type
267
+ self.sampler = FragmentSampleFrames(
268
+ fsize[0], fragments[0], frame_interval, num_clips
269
+ )
270
+ self.video_infos = []
271
+ self.phase = phase
272
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
273
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
274
+ if isinstance(self.ann_file, list):
275
+ self.video_infos = self.ann_file
276
+ else:
277
+ with open(self.ann_file, "r") as fin:
278
+ for line in fin:
279
+ line_split = line.strip().split(",")
280
+ filename, _, _, label = line_split
281
+ label = float(label)
282
+ filename = osp.join(self.data_prefix, filename)
283
+ self.video_infos.append(dict(filename=filename, label=label))
284
+ if cache_in_memory:
285
+ self.cache = {}
286
+ for i in tqdm(range(len(self)), desc="Caching fragments"):
287
+ self.cache[i] = self.__getitem__(i, tocache=True)
288
+ else:
289
+ self.cache = None
290
+
291
+ def __getitem__(
292
+ self, index, tocache=False, need_original_frames=False,
293
+ ):
294
+ if tocache or self.cache is None:
295
+ fx, fy = self.fragments[1:]
296
+ fsx, fsy = self.fsize[1:]
297
+ video_info = self.video_infos[index]
298
+ filename = video_info["filename"]
299
+ label = video_info["label"]
300
+ if filename.endswith(".yuv"):
301
+ video = skvideo.io.vread(
302
+ filename, 1080, 1920, inputdict={"-pix_fmt": "yuvj420p"}
303
+ )
304
+ frame_inds = self.sampler(video.shape[0], self.phase == "train")
305
+ imgs = [torch.from_numpy(video[idx]) for idx in frame_inds]
306
+ else:
307
+ vreader = VideoReader(filename)
308
+ frame_inds = self.sampler(len(vreader), self.phase == "train")
309
+ frame_dict = {idx: vreader[idx] for idx in np.unique(frame_inds)}
310
+ imgs = [frame_dict[idx] for idx in frame_inds]
311
+ img_shape = imgs[0].shape
312
+ video = torch.stack(imgs, 0)
313
+ video = video.permute(3, 0, 1, 2)
314
+ if self.nfrags == 1:
315
+ vfrag = get_spatial_fragments(
316
+ video,
317
+ fx,
318
+ fy,
319
+ fsx,
320
+ fsy,
321
+ aligned=self.aligned,
322
+ fallback_type=self.fallback_type,
323
+ )
324
+ else:
325
+ vfrag = get_spatial_fragments(
326
+ video,
327
+ fx,
328
+ fy,
329
+ fsx,
330
+ fsy,
331
+ aligned=self.aligned,
332
+ fallback_type=self.fallback_type,
333
+ )
334
+ for i in range(1, self.nfrags):
335
+ vfrag = torch.cat(
336
+ (
337
+ vfrag,
338
+ get_spatial_fragments(
339
+ video,
341
+ fx,
342
+ fy,
343
+ fsx,
344
+ fsy,
345
+ aligned=self.aligned,
346
+ fallback_type=self.fallback_type,
347
+ ),
348
+ ),
349
+ 1,
350
+ )
351
+ if tocache:
352
+ return (vfrag, frame_inds, label, img_shape)
353
+ else:
354
+ vfrag, frame_inds, label, img_shape = self.cache[index]
355
+ vfrag = ((vfrag.permute(1, 2, 3, 0) - self.mean) / self.std).permute(3, 0, 1, 2)
356
+ data = {
357
+ "video": vfrag.reshape(
358
+ (-1, self.nfrags * self.num_clips, self.clip_len) + vfrag.shape[2:]
359
+ ).transpose(
360
+ 0, 1
361
+ ), # B, V, T, C, H, W
362
+ "frame_inds": frame_inds,
363
+ "gt_label": label,
364
+ "original_shape": img_shape,
365
+ }
366
+ if need_original_frames:
367
+ data["original_video"] = video.reshape(
368
+ (-1, self.nfrags * self.num_clips, self.clip_len) + video.shape[2:]
369
+ ).transpose(0, 1)
370
+ return data
371
+
372
+ def __len__(self):
373
+ return len(self.video_infos)
374
+
375
+
376
+ class FragmentVideoDataset(torch.utils.data.Dataset):
377
+ def __init__(
378
+ self,
379
+ ann_file,
380
+ data_prefix,
381
+ clip_len=32,
382
+ frame_interval=2,
383
+ num_clips=4,
384
+ aligned=32,
385
+ fragments=7,
386
+ fsize=32,
387
+ nfrags=1,
388
+ cache_in_memory=False,
389
+ phase="test",
390
+ ):
391
+ """
392
+ Fragments.
393
+ args:
394
+ fragments: G_f as in the paper.
395
+ fsize: S_f as in the paper.
396
+ nfrags: number of samples as in the paper.
397
+ """
398
+ self.ann_file = ann_file
399
+ self.data_prefix = data_prefix
400
+ self.clip_len = clip_len
401
+ self.frame_interval = frame_interval
402
+ self.num_clips = num_clips
403
+ self.fragments = fragments
404
+ self.fsize = fsize
405
+ self.nfrags = nfrags
406
+ self.aligned = aligned
407
+ self.sampler = SampleFrames(clip_len, frame_interval, num_clips)
408
+ self.video_infos = []
409
+ self.phase = phase
410
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
411
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
412
+ if isinstance(self.ann_file, list):
413
+ self.video_infos = self.ann_file
414
+ else:
415
+ with open(self.ann_file, "r") as fin:
416
+ for line in fin:
417
+ line_split = line.strip().split(",")
418
+ filename, _, _, label = line_split
419
+ label = float(label)
420
+ filename = osp.join(self.data_prefix, filename)
421
+ self.video_infos.append(dict(filename=filename, label=label))
422
+ if cache_in_memory:
423
+ self.cache = {}
424
+ for i in tqdm(range(len(self)), desc="Caching fragments"):
425
+ self.cache[i] = self.__getitem__(i, tocache=True)
426
+ else:
427
+ self.cache = None
428
+
429
+ def __getitem__(
430
+ self, index, fragments=-1, fsize=-1, tocache=False, need_original_frames=False,
431
+ ):
432
+ if tocache or self.cache is None:
433
+ if fragments == -1:
434
+ fragments = self.fragments
435
+ if fsize == -1:
436
+ fsize = self.fsize
437
+ video_info = self.video_infos[index]
438
+ filename = video_info["filename"]
439
+ label = video_info["label"]
440
+ if filename.endswith(".yuv"):
441
+ video = skvideo.io.vread(
442
+ filename, 1080, 1920, inputdict={"-pix_fmt": "yuvj420p"}
443
+ )
444
+ frame_inds = self.sampler(video.shape[0], self.phase == "train")
445
+ imgs = [torch.from_numpy(video[idx]) for idx in frame_inds]
446
+ else:
447
+ vreader = VideoReader(filename)
448
+ frame_inds = self.sampler(len(vreader), self.phase == "train")
449
+ frame_dict = {idx: vreader[idx] for idx in np.unique(frame_inds)}
450
+ imgs = [frame_dict[idx] for idx in frame_inds]
451
+ img_shape = imgs[0].shape
452
+ video = torch.stack(imgs, 0)
453
+ video = video.permute(3, 0, 1, 2)
454
+ if self.nfrags == 1:
455
+ vfrag = get_spatial_fragments(
456
+ video, fragments, fragments, fsize, fsize, aligned=self.aligned
457
+ )
458
+ else:
459
+ vfrag = get_spatial_fragments(
460
+ video, fragments, fragments, fsize, fsize, aligned=self.aligned
461
+ )
462
+ for i in range(1, self.nfrags):
463
+ vfrag = torch.cat(
464
+ (
465
+ vfrag,
466
+ get_spatial_fragments(
467
+ video,
468
+ fragments,
469
+ fragments,
470
+ fsize,
471
+ fsize,
472
+ aligned=self.aligned,
473
+ ),
474
+ ),
475
+ 1,
476
+ )
477
+ if tocache:
478
+ return (vfrag, frame_inds, label, img_shape)
479
+ else:
480
+ vfrag, frame_inds, label, img_shape = self.cache[index]
481
+ vfrag = ((vfrag.permute(1, 2, 3, 0) - self.mean) / self.std).permute(3, 0, 1, 2)
482
+ data = {
483
+ "video": vfrag.reshape(
484
+ (-1, self.nfrags * self.num_clips, self.clip_len) + vfrag.shape[2:]
485
+ ).transpose(
486
+ 0, 1
487
+ ), # B, V, T, C, H, W
488
+ "frame_inds": frame_inds,
489
+ "gt_label": label,
490
+ "original_shape": img_shape,
491
+ }
492
+ if need_original_frames:
493
+ data["original_video"] = video.reshape(
494
+ (-1, self.nfrags * self.num_clips, self.clip_len) + video.shape[2:]
495
+ ).transpose(0, 1)
496
+ return data
497
+
498
+ def __len__(self):
499
+ return len(self.video_infos)
500
+
501
+
502
+ class ResizedVideoDataset(torch.utils.data.Dataset):
503
+ def __init__(
504
+ self,
505
+ ann_file,
506
+ data_prefix,
507
+ clip_len=32,
508
+ frame_interval=2,
509
+ num_clips=4,
510
+ aligned=32,
511
+ size=224,
512
+ cache_in_memory=False,
513
+ phase="test",
514
+ ):
515
+ """
516
+ Using resizing.
517
+ """
518
+ self.ann_file = ann_file
519
+ self.data_prefix = data_prefix
520
+ self.clip_len = clip_len
521
+ self.frame_interval = frame_interval
522
+ self.num_clips = num_clips
523
+ self.size = size
524
+ self.aligned = aligned
525
+ self.sampler = SampleFrames(clip_len, frame_interval, num_clips)
526
+ self.video_infos = []
527
+ self.phase = phase
528
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
529
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
530
+ if isinstance(self.ann_file, list):
531
+ self.video_infos = self.ann_file
532
+ else:
533
+ with open(self.ann_file, "r") as fin:
534
+ for line in fin:
535
+ line_split = line.strip().split(",")
536
+ filename, _, _, label = line_split
537
+ label = float(label)
538
+ filename = osp.join(self.data_prefix, filename)
539
+ self.video_infos.append(dict(filename=filename, label=label))
540
+ if cache_in_memory:
541
+ self.cache = {}
542
+ for i in tqdm(range(len(self)), desc="Caching resized videos"):
543
+ self.cache[i] = self.__getitem__(i, tocache=True)
544
+ else:
545
+ self.cache = None
546
+
547
+ def __getitem__(self, index, tocache=False, need_original_frames=False):
548
+ if tocache or self.cache is None:
549
+ video_info = self.video_infos[index]
550
+ filename = video_info["filename"]
551
+ label = video_info["label"]
552
+ vreader = VideoReader(filename)
553
+ frame_inds = self.sampler(len(vreader), self.phase == "train")
554
+ frame_dict = {idx: vreader[idx] for idx in np.unique(frame_inds)}
555
+ imgs = [frame_dict[idx] for idx in frame_inds]
556
+ img_shape = imgs[0].shape
557
+ video = torch.stack(imgs, 0)
558
+ video = video.permute(3, 0, 1, 2)
559
+ video = torch.nn.functional.interpolate(video, size=(self.size, self.size))
560
+ if tocache:
561
+ return (video, frame_inds, label, img_shape)
562
+ else:
563
+ vfrag, frame_inds, label, img_shape = self.cache[index]
564
+ vfrag = ((vfrag.permute(1, 2, 3, 0) - self.mean) / self.std).permute(3, 0, 1, 2)
565
+ data = {
566
+ "video": vfrag.reshape(
567
+ (-1, self.num_clips, self.clip_len) + vfrag.shape[2:]
568
+ ).transpose(
569
+ 0, 1
570
+ ), # B, V, T, C, H, W
571
+ "frame_inds": frame_inds,
572
+ "gt_label": label,
573
+ "original_shape": img_shape,
574
+ }
575
+ if need_original_frames:
576
+ data["original_video"] = video.reshape(
577
+ (-1, self.nfrags * self.num_clips, self.clip_len) + video.shape[2:]
578
+ ).transpose(0, 1)
579
+ return data
580
+
581
+ def __len__(self):
582
+ return len(self.video_infos)
583
+
584
+
585
+ class CroppedVideoDataset(FragmentVideoDataset):
586
+ def __init__(
587
+ self,
588
+ ann_file,
589
+ data_prefix,
590
+ clip_len=32,
591
+ frame_interval=2,
592
+ num_clips=4,
593
+ aligned=32,
594
+ size=224,
595
+ ncrops=1,
596
+ cache_in_memory=False,
597
+ phase="test",
598
+ ):
599
+
600
+ """
601
+ Regard Cropping as a special case for Fragments in Grid 1*1.
602
+ """
603
+ super().__init__(
604
+ ann_file,
605
+ data_prefix,
606
+ clip_len=clip_len,
607
+ frame_interval=frame_interval,
608
+ num_clips=num_clips,
609
+ aligned=aligned,
610
+ fragments=1,
611
+ fsize=224,
612
+ nfrags=ncrops,
613
+ cache_in_memory=cache_in_memory,
614
+ phase=phase,
615
+ )
616
+
617
+
618
+ class FragmentImageDataset(torch.utils.data.Dataset):
619
+ def __init__(
620
+ self,
621
+ ann_file,
622
+ data_prefix,
623
+ fragments=7,
624
+ fsize=32,
625
+ nfrags=1,
626
+ cache_in_memory=False,
627
+ phase="test",
628
+ ):
629
+ self.ann_file = ann_file
630
+ self.data_prefix = data_prefix
631
+ self.fragments = fragments
632
+ self.fsize = fsize
633
+ self.nfrags = nfrags
634
+ self.image_infos = []
635
+ self.phase = phase
636
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
637
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
638
+ if isinstance(self.ann_file, list):
639
+ self.image_infos = self.ann_file
640
+ else:
641
+ with open(self.ann_file, "r") as fin:
642
+ for line in fin:
643
+ line_split = line.strip().split(",")
644
+ filename, _, _, label = line_split
645
+ label = float(label)
646
+ filename = osp.join(self.data_prefix, filename)
647
+ self.image_infos.append(dict(filename=filename, label=label))
648
+ if cache_in_memory:
649
+ self.cache = {}
650
+ for i in tqdm(range(len(self)), desc="Caching fragments"):
651
+ self.cache[i] = self.__getitem__(i, tocache=True)
652
+ else:
653
+ self.cache = None
654
+
655
+ def __getitem__(
656
+ self, index, fragments=-1, fsize=-1, tocache=False, need_original_frames=False
657
+ ):
658
+ if tocache or self.cache is None:
659
+ if fragments == -1:
660
+ fragments = self.fragments
661
+ if fsize == -1:
662
+ fsize = self.fsize
663
+ image_info = self.image_infos[index]
664
+ filename = image_info["filename"]
665
+ label = image_info["label"]
666
+ try:
667
+ img = torchvision.io.read_image(filename)
668
+ except:
669
+ img = cv2.imread(filename)
670
+ img = torch.from_numpy(img[:, :, [2, 1, 0]]).permute(2, 0, 1)
671
+ img_shape = img.shape[1:]
672
+ image = img.unsqueeze(1)
673
+ if self.nfrags == 1:
674
+ ifrag = get_spatial_fragments(image, fragments, fragments, fsize, fsize)
675
+ else:
676
+ ifrag = get_spatial_fragments(image, fragments, fragments, fsize, fsize)
677
+ for i in range(1, self.nfrags):
678
+ ifrag = torch.cat(
679
+ (
680
+ ifrag,
681
+ get_spatial_fragments(
682
+ image, fragments, fragments, fsize, fsize
683
+ ),
684
+ ),
685
+ 1,
686
+ )
687
+ if tocache:
688
+ return (ifrag, label, img_shape)
689
+ else:
690
+ ifrag, label, img_shape = self.cache[index]
691
+ if self.nfrags == 1:
692
+ ifrag = (
693
+ ((ifrag.permute(1, 2, 3, 0) - self.mean) / self.std)
694
+ .squeeze(0)
695
+ .permute(2, 0, 1)
696
+ )
697
+ else:
698
+ ### During testing, one image as a batch
699
+ ifrag = (
700
+ ((ifrag.permute(1, 2, 3, 0) - self.mean) / self.std)
701
+ .squeeze(0)
702
+ .permute(0, 3, 1, 2)
703
+ )
704
+ data = {
705
+ "image": ifrag,
706
+ "gt_label": label,
707
+ "original_shape": img_shape,
708
+ "name": filename,
709
+ }
710
+ if need_original_frames:
711
+ data["original_image"] = image.squeeze(1)
712
+ return data
713
+
714
+ def __len__(self):
715
+ return len(self.image_infos)
716
+
717
+
718
+ class ResizedImageDataset(torch.utils.data.Dataset):
719
+ def __init__(
720
+ self, ann_file, data_prefix, size=224, cache_in_memory=False, phase="test",
721
+ ):
722
+ self.ann_file = ann_file
723
+ self.data_prefix = data_prefix
724
+ self.size = size
725
+ self.image_infos = []
726
+ self.phase = phase
727
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
728
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
729
+ if isinstance(self.ann_file, list):
730
+ self.image_infos = self.ann_file
731
+ else:
732
+ with open(self.ann_file, "r") as fin:
733
+ for line in fin:
734
+ line_split = line.strip().split(",")
735
+ filename, _, _, label = line_split
736
+ label = float(label)
737
+ filename = osp.join(self.data_prefix, filename)
738
+ self.image_infos.append(dict(filename=filename, label=label))
739
+ if cache_in_memory:
740
+ self.cache = {}
741
+ for i in tqdm(range(len(self)), desc="Caching fragments"):
742
+ self.cache[i] = self.__getitem__(i, tocache=True)
743
+ else:
744
+ self.cache = None
745
+
746
+ def __getitem__(
747
+ self, index, fragments=-1, fsize=-1, tocache=False, need_original_frames=False
748
+ ):
749
+ if tocache or self.cache is None:
750
+ if fragments == -1:
751
+ fragments = self.fragments
752
+ if fsize == -1:
753
+ fsize = self.fsize
754
+ image_info = self.image_infos[index]
755
+ filename = image_info["filename"]
756
+ label = image_info["label"]
757
+ img = torchvision.io.read_image(filename)
758
+ img_shape = img.shape[1:]
759
+ image = img.unsqueeze(1)
760
+ if self.nfrags == 1:
761
+ ifrag = get_spatial_fragments(image, fragments, fsize)
762
+ else:
763
+ ifrag = get_spatial_fragments(image, fragments, fsize)
764
+ for i in range(1, self.nfrags):
765
+ ifrag = torch.cat(
766
+ (ifrag, get_spatial_fragments(image, fragments, fsize)), 1
767
+ )
768
+ if tocache:
769
+ return (ifrag, label, img_shape)
770
+ else:
771
+ ifrag, label, img_shape = self.cache[index]
772
+ ifrag = (
773
+ ((ifrag.permute(1, 2, 3, 0) - self.mean) / self.std)
774
+ .squeeze(0)
775
+ .permute(2, 0, 1)
776
+ )
777
+ data = {
778
+ "image": ifrag,
779
+ "gt_label": label,
780
+ "original_shape": img_shape,
781
+ }
782
+ if need_original_frames:
783
+ data["original_image"] = image.squeeze(1)
784
+ return data
785
+
786
+ def __len__(self):
787
+ return len(self.image_infos)
788
+
789
+
790
+ class CroppedImageDataset(FragmentImageDataset):
791
+ def __init__(
792
+ self,
793
+ ann_file,
794
+ data_prefix,
795
+ size=224,
796
+ ncrops=1,
797
+ cache_in_memory=False,
798
+ phase="test",
799
+ ):
800
+
801
+ """
802
+ Regard Cropping as a special case for Fragments in Grid 1*1.
803
+ """
804
+ super().__init__(
805
+ ann_file,
806
+ data_prefix,
807
+ fragments=1,
808
+ fsize=224,
809
+ nfrags=ncrops,
810
+ cache_in_memory=cache_in_memory,
811
+ phase=phase,
812
+ )
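
The docstrings above describe spatial fragments in terms of a grid (G_f) and fragment size (S_f). The following is a small smoke test of those utilities on a dummy tensor; it assumes the module's top-level imports (`decord`, `scikit-video`, `opencv-python`) are installed, since they load with the module.

```python
import torch

from cover.datasets.basic_datasets import FragmentSampleFrames, get_spatial_fragments

# Dummy video tensor in [C, T, H, W] layout, as expected by get_spatial_fragments.
video = torch.randint(0, 256, (3, 32, 720, 1280), dtype=torch.uint8)

# A 7x7 grid of 32x32 fragments gives a 224x224 spliced view per frame.
frags = get_spatial_fragments(video, fragments_h=7, fragments_w=7,
                              fsize_h=32, fsize_w=32, aligned=32)
print(frags.shape)  # torch.Size([3, 32, 224, 224])

# Temporal counterpart: 8 fragments of 4 frames each, sampled with interval 2.
sampler = FragmentSampleFrames(fsize_t=4, fragments_t=8, frame_interval=2, num_clips=1)
print(sampler(total_frames=300).shape)  # (32,)
```
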
cover/datasets/cover_datasets.py ADDED
@@ -0,0 +1,442 @@
1
+ import copy
2
+ import glob
3
+ import os
4
+ import os.path as osp
5
+ import random
6
+ from functools import lru_cache
7
+
8
+ import cv2
9
+ import decord
10
+ import numpy as np
11
+ import skvideo.io
12
+ import torch
13
+ import torchvision
14
+ from decord import VideoReader, cpu, gpu
15
+ from tqdm import tqdm
16
+
17
+ random.seed(42)
18
+
19
+ decord.bridge.set_bridge("torch")
20
+
21
+
22
+ def get_spatial_fragments(
23
+ video,
24
+ fragments_h=7,
25
+ fragments_w=7,
26
+ fsize_h=32,
27
+ fsize_w=32,
28
+ aligned=32,
29
+ nfrags=1,
30
+ random=False,
31
+ random_upsample=False,
32
+ fallback_type="upsample",
33
+ upsample=-1,
34
+ **kwargs,
35
+ ):
36
+ if upsample > 0:
37
+ old_h, old_w = video.shape[-2], video.shape[-1]
38
+ if old_h >= old_w:
39
+ w = upsample
40
+ h = int(upsample * old_h / old_w)
41
+ else:
42
+ h = upsample
43
+ w = int(upsample * old_w / old_h)
44
+
45
+ video = get_resized_video(video, h, w)
46
+ size_h = fragments_h * fsize_h
47
+ size_w = fragments_w * fsize_w
48
+ ## video: [C,T,H,W]
49
+ ## situation for images
50
+ if video.shape[1] == 1:
51
+ aligned = 1
52
+
53
+ dur_t, res_h, res_w = video.shape[-3:]
54
+ ratio = min(res_h / size_h, res_w / size_w)
55
+ if fallback_type == "upsample" and ratio < 1:
56
+
57
+ ovideo = video
58
+ video = torch.nn.functional.interpolate(
59
+ video / 255.0, scale_factor=1 / ratio, mode="bilinear"
60
+ )
61
+ video = (video * 255.0).type_as(ovideo)
62
+
63
+ if random_upsample:
64
+
65
+ randratio = random.random() * 0.5 + 1
66
+ video = torch.nn.functional.interpolate(
67
+ video / 255.0, scale_factor=randratio, mode="bilinear"
68
+ )
69
+ video = (video * 255.0).type_as(ovideo)
70
+
71
+ assert dur_t % aligned == 0, "Please provide match vclip and align index"
72
+ size = size_h, size_w
73
+
74
+ ## make sure that sampling will not run out of the picture
75
+ hgrids = torch.LongTensor(
76
+ [min(res_h // fragments_h * i, res_h - fsize_h) for i in range(fragments_h)]
77
+ )
78
+ wgrids = torch.LongTensor(
79
+ [min(res_w // fragments_w * i, res_w - fsize_w) for i in range(fragments_w)]
80
+ )
81
+ hlength, wlength = res_h // fragments_h, res_w // fragments_w
82
+
83
+ if random:
84
+ print("This part is deprecated. Please remind that.")
85
+ if res_h > fsize_h:
86
+ rnd_h = torch.randint(
87
+ res_h - fsize_h, (len(hgrids), len(wgrids), dur_t // aligned)
88
+ )
89
+ else:
90
+ rnd_h = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
91
+ if res_w > fsize_w:
92
+ rnd_w = torch.randint(
93
+ res_w - fsize_w, (len(hgrids), len(wgrids), dur_t // aligned)
94
+ )
95
+ else:
96
+ rnd_w = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
97
+ else:
98
+ if hlength > fsize_h:
99
+ rnd_h = torch.randint(
100
+ hlength - fsize_h, (len(hgrids), len(wgrids), dur_t // aligned)
101
+ )
102
+ else:
103
+ rnd_h = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
104
+ if wlength > fsize_w:
105
+ rnd_w = torch.randint(
106
+ wlength - fsize_w, (len(hgrids), len(wgrids), dur_t // aligned)
107
+ )
108
+ else:
109
+ rnd_w = torch.zeros((len(hgrids), len(wgrids), dur_t // aligned)).int()
110
+
111
+ target_video = torch.zeros(video.shape[:-2] + size).to(video.device)
112
+ # target_videos = []
113
+
114
+ for i, hs in enumerate(hgrids):
115
+ for j, ws in enumerate(wgrids):
116
+ for t in range(dur_t // aligned):
117
+ t_s, t_e = t * aligned, (t + 1) * aligned
118
+ h_s, h_e = i * fsize_h, (i + 1) * fsize_h
119
+ w_s, w_e = j * fsize_w, (j + 1) * fsize_w
120
+ if random:
121
+ h_so, h_eo = rnd_h[i][j][t], rnd_h[i][j][t] + fsize_h
122
+ w_so, w_eo = rnd_w[i][j][t], rnd_w[i][j][t] + fsize_w
123
+ else:
124
+ h_so, h_eo = hs + rnd_h[i][j][t], hs + rnd_h[i][j][t] + fsize_h
125
+ w_so, w_eo = ws + rnd_w[i][j][t], ws + rnd_w[i][j][t] + fsize_w
126
+ target_video[:, t_s:t_e, h_s:h_e, w_s:w_e] = video[
127
+ :, t_s:t_e, h_so:h_eo, w_so:w_eo
128
+ ]
129
+ # target_videos.append(video[:,t_s:t_e,h_so:h_eo,w_so:w_eo])
130
+ # target_video = torch.stack(target_videos, 0).reshape((dur_t // aligned, fragments, fragments,) + target_videos[0].shape).permute(3,0,4,1,5,2,6)
131
+ # target_video = target_video.reshape((-1, dur_t,) + size) ## Splicing Fragments
132
+ return target_video
133
+
134
+
135
+ @lru_cache
136
+ def get_resize_function(size_h, size_w, target_ratio=1, random_crop=False):
137
+ if random_crop:
138
+ return torchvision.transforms.RandomResizedCrop(
139
+ (size_h, size_w), scale=(0.40, 1.0)
140
+ )
141
+ if target_ratio > 1:
142
+ size_h = int(target_ratio * size_w)
143
+ assert size_h > size_w
144
+ elif target_ratio < 1:
145
+ size_w = int(size_h / target_ratio)
146
+ assert size_w > size_h
147
+ return torchvision.transforms.Resize((size_h, size_w))
148
+
149
+
150
+ def get_resized_video(
151
+ video, size_h=224, size_w=224, random_crop=False, arp=False, **kwargs,
152
+ ):
153
+ video = video.permute(1, 0, 2, 3)
154
+ resize_opt = get_resize_function(
155
+ size_h, size_w, video.shape[-2] / video.shape[-1] if arp else 1, random_crop
156
+ )
157
+ video = resize_opt(video).permute(1, 0, 2, 3)
158
+ return video
159
+
160
+
161
+ def get_arp_resized_video(
162
+ video, short_edge=224, train=False, **kwargs,
163
+ ):
164
+ if train: ## if during training, will random crop into square and then resize
165
+ res_h, res_w = video.shape[-2:]
166
+ ori_short_edge = min(video.shape[-2:])
167
+ if res_h > ori_short_edge:
168
+ rnd_h = random.randrange(res_h - ori_short_edge)
169
+ video = video[..., rnd_h : rnd_h + ori_short_edge, :]
170
+ elif res_w > ori_short_edge:
171
+ rnd_w = random.randrange(res_w - ori_short_edge)
172
+ video = video[..., :, rnd_w : rnd_w + ori_short_edge]
173
+ ori_short_edge = min(video.shape[-2:])
174
+ scale_factor = short_edge / ori_short_edge
175
+ ovideo = video
176
+ video = torch.nn.functional.interpolate(
177
+ video / 255.0, scale_factor=scale_factor, mode="bilinear"
178
+ )
179
+ video = (video * 255.0).type_as(ovideo)
180
+ return video
181
+
182
+
183
+ def get_arp_fragment_video(
184
+ video, short_fragments=7, fsize=32, train=False, **kwargs,
185
+ ):
186
+ if (
187
+ train
188
+ ): ## if during training, will random crop into square and then get fragments
189
+ res_h, res_w = video.shape[-2:]
190
+ ori_short_edge = min(video.shape[-2:])
191
+ if res_h > ori_short_edge:
192
+ rnd_h = random.randrange(res_h - ori_short_edge)
193
+ video = video[..., rnd_h : rnd_h + ori_short_edge, :]
194
+ elif res_w > ori_short_edge:
195
+ rnd_w = random.randrange(res_w - ori_short_edge)
196
+ video = video[..., :, rnd_w : rnd_w + ori_short_edge]
197
+ kwargs["fsize_h"], kwargs["fsize_w"] = fsize, fsize
198
+ res_h, res_w = video.shape[-2:]
199
+ if res_h > res_w:
200
+ kwargs["fragments_w"] = short_fragments
201
+ kwargs["fragments_h"] = int(short_fragments * res_h / res_w)
202
+ else:
203
+ kwargs["fragments_h"] = short_fragments
204
+ kwargs["fragments_w"] = int(short_fragments * res_w / res_h)
205
+ return get_spatial_fragments(video, **kwargs)
206
+
207
+
208
+ def get_cropped_video(
209
+ video, size_h=224, size_w=224, **kwargs,
210
+ ):
211
+ kwargs["fragments_h"], kwargs["fragments_w"] = 1, 1
212
+ kwargs["fsize_h"], kwargs["fsize_w"] = size_h, size_w
213
+ return get_spatial_fragments(video, **kwargs)
214
+
215
+
216
+ def get_single_view(
217
+ video, sample_type="aesthetic", **kwargs,
218
+ ):
219
+ if sample_type.startswith("aesthetic"):
220
+ video = get_resized_video(video, **kwargs)
221
+ elif sample_type.startswith("technical"):
222
+ video = get_spatial_fragments(video, **kwargs)
223
+ elif sample_type.startswith("semantic"):
224
+ video = get_resized_video(video, **kwargs)
225
+ elif sample_type == "original":
226
+ return video
227
+
228
+ return video
229
+
230
+
231
+ def spatial_temporal_view_decomposition(
232
+ video_path, sample_types, samplers, is_train=False, augment=False,
233
+ ):
234
+ video = {}
235
+ if video_path.endswith(".yuv"):
236
+ print("This part will be deprecated due to large memory cost.")
237
+ ## This is only an adaptation to LIVE-Qualcomm
238
+ ovideo = skvideo.io.vread(
239
+ video_path, 1080, 1920, inputdict={"-pix_fmt": "yuvj420p"}
240
+ )
241
+ for stype in samplers:
242
+ frame_inds = samplers[stype](ovideo.shape[0], is_train)
243
+ imgs = [torch.from_numpy(ovideo[idx]) for idx in frame_inds]
244
+ video[stype] = torch.stack(imgs, 0).permute(3, 0, 1, 2)
245
+ del ovideo
246
+ else:
247
+ decord.bridge.set_bridge("torch")
248
+ vreader = VideoReader(video_path)
249
+ ### Avoid duplicated video decoding!!! Important!!!!
250
+ all_frame_inds = []
251
+ frame_inds = {}
252
+ for stype in samplers:
253
+ frame_inds[stype] = samplers[stype](len(vreader), is_train)
254
+ all_frame_inds.append(frame_inds[stype])
255
+
256
+ ### Each frame is only decoded one time!!!
257
+ all_frame_inds = np.concatenate(all_frame_inds, 0)
258
+ frame_dict = {idx: vreader[idx] for idx in np.unique(all_frame_inds)}
259
+
260
+ for stype in samplers:
261
+ imgs = [frame_dict[idx] for idx in frame_inds[stype]]
262
+ video[stype] = torch.stack(imgs, 0).permute(3, 0, 1, 2)
263
+
264
+ sampled_video = {}
265
+ for stype, sopt in sample_types.items():
266
+ sampled_video[stype] = get_single_view(video[stype], stype, **sopt)
267
+ return sampled_video, frame_inds
268
+
269
+
270
+ import random
271
+
272
+ import numpy as np
273
+
274
+
275
+ class UnifiedFrameSampler:
276
+ def __init__(
277
+ self, fsize_t, fragments_t, frame_interval=1, num_clips=1, drop_rate=0.0,
278
+ ):
279
+
280
+ self.fragments_t = fragments_t
281
+ self.fsize_t = fsize_t
282
+ self.size_t = fragments_t * fsize_t
283
+ self.frame_interval = frame_interval
284
+ self.num_clips = num_clips
285
+ self.drop_rate = drop_rate
286
+
287
+ def get_frame_indices(self, num_frames, train=False):
288
+
289
+ tgrids = np.array(
290
+ [num_frames // self.fragments_t * i for i in range(self.fragments_t)],
291
+ dtype=np.int32,
292
+ )
293
+ tlength = num_frames // self.fragments_t
294
+
295
+ if tlength > self.fsize_t * self.frame_interval:
296
+ rnd_t = np.random.randint(
297
+ 0, tlength - self.fsize_t * self.frame_interval, size=len(tgrids)
298
+ )
299
+ else:
300
+ rnd_t = np.zeros(len(tgrids), dtype=np.int32)
301
+
302
+ ranges_t = (
303
+ np.arange(self.fsize_t)[None, :] * self.frame_interval
304
+ + rnd_t[:, None]
305
+ + tgrids[:, None]
306
+ )
307
+
308
+ drop = random.sample(
309
+ list(range(self.fragments_t)), int(self.fragments_t * self.drop_rate)
310
+ )
311
+ dropped_ranges_t = []
312
+ for i, rt in enumerate(ranges_t):
313
+ if i not in drop:
314
+ dropped_ranges_t.append(rt)
315
+ return np.concatenate(dropped_ranges_t)
316
+
317
+ def __call__(self, total_frames, train=False, start_index=0):
318
+ frame_inds = []
319
+
320
+ for i in range(self.num_clips):
321
+ frame_inds += [self.get_frame_indices(total_frames)]
322
+
323
+ frame_inds = np.concatenate(frame_inds)
324
+ frame_inds = np.mod(frame_inds + start_index, total_frames)
325
+ return frame_inds.astype(np.int32)
326
+
327
+
328
+ class ViewDecompositionDataset(torch.utils.data.Dataset):
329
+ def __init__(self, opt):
330
+ ## opt is a dictionary that includes options for video sampling
331
+
332
+ super().__init__()
333
+
334
+ self.weight = opt.get("weight", 0.5)
335
+
336
+ self.fully_supervised = opt.get("fully_supervised", False)
337
+ print("Fully supervised:", self.fully_supervised)
338
+
339
+ self.video_infos = []
340
+ self.ann_file = opt["anno_file"]
341
+ self.data_prefix = opt["data_prefix"]
342
+ self.opt = opt
343
+ self.sample_types = opt["sample_types"]
344
+ self.data_backend = opt.get("data_backend", "disk")
345
+ self.augment = opt.get("augment", False)
346
+ if self.data_backend == "petrel":
347
+ from petrel_client import client
348
+
349
+ self.client = client.Client(enable_mc=True)
350
+
351
+ self.phase = opt["phase"]
352
+ self.crop = opt.get("random_crop", False)
353
+ self.mean = torch.FloatTensor([123.675, 116.28, 103.53])
354
+ self.std = torch.FloatTensor([58.395, 57.12, 57.375])
355
+ self.mean_semantic = torch.FloatTensor([122.77, 116.75, 104.09])
356
+ self.std_semantic = torch.FloatTensor([68.50, 66.63, 70.32])
357
+ self.samplers = {}
358
+ for stype, sopt in opt["sample_types"].items():
359
+ if "t_frag" not in sopt:
360
+ # resized temporal sampling for TQE in COVER
361
+ self.samplers[stype] = UnifiedFrameSampler(
362
+ sopt["clip_len"], sopt["num_clips"], sopt["frame_interval"]
363
+ )
364
+ else:
365
+ # temporal sampling for AQE in COVER
366
+ self.samplers[stype] = UnifiedFrameSampler(
367
+ sopt["clip_len"] // sopt["t_frag"],
368
+ sopt["t_frag"],
369
+ sopt["frame_interval"],
370
+ sopt["num_clips"],
371
+ )
372
+ print(
373
+ stype + " branch sampled frames:",
374
+ self.samplers[stype](240, self.phase == "train"),
375
+ )
376
+
377
+ if isinstance(self.ann_file, list):
378
+ self.video_infos = self.ann_file
379
+ else:
380
+ try:
381
+ with open(self.ann_file, "r") as fin:
382
+ for line in fin:
383
+ line_split = line.strip().split(",")
384
+ filename, a, t, label = line_split
385
+ if self.fully_supervised:
386
+ label = float(a), float(t), float(label)
387
+ else:
388
+ label = float(label)
389
+ filename = osp.join(self.data_prefix, filename)
390
+ self.video_infos.append(dict(filename=filename, label=label))
391
+ except:
392
+ #### No Label Testing
393
+ video_filenames = []
394
+ for (root, dirs, files) in os.walk(self.data_prefix, topdown=True):
395
+ for file in files:
396
+ if file.endswith(".mp4"):
397
+ video_filenames += [os.path.join(root, file)]
398
+ print(len(video_filenames))
399
+ video_filenames = sorted(video_filenames)
400
+ for filename in video_filenames:
401
+ self.video_infos.append(dict(filename=filename, label=-1))
402
+
403
+ def __getitem__(self, index):
404
+ video_info = self.video_infos[index]
405
+ filename = video_info["filename"]
406
+ label = video_info["label"]
407
+
408
+ try:
409
+ ## Read Original Frames
410
+ ## Process Frames
411
+ data, frame_inds = spatial_temporal_view_decomposition(
412
+ filename,
413
+ self.sample_types,
414
+ self.samplers,
415
+ self.phase == "train",
416
+ self.augment and (self.phase == "train"),
417
+ )
418
+
419
+ for k, v in data.items():
420
+ if k == 'technical' or k == 'aesthetic':
421
+ data[k] = ((v.permute(1, 2, 3, 0) - self.mean) / self.std).permute(
422
+ 3, 0, 1, 2
423
+ )
424
+ elif k == 'semantic' :
425
+ data[k] = ((v.permute(1, 2, 3, 0) - self.mean_semantic) / self.std_semantic).permute(
426
+ 3, 0, 1, 2
427
+ )
428
+
429
+ data["num_clips"] = {}
430
+ for stype, sopt in self.sample_types.items():
431
+ data["num_clips"][stype] = sopt["num_clips"]
432
+ data["frame_inds"] = frame_inds
433
+ data["gt_label"] = label
434
+ data["name"] = filename # osp.basename(video_info["filename"])
435
+ except:
436
+ # exception flow
437
+ return {"name": filename}
438
+
439
+ return data
440
+
441
+ def __len__(self):
442
+ return len(self.video_infos)
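The sampler defined above can be exercised on its own before wiring up a full dataset; the snippet below is a minimal sketch run as if inside this module, and the clip length (240 frames) and sampler settings are illustrative assumptions, not values taken from the shipped configs.

# Illustrative sketch only; the parameter values below are assumptions, not repo defaults.
tech_sampler = UnifiedFrameSampler(fsize_t=4, fragments_t=8, frame_interval=2, num_clips=1)
aes_sampler = UnifiedFrameSampler(fsize_t=32, fragments_t=1, frame_interval=2, num_clips=1)
for name, sampler in [("fragmented", tech_sampler), ("continuous", aes_sampler)]:
    inds = sampler(240, train=False)  # hypothetical 240-frame video
    print(name, inds.shape, inds.min(), inds.max())  # 32 indices each, all within [0, 239]

In the dataset itself, one such sampler is built per branch from opt["sample_types"], and the resulting index arrays drive the single decode pass in spatial_temporal_view_decomposition so that no frame is decoded twice.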
cover/models/__init__.py ADDED
@@ -0,0 +1,17 @@
1
+ from .conv_backbone import convnext_3d_small, convnext_3d_tiny
2
+ from .evaluator import COVER, BaseEvaluator, BaseImageEvaluator
3
+ from .head import IQAHead, VARHead, VQAHead
4
+ from .swin_backbone import SwinTransformer2D as IQABackbone
5
+ from .swin_backbone import SwinTransformer3D as VQABackbone
6
+ from .swin_backbone import swin_3d_small, swin_3d_tiny
7
+
8
+ __all__ = [
9
+ "VQABackbone",
10
+ "IQABackbone",
11
+ "VQAHead",
12
+ "IQAHead",
13
+ "VARHead",
14
+ "BaseEvaluator",
15
+ "BaseImageEvaluator",
16
+ "COVER",
17
+ ]
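The package root re-exports the evaluator and prediction heads, which is what the evaluation scripts in this Space import. A minimal construction sketch follows; the cover.yml key layout ("model" then "args") is an assumption based on typical usage of this repo rather than something visible in this excerpt, and weights would still need to be loaded separately.

import yaml
from cover.models import COVER

with open("cover.yml", "r") as f:  # config shipped at the repo root (see file list)
    opt = yaml.safe_load(f)
model = COVER(**opt["model"]["args"]).eval()  # key layout is an assumption
# model.load_state_dict(...) would follow before scoring any videos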
cover/models/backbone_get_attention.py ADDED
@@ -0,0 +1,990 @@
1
+ from functools import lru_cache, reduce
2
+ from operator import mul
3
+
4
+ import numpy as np
5
+ import torch
6
+ import torch.nn as nn
7
+ import torch.nn.functional as F
8
+ import torch.utils.checkpoint as checkpoint
9
+ from einops import rearrange
10
+ from timm.models.layers import DropPath, trunc_normal_
11
+
12
+
13
+ def fragment_infos(D, H, W, fragments=7, device="cuda"):
14
+ m = torch.arange(fragments).unsqueeze(-1).float()
15
+ m = (m + m.t() * fragments).reshape(1, 1, 1, fragments, fragments)
16
+ m = F.interpolate(m.to(device), size=(D, H, W)).permute(0, 2, 3, 4, 1)
17
+ return m.long()
18
+
19
+
20
+ @lru_cache
21
+ def global_position_index(
22
+ D,
23
+ H,
24
+ W,
25
+ fragments=(1, 7, 7),
26
+ window_size=(8, 7, 7),
27
+ shift_size=(0, 0, 0),
28
+ device="cuda",
29
+ ):
30
+ frags_d = torch.arange(fragments[0])
31
+ frags_h = torch.arange(fragments[1])
32
+ frags_w = torch.arange(fragments[2])
33
+ frags = torch.stack(
34
+ torch.meshgrid(frags_d, frags_h, frags_w)
35
+ ).float() # 3, Fd, Fh, Fw
36
+ coords = (
37
+ torch.nn.functional.interpolate(frags[None].to(device), size=(D, H, W))
38
+ .long()
39
+ .permute(0, 2, 3, 4, 1)
40
+ )
41
+ # print(shift_size)
42
+ coords = torch.roll(
43
+ coords, shifts=(-shift_size[0], -shift_size[1], -shift_size[2]), dims=(1, 2, 3)
44
+ )
45
+ window_coords = window_partition(coords, window_size)
46
+ relative_coords = (
47
+ window_coords[:, None, :] - window_coords[:, :, None]
48
+ ) # Wd*Wh*Ww, Wd*Wh*Ww, 3
49
+ return relative_coords # relative_coords
50
+
51
+
52
+ class Mlp(nn.Module):
53
+ """Multilayer perceptron."""
54
+
55
+ def __init__(
56
+ self,
57
+ in_features,
58
+ hidden_features=None,
59
+ out_features=None,
60
+ act_layer=nn.GELU,
61
+ drop=0.0,
62
+ ):
63
+ super().__init__()
64
+ out_features = out_features or in_features
65
+ hidden_features = hidden_features or in_features
66
+ self.fc1 = nn.Linear(in_features, hidden_features)
67
+ self.act = act_layer()
68
+ self.fc2 = nn.Linear(hidden_features, out_features)
69
+ self.drop = nn.Dropout(drop)
70
+
71
+ def forward(self, x):
72
+ x = self.fc1(x)
73
+ x = self.act(x)
74
+ x = self.drop(x)
75
+ x = self.fc2(x)
76
+ x = self.drop(x)
77
+ return x
78
+
79
+
80
+ def window_partition(x, window_size):
81
+ """
82
+ Args:
83
+ x: (B, D, H, W, C)
84
+ window_size (tuple[int]): window size
85
+
86
+ Returns:
87
+ windows: (B*num_windows, window_size*window_size, C)
88
+ """
89
+ B, D, H, W, C = x.shape
90
+ x = x.view(
91
+ B,
92
+ D // window_size[0],
93
+ window_size[0],
94
+ H // window_size[1],
95
+ window_size[1],
96
+ W // window_size[2],
97
+ window_size[2],
98
+ C,
99
+ )
100
+ windows = (
101
+ x.permute(0, 1, 3, 5, 2, 4, 6, 7)
102
+ .contiguous()
103
+ .view(-1, reduce(mul, window_size), C)
104
+ )
105
+ return windows
106
+
107
+
108
+ def window_reverse(windows, window_size, B, D, H, W):
109
+ """
110
+ Args:
111
+ windows: (B*num_windows, window_size, window_size, C)
112
+ window_size (tuple[int]): Window size
113
+ H (int): Height of image
114
+ W (int): Width of image
115
+
116
+ Returns:
117
+ x: (B, D, H, W, C)
118
+ """
119
+ x = windows.view(
120
+ B,
121
+ D // window_size[0],
122
+ H // window_size[1],
123
+ W // window_size[2],
124
+ window_size[0],
125
+ window_size[1],
126
+ window_size[2],
127
+ -1,
128
+ )
129
+ x = x.permute(0, 1, 4, 2, 5, 3, 6, 7).contiguous().view(B, D, H, W, -1)
130
+ return x
131
+
132
+
133
+ def get_window_size(x_size, window_size, shift_size=None):
134
+ use_window_size = list(window_size)
135
+ if shift_size is not None:
136
+ use_shift_size = list(shift_size)
137
+ for i in range(len(x_size)):
138
+ if x_size[i] <= window_size[i]:
139
+ use_window_size[i] = x_size[i]
140
+ if shift_size is not None:
141
+ use_shift_size[i] = 0
142
+
143
+ if shift_size is None:
144
+ return tuple(use_window_size)
145
+ else:
146
+ return tuple(use_window_size), tuple(use_shift_size)
147
+
148
+
149
+ class WindowAttention3D(nn.Module):
150
+ """Window based multi-head self attention (W-MSA) module with relative position bias.
151
+ It supports both of shifted and non-shifted window.
152
+ Args:
153
+ dim (int): Number of input channels.
154
+ window_size (tuple[int]): The temporal length, height and width of the window.
155
+ num_heads (int): Number of attention heads.
156
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
157
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
158
+ attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
159
+ proj_drop (float, optional): Dropout ratio of output. Default: 0.0
160
+ """
161
+
162
+ def __init__(
163
+ self,
164
+ dim,
165
+ window_size,
166
+ num_heads,
167
+ qkv_bias=False,
168
+ qk_scale=None,
169
+ attn_drop=0.0,
170
+ proj_drop=0.0,
171
+ frag_bias=False,
172
+ ):
173
+
174
+ super().__init__()
175
+ self.dim = dim
176
+ self.window_size = window_size # Wd, Wh, Ww
177
+ self.num_heads = num_heads
178
+ head_dim = dim // num_heads
179
+ self.scale = qk_scale or head_dim ** -0.5
180
+
181
+ # define a parameter table of relative position bias
182
+ self.relative_position_bias_table = nn.Parameter(
183
+ torch.zeros(
184
+ (2 * window_size[0] - 1)
185
+ * (2 * window_size[1] - 1)
186
+ * (2 * window_size[2] - 1),
187
+ num_heads,
188
+ )
189
+ ) # 2*Wd-1 * 2*Wh-1 * 2*Ww-1, nH
190
+ if frag_bias:
191
+ self.fragment_position_bias_table = nn.Parameter(
192
+ torch.zeros(
193
+ (2 * window_size[0] - 1)
194
+ * (2 * window_size[1] - 1)
195
+ * (2 * window_size[2] - 1),
196
+ num_heads,
197
+ )
198
+ )
199
+
200
+ # get pair-wise relative position index for each token inside the window
201
+ coords_d = torch.arange(self.window_size[0])
202
+ coords_h = torch.arange(self.window_size[1])
203
+ coords_w = torch.arange(self.window_size[2])
204
+ coords = torch.stack(
205
+ torch.meshgrid(coords_d, coords_h, coords_w)
206
+ ) # 3, Wd, Wh, Ww
207
+ coords_flatten = torch.flatten(coords, 1) # 3, Wd*Wh*Ww
208
+ relative_coords = (
209
+ coords_flatten[:, :, None] - coords_flatten[:, None, :]
210
+ ) # 3, Wd*Wh*Ww, Wd*Wh*Ww
211
+ relative_coords = relative_coords.permute(
212
+ 1, 2, 0
213
+ ).contiguous() # Wd*Wh*Ww, Wd*Wh*Ww, 3
214
+ relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0
215
+ relative_coords[:, :, 1] += self.window_size[1] - 1
216
+ relative_coords[:, :, 2] += self.window_size[2] - 1
217
+
218
+ relative_coords[:, :, 0] *= (2 * self.window_size[1] - 1) * (
219
+ 2 * self.window_size[2] - 1
220
+ )
221
+ relative_coords[:, :, 1] *= 2 * self.window_size[2] - 1
222
+ relative_position_index = relative_coords.sum(-1) # Wd*Wh*Ww, Wd*Wh*Ww
223
+ self.register_buffer("relative_position_index", relative_position_index)
224
+
225
+ self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
226
+ self.attn_drop = nn.Dropout(attn_drop)
227
+ self.proj = nn.Linear(dim, dim)
228
+ self.proj_drop = nn.Dropout(proj_drop)
229
+
230
+ trunc_normal_(self.relative_position_bias_table, std=0.02)
231
+ self.softmax = nn.Softmax(dim=-1)
232
+
233
+ def forward(self, x, mask=None, fmask=None):
234
+ """Forward function.
235
+ Args:
236
+ x: input features with shape of (num_windows*B, N, C)
237
+ mask: (0/-inf) mask with shape of (num_windows, N, N) or None
238
+ """
239
+ B_, N, C = x.shape
240
+ qkv = (
241
+ self.qkv(x)
242
+ .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
243
+ .permute(2, 0, 3, 1, 4)
244
+ )
245
+ q, k, v = qkv[0], qkv[1], qkv[2] # B_, nH, N, C
246
+
247
+ q = q * self.scale
248
+ attn = q @ k.transpose(-2, -1)
249
+
250
+ relative_position_bias = self.relative_position_bias_table[
251
+ self.relative_position_index[:N, :N].reshape(-1)
252
+ ].reshape(
253
+ N, N, -1
254
+ ) # Wd*Wh*Ww,Wd*Wh*Ww,nH
255
+ relative_position_bias = relative_position_bias.permute(
256
+ 2, 0, 1
257
+ ).contiguous() # nH, Wd*Wh*Ww, Wd*Wh*Ww
258
+ if hasattr(self, "fragment_position_bias_table"):
259
+ fragment_position_bias = self.fragment_position_bias_table[
260
+ self.relative_position_index[:N, :N].reshape(-1)
261
+ ].reshape(
262
+ N, N, -1
263
+ ) # Wd*Wh*Ww,Wd*Wh*Ww,nH
264
+ fragment_position_bias = fragment_position_bias.permute(
265
+ 2, 0, 1
266
+ ).contiguous() # nH, Wd*Wh*Ww, Wd*Wh*Ww
267
+
268
+ ### Mask Position Bias
269
+ if fmask is not None:
270
+ # fgate = torch.where(fmask - fmask.transpose(-1, -2) == 0, 1, 0).float()
271
+ fgate = fmask.abs().sum(-1)
272
+ nW = fmask.shape[0]
273
+ relative_position_bias = relative_position_bias.unsqueeze(0)
274
+ fgate = fgate.unsqueeze(1)
275
+ # print(fgate.shape, relative_position_bias.shape)
276
+ if hasattr(self, "fragment_position_bias_table"):
277
+ relative_position_bias = (
278
+ relative_position_bias * fgate
279
+ + fragment_position_bias * (1 - fgate)
280
+ )
281
+
282
+ attn = attn.view(
283
+ B_ // nW, nW, self.num_heads, N, N
284
+ ) + relative_position_bias.unsqueeze(0)
285
+ attn = attn.view(-1, self.num_heads, N, N)
286
+ else:
287
+ attn = attn + relative_position_bias.unsqueeze(0) # B_, nH, N, N
288
+
289
+ if mask is not None:
290
+ nW = mask.shape[0]
291
+ attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(
292
+ 1
293
+ ).unsqueeze(0)
294
+ attn = attn.view(-1, self.num_heads, N, N)
295
+ attn = self.softmax(attn)
296
+ else:
297
+ attn = self.softmax(attn)
298
+ attn = self.attn_drop(attn)
299
+
300
+ if B_ < 16:
301
+ avg_attn = (attn.mean((1, 2)).detach(), attn.mean((1, 3)).detach())
302
+ else:
303
+ avg_attn = None
304
+
305
+ x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
306
+ x = self.proj(x)
307
+ x = self.proj_drop(x)
308
+
309
+ return x, avg_attn
310
+
311
+
312
+ class SwinTransformerBlock3D(nn.Module):
313
+ """Swin Transformer Block.
314
+
315
+ Args:
316
+ dim (int): Number of input channels.
317
+ num_heads (int): Number of attention heads.
318
+ window_size (tuple[int]): Window size.
319
+ shift_size (tuple[int]): Shift size for SW-MSA.
320
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
321
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
322
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
323
+ drop (float, optional): Dropout rate. Default: 0.0
324
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
325
+ drop_path (float, optional): Stochastic depth rate. Default: 0.0
326
+ act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
327
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
328
+ """
329
+
330
+ def __init__(
331
+ self,
332
+ dim,
333
+ num_heads,
334
+ window_size=(2, 7, 7),
335
+ shift_size=(0, 0, 0),
336
+ mlp_ratio=4.0,
337
+ qkv_bias=True,
338
+ qk_scale=None,
339
+ drop=0.0,
340
+ attn_drop=0.0,
341
+ drop_path=0.0,
342
+ act_layer=nn.GELU,
343
+ norm_layer=nn.LayerNorm,
344
+ use_checkpoint=False,
345
+ jump_attention=False,
346
+ frag_bias=False,
347
+ ):
348
+ super().__init__()
349
+ self.dim = dim
350
+ self.num_heads = num_heads
351
+ self.window_size = window_size
352
+ self.shift_size = shift_size
353
+ self.mlp_ratio = mlp_ratio
354
+ self.use_checkpoint = use_checkpoint
355
+ self.jump_attention = jump_attention
356
+ self.frag_bias = frag_bias
357
+
358
+ assert (
359
+ 0 <= self.shift_size[0] < self.window_size[0]
360
+ ), "shift_size must in 0-window_size"
361
+ assert (
362
+ 0 <= self.shift_size[1] < self.window_size[1]
363
+ ), "shift_size must in 0-window_size"
364
+ assert (
365
+ 0 <= self.shift_size[2] < self.window_size[2]
366
+ ), "shift_size must in 0-window_size"
367
+
368
+ self.norm1 = norm_layer(dim)
369
+ self.attn = WindowAttention3D(
370
+ dim,
371
+ window_size=self.window_size,
372
+ num_heads=num_heads,
373
+ qkv_bias=qkv_bias,
374
+ qk_scale=qk_scale,
375
+ attn_drop=attn_drop,
376
+ proj_drop=drop,
377
+ frag_bias=frag_bias,
378
+ )
379
+
380
+ self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
381
+ self.norm2 = norm_layer(dim)
382
+ mlp_hidden_dim = int(dim * mlp_ratio)
383
+ self.mlp = Mlp(
384
+ in_features=dim,
385
+ hidden_features=mlp_hidden_dim,
386
+ act_layer=act_layer,
387
+ drop=drop,
388
+ )
389
+
390
+ def forward_part1(self, x, mask_matrix):
391
+ B, D, H, W, C = x.shape
392
+ window_size, shift_size = get_window_size(
393
+ (D, H, W), self.window_size, self.shift_size
394
+ )
395
+
396
+ x = self.norm1(x)
397
+ # pad feature maps to multiples of window size
398
+ pad_l = pad_t = pad_d0 = 0
399
+ pad_d1 = (window_size[0] - D % window_size[0]) % window_size[0]
400
+ pad_b = (window_size[1] - H % window_size[1]) % window_size[1]
401
+ pad_r = (window_size[2] - W % window_size[2]) % window_size[2]
402
+
403
+ x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1))
404
+ _, Dp, Hp, Wp, _ = x.shape
405
+ if False: # not hasattr(self, 'finfo_windows'):
406
+ finfo = fragment_infos(Dp, Hp, Wp)
407
+
408
+ # cyclic shift
409
+ if any(i > 0 for i in shift_size):
410
+ shifted_x = torch.roll(
411
+ x,
412
+ shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
413
+ dims=(1, 2, 3),
414
+ )
415
+ if False: # not hasattr(self, 'finfo_windows'):
416
+ shifted_finfo = torch.roll(
417
+ finfo,
418
+ shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
419
+ dims=(1, 2, 3),
420
+ )
421
+ attn_mask = mask_matrix
422
+ else:
423
+ shifted_x = x
424
+ if False: # not hasattr(self, 'finfo_windows'):
425
+ shifted_finfo = finfo
426
+ attn_mask = None
427
+ # partition windows
428
+ x_windows = window_partition(shifted_x, window_size) # B*nW, Wd*Wh*Ww, C
429
+ if False: # not hasattr(self, 'finfo_windows'):
430
+ self.finfo_windows = window_partition(shifted_finfo, window_size)
431
+ # W-MSA/SW-MSA
432
+ # print(shift_size)
433
+ gpi = global_position_index(
434
+ Dp, Hp, Wp, window_size=window_size, shift_size=shift_size, device=x.device
435
+ )
436
+ attn_windows, avg_attn = self.attn(
437
+ x_windows, mask=attn_mask, fmask=gpi
438
+ ) # self.finfo_windows) # B*nW, Wd*Wh*Ww, C
439
+ # merge windows
440
+ attn_windows = attn_windows.view(-1, *(window_size + (C,)))
441
+ shifted_x = window_reverse(
442
+ attn_windows, window_size, B, Dp, Hp, Wp
443
+ ) # B D' H' W' C
444
+ # reverse cyclic shift
445
+ if any(i > 0 for i in shift_size):
446
+ x = torch.roll(
447
+ shifted_x,
448
+ shifts=(shift_size[0], shift_size[1], shift_size[2]),
449
+ dims=(1, 2, 3),
450
+ )
451
+ else:
452
+ x = shifted_x
453
+
454
+ if pad_d1 > 0 or pad_r > 0 or pad_b > 0:
455
+ x = x[:, :D, :H, :W, :].contiguous()
456
+ return x, avg_attn
457
+
458
+ def forward_part2(self, x):
459
+ return self.drop_path(self.mlp(self.norm2(x)))
460
+
461
+ def forward(self, x, mask_matrix):
462
+ """Forward function.
463
+
464
+ Args:
465
+ x: Input feature, tensor size (B, D, H, W, C).
466
+ mask_matrix: Attention mask for cyclic shift.
467
+ """
468
+
469
+ shortcut = x
470
+ if not self.jump_attention:
471
+ if self.use_checkpoint:
472
+ x, avg_attn = checkpoint.checkpoint(self.forward_part1, x, mask_matrix)
473
+ else:
474
+ x, avg_attn = self.forward_part1(x, mask_matrix)
475
+ x = shortcut + self.drop_path(x)
476
+
477
+ if self.use_checkpoint:
478
+ x = x + checkpoint.checkpoint(self.forward_part2, x)
479
+ else:
480
+ x = x + self.forward_part2(x)
481
+
482
+ return x, avg_attn
483
+
484
+
485
+ class PatchMerging(nn.Module):
486
+ """Patch Merging Layer
487
+
488
+ Args:
489
+ dim (int): Number of input channels.
490
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
491
+ """
492
+
493
+ def __init__(self, dim, norm_layer=nn.LayerNorm):
494
+ super().__init__()
495
+ self.dim = dim
496
+ self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
497
+ self.norm = norm_layer(4 * dim)
498
+
499
+ def forward(self, x):
500
+ """Forward function.
501
+
502
+ Args:
503
+ x: Input feature, tensor size (B, D, H, W, C).
504
+ """
505
+ B, D, H, W, C = x.shape
506
+
507
+ # padding
508
+ pad_input = (H % 2 == 1) or (W % 2 == 1)
509
+ if pad_input:
510
+ x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2))
511
+
512
+ x0 = x[:, :, 0::2, 0::2, :] # B D H/2 W/2 C
513
+ x1 = x[:, :, 1::2, 0::2, :] # B D H/2 W/2 C
514
+ x2 = x[:, :, 0::2, 1::2, :] # B D H/2 W/2 C
515
+ x3 = x[:, :, 1::2, 1::2, :] # B D H/2 W/2 C
516
+ x = torch.cat([x0, x1, x2, x3], -1) # B D H/2 W/2 4*C
517
+
518
+ x = self.norm(x)
519
+ x = self.reduction(x)
520
+
521
+ return x
522
+
523
+
524
+ # cache each stage results
525
+ @lru_cache()
526
+ def compute_mask(D, H, W, window_size, shift_size, device):
527
+ img_mask = torch.zeros((1, D, H, W, 1), device=device) # 1 Dp Hp Wp 1
528
+ cnt = 0
529
+ for d in (
530
+ slice(-window_size[0]),
531
+ slice(-window_size[0], -shift_size[0]),
532
+ slice(-shift_size[0], None),
533
+ ):
534
+ for h in (
535
+ slice(-window_size[1]),
536
+ slice(-window_size[1], -shift_size[1]),
537
+ slice(-shift_size[1], None),
538
+ ):
539
+ for w in (
540
+ slice(-window_size[2]),
541
+ slice(-window_size[2], -shift_size[2]),
542
+ slice(-shift_size[2], None),
543
+ ):
544
+ img_mask[:, d, h, w, :] = cnt
545
+ cnt += 1
546
+ mask_windows = window_partition(img_mask, window_size) # nW, ws[0]*ws[1]*ws[2], 1
547
+ mask_windows = mask_windows.squeeze(-1) # nW, ws[0]*ws[1]*ws[2]
548
+ attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
549
+ attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
550
+ attn_mask == 0, float(0.0)
551
+ )
552
+ return attn_mask
553
+
554
+
555
+ class BasicLayer(nn.Module):
556
+ """A basic Swin Transformer layer for one stage.
557
+
558
+ Args:
559
+ dim (int): Number of feature channels
560
+ depth (int): Depths of this stage.
561
+ num_heads (int): Number of attention head.
562
+ window_size (tuple[int]): Local window size. Default: (1,7,7).
563
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
564
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
565
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
566
+ drop (float, optional): Dropout rate. Default: 0.0
567
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
568
+ drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
569
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
570
+ downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
571
+ """
572
+
573
+ def __init__(
574
+ self,
575
+ dim,
576
+ depth,
577
+ num_heads,
578
+ window_size=(1, 7, 7),
579
+ mlp_ratio=4.0,
580
+ qkv_bias=False,
581
+ qk_scale=None,
582
+ drop=0.0,
583
+ attn_drop=0.0,
584
+ drop_path=0.0,
585
+ norm_layer=nn.LayerNorm,
586
+ downsample=None,
587
+ use_checkpoint=False,
588
+ jump_attention=False,
589
+ frag_bias=False,
590
+ ):
591
+ super().__init__()
592
+ self.window_size = window_size
593
+ self.shift_size = tuple(i // 2 for i in window_size)
594
+ self.depth = depth
595
+ self.use_checkpoint = use_checkpoint
596
+
597
+ # build blocks
598
+ self.blocks = nn.ModuleList(
599
+ [
600
+ SwinTransformerBlock3D(
601
+ dim=dim,
602
+ num_heads=num_heads,
603
+ window_size=window_size,
604
+ shift_size=(0, 0, 0) if (i % 2 == 0) else self.shift_size,
605
+ mlp_ratio=mlp_ratio,
606
+ qkv_bias=qkv_bias,
607
+ qk_scale=qk_scale,
608
+ drop=drop,
609
+ attn_drop=attn_drop,
610
+ drop_path=drop_path[i]
611
+ if isinstance(drop_path, list)
612
+ else drop_path,
613
+ norm_layer=norm_layer,
614
+ use_checkpoint=use_checkpoint,
615
+ jump_attention=jump_attention,
616
+ frag_bias=frag_bias,
617
+ )
618
+ for i in range(depth)
619
+ ]
620
+ )
621
+
622
+ self.downsample = downsample
623
+ if self.downsample is not None:
624
+ self.downsample = downsample(dim=dim, norm_layer=norm_layer)
625
+
626
+ def forward(self, x):
627
+ """Forward function.
628
+
629
+ Args:
630
+ x: Input feature, tensor size (B, C, D, H, W).
631
+ """
632
+ # calculate attention mask for SW-MSA
633
+ B, C, D, H, W = x.shape
634
+ window_size, shift_size = get_window_size(
635
+ (D, H, W), self.window_size, self.shift_size
636
+ )
637
+ x = rearrange(x, "b c d h w -> b d h w c")
638
+ Dp = int(np.ceil(D / window_size[0])) * window_size[0]
639
+ Hp = int(np.ceil(H / window_size[1])) * window_size[1]
640
+ Wp = int(np.ceil(W / window_size[2])) * window_size[2]
641
+ attn_mask = compute_mask(Dp, Hp, Wp, window_size, shift_size, x.device)
642
+ avg_attns = []
643
+ for blk in self.blocks:
644
+ x, avg_attn = blk(x, attn_mask)
645
+ if avg_attn is not None:
646
+ avg_attns.append(avg_attn)
647
+ x = x.view(B, D, H, W, -1)
648
+
649
+ if self.downsample is not None:
650
+ x = self.downsample(x)
651
+ x = rearrange(x, "b d h w c -> b c d h w")
652
+ return x, avg_attns
653
+
654
+
655
+ class PatchEmbed3D(nn.Module):
656
+ """Video to Patch Embedding.
657
+
658
+ Args:
659
+ patch_size (int): Patch token size. Default: (2,4,4).
660
+ in_chans (int): Number of input video channels. Default: 3.
661
+ embed_dim (int): Number of linear projection output channels. Default: 96.
662
+ norm_layer (nn.Module, optional): Normalization layer. Default: None
663
+ """
664
+
665
+ def __init__(self, patch_size=(2, 4, 4), in_chans=3, embed_dim=96, norm_layer=None):
666
+ super().__init__()
667
+ self.patch_size = patch_size
668
+
669
+ self.in_chans = in_chans
670
+ self.embed_dim = embed_dim
671
+
672
+ self.proj = nn.Conv3d(
673
+ in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
674
+ )
675
+ if norm_layer is not None:
676
+ self.norm = norm_layer(embed_dim)
677
+ else:
678
+ self.norm = None
679
+
680
+ def forward(self, x):
681
+ """Forward function."""
682
+ # padding
683
+ _, _, D, H, W = x.size()
684
+ if W % self.patch_size[2] != 0:
685
+ x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2]))
686
+ if H % self.patch_size[1] != 0:
687
+ x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1]))
688
+ if D % self.patch_size[0] != 0:
689
+ x = F.pad(x, (0, 0, 0, 0, 0, self.patch_size[0] - D % self.patch_size[0]))
690
+
691
+ x = self.proj(x) # B C D Wh Ww
692
+ if self.norm is not None:
693
+ D, Wh, Ww = x.size(2), x.size(3), x.size(4)
694
+ x = x.flatten(2).transpose(1, 2)
695
+ x = self.norm(x)
696
+ x = x.transpose(1, 2).view(-1, self.embed_dim, D, Wh, Ww)
697
+
698
+ return x
699
+
700
+
701
+ class SwinTransformer3D(nn.Module):
702
+ """Swin Transformer backbone.
703
+ A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
704
+ https://arxiv.org/pdf/2103.14030
705
+
706
+ Args:
707
+ patch_size (int | tuple(int)): Patch size. Default: (4,4,4).
708
+ in_chans (int): Number of input image channels. Default: 3.
709
+ embed_dim (int): Number of linear projection output channels. Default: 96.
710
+ depths (tuple[int]): Depths of each Swin Transformer stage.
711
+ num_heads (tuple[int]): Number of attention head of each stage.
712
+ window_size (int): Window size. Default: 7.
713
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
714
+ qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
715
+ qk_scale (float): Override default qk scale of head_dim ** -0.5 if set.
716
+ drop_rate (float): Dropout rate.
717
+ attn_drop_rate (float): Attention dropout rate. Default: 0.
718
+ drop_path_rate (float): Stochastic depth rate. Default: 0.2.
719
+ norm_layer: Normalization layer. Default: nn.LayerNorm.
720
+ patch_norm (bool): If True, add normalization after patch embedding. Default: False.
721
+ frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
722
+ -1 means not freezing any parameters.
723
+ """
724
+
725
+ def __init__(
726
+ self,
727
+ pretrained=None,
728
+ pretrained2d=False,
729
+ patch_size=(2, 4, 4),
730
+ in_chans=3,
731
+ embed_dim=96,
732
+ depths=[2, 2, 6, 2],
733
+ num_heads=[3, 6, 12, 24],
734
+ window_size=(8, 7, 7),
735
+ mlp_ratio=4.0,
736
+ qkv_bias=True,
737
+ qk_scale=None,
738
+ drop_rate=0.0,
739
+ attn_drop_rate=0.0,
740
+ drop_path_rate=0.1,
741
+ norm_layer=nn.LayerNorm,
742
+ patch_norm=True,
743
+ frozen_stages=-1,
744
+ use_checkpoint=True,
745
+ jump_attention=[False, False, False, False],
746
+ frag_biases=[True, True, True, False],
747
+ ):
748
+ super().__init__()
749
+ print(frag_biases)
750
+
751
+ self.pretrained = pretrained
752
+ self.pretrained2d = pretrained2d
753
+ self.num_layers = len(depths)
754
+ self.embed_dim = embed_dim
755
+ self.patch_norm = patch_norm
756
+ self.frozen_stages = frozen_stages
757
+ self.window_size = window_size
758
+ self.patch_size = patch_size
759
+
760
+ # split image into non-overlapping patches
761
+ self.patch_embed = PatchEmbed3D(
762
+ patch_size=patch_size,
763
+ in_chans=in_chans,
764
+ embed_dim=embed_dim,
765
+ norm_layer=norm_layer if self.patch_norm else None,
766
+ )
767
+
768
+ self.pos_drop = nn.Dropout(p=drop_rate)
769
+
770
+ # stochastic depth
771
+ dpr = [
772
+ x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))
773
+ ] # stochastic depth decay rule
774
+
775
+ # build layers
776
+ self.layers = nn.ModuleList()
777
+ for i_layer in range(self.num_layers):
778
+ layer = BasicLayer(
779
+ dim=int(embed_dim * 2 ** i_layer),
780
+ depth=depths[i_layer],
781
+ num_heads=num_heads[i_layer],
782
+ window_size=window_size,
783
+ mlp_ratio=mlp_ratio,
784
+ qkv_bias=qkv_bias,
785
+ qk_scale=qk_scale,
786
+ drop=drop_rate,
787
+ attn_drop=attn_drop_rate,
788
+ drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])],
789
+ norm_layer=norm_layer,
790
+ downsample=PatchMerging if i_layer < self.num_layers - 1 else None,
791
+ use_checkpoint=use_checkpoint,
792
+ jump_attention=jump_attention[i_layer],
793
+ frag_bias=frag_biases[i_layer],
794
+ )
795
+ self.layers.append(layer)
796
+
797
+ self.num_features = int(embed_dim * 2 ** (self.num_layers - 1))
798
+
799
+ # add a norm layer for each output
800
+ self.norm = norm_layer(self.num_features)
801
+
802
+ self._freeze_stages()
803
+
804
+ def _freeze_stages(self):
805
+ if self.frozen_stages >= 0:
806
+ self.patch_embed.eval()
807
+ for param in self.patch_embed.parameters():
808
+ param.requires_grad = False
809
+
810
+ if self.frozen_stages >= 1:
811
+ self.pos_drop.eval()
812
+ for i in range(0, self.frozen_stages):
813
+ m = self.layers[i]
814
+ m.eval()
815
+ for param in m.parameters():
816
+ param.requires_grad = False
817
+
818
+ def inflate_weights(self, logger):
819
+ """Inflate the swin2d parameters to swin3d.
820
+
821
+ The differences between swin3d and swin2d mainly lie in an extra
822
+ axis. To utilize the pretrained parameters in 2d model,
823
+ the weight of swin2d models should be inflated to fit in the shapes of
824
+ the 3d counterpart.
825
+
826
+ Args:
827
+ logger (logging.Logger): The logger used to print
828
+ debugging information.
829
+ """
830
+ checkpoint = torch.load(self.pretrained, map_location="cpu")
831
+ state_dict = checkpoint["model"]
832
+
833
+ # delete relative_position_index since we always re-init it
834
+ relative_position_index_keys = [
835
+ k for k in state_dict.keys() if "relative_position_index" in k
836
+ ]
837
+ for k in relative_position_index_keys:
838
+ del state_dict[k]
839
+
840
+ # delete attn_mask since we always re-init it
841
+ attn_mask_keys = [k for k in state_dict.keys() if "attn_mask" in k]
842
+ for k in attn_mask_keys:
843
+ del state_dict[k]
844
+
845
+ state_dict["patch_embed.proj.weight"] = (
846
+ state_dict["patch_embed.proj.weight"]
847
+ .unsqueeze(2)
848
+ .repeat(1, 1, self.patch_size[0], 1, 1)
849
+ / self.patch_size[0]
850
+ )
851
+
852
+ # bicubic interpolate relative_position_bias_table if not match
853
+ relative_position_bias_table_keys = [
854
+ k for k in state_dict.keys() if "relative_position_bias_table" in k
855
+ ]
856
+ for k in relative_position_bias_table_keys:
857
+ relative_position_bias_table_pretrained = state_dict[k]
858
+ relative_position_bias_table_current = self.state_dict()[k]
859
+ L1, nH1 = relative_position_bias_table_pretrained.size()
860
+ L2, nH2 = relative_position_bias_table_current.size()
861
+ L2 = (2 * self.window_size[1] - 1) * (2 * self.window_size[2] - 1)
862
+ wd = self.window_size[0]
863
+ if nH1 != nH2:
864
+ logger.warning(f"Error in loading {k}, passing")
865
+ else:
866
+ if L1 != L2:
867
+ S1 = int(L1 ** 0.5)
868
+ relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
869
+ relative_position_bias_table_pretrained.permute(1, 0).view(
870
+ 1, nH1, S1, S1
871
+ ),
872
+ size=(
873
+ 2 * self.window_size[1] - 1,
874
+ 2 * self.window_size[2] - 1,
875
+ ),
876
+ mode="bicubic",
877
+ )
878
+ relative_position_bias_table_pretrained = relative_position_bias_table_pretrained_resized.view(
879
+ nH2, L2
880
+ ).permute(
881
+ 1, 0
882
+ )
883
+ state_dict[k] = relative_position_bias_table_pretrained.repeat(
884
+ 2 * wd - 1, 1
885
+ )
886
+
887
+ msg = self.load_state_dict(state_dict, strict=False)
888
+ logger.info(msg)
889
+ logger.info(f"=> loaded successfully '{self.pretrained}'")
890
+ del checkpoint
891
+ torch.cuda.empty_cache()
892
+
893
+ def load_checkpoint(self, load_path, strict=False):
894
+ from collections import OrderedDict
895
+
896
+ model_state_dict = self.state_dict()
897
+ state_dict = torch.load(load_path)["state_dict"]
898
+
899
+ clean_dict = OrderedDict()
900
+ for key, value in state_dict.items():
901
+ if "backbone" in key:
902
+ clean_key = key[9:]
903
+ clean_dict[clean_key] = value
904
+ if "relative_position_bias_table" in clean_key:
905
+ forked_key = clean_key.replace(
906
+ "relative_position_bias_table", "fragment_position_bias_table"
907
+ )
908
+ if forked_key in clean_dict:
909
+ print(
910
+ f"Passing key {forked_key} as it is already in state_dict."
911
+ )
912
+ else:
913
+ clean_dict[forked_key] = value
914
+
915
+ ## Only Support for 2X
916
+ for key, value in model_state_dict.items():
917
+ if key in clean_dict:
918
+ if value.shape != clean_dict[key].shape:
919
+ clean_dict.pop(key)
920
+
921
+ self.load_state_dict(clean_dict, strict=strict)
922
+
923
+ def init_weights(self, pretrained=None):
924
+ print(self.pretrained, self.pretrained2d)
925
+ """Initialize the weights in backbone.
926
+
927
+ Args:
928
+ pretrained (str, optional): Path to pre-trained weights.
929
+ Defaults to None.
930
+ """
931
+
932
+ def _init_weights(m):
933
+ if isinstance(m, nn.Linear):
934
+ trunc_normal_(m.weight, std=0.02)
935
+ if isinstance(m, nn.Linear) and m.bias is not None:
936
+ nn.init.constant_(m.bias, 0)
937
+ elif isinstance(m, nn.LayerNorm):
938
+ nn.init.constant_(m.bias, 0)
939
+ nn.init.constant_(m.weight, 1.0)
940
+
941
+ if pretrained:
942
+ self.pretrained = pretrained
943
+ if isinstance(self.pretrained, str):
944
+ self.apply(_init_weights)
945
+ logger = get_root_logger()
946
+ logger.info(f"load model from: {self.pretrained}")
947
+
948
+ if self.pretrained2d:
949
+ # Inflate 2D model into 3D model.
950
+ self.inflate_weights(logger)
951
+ else:
952
+ # Directly load 3D model.
953
+ self.load_checkpoint(self.pretrained, strict=False) # , logger=logger)
954
+ elif self.pretrained is None:
955
+ self.apply(_init_weights)
956
+ else:
957
+ raise TypeError("pretrained must be a str or None")
958
+
959
+ def forward(self, x, multi=False, require_attn=False):
960
+ """Forward function."""
961
+ x = self.patch_embed(x)
962
+
963
+ x = self.pos_drop(x)
964
+
965
+ if multi:
966
+ feats = [x]
967
+
968
+ for layer in self.layers:
969
+ x, avg_attns = layer(x.contiguous())
970
+ if multi:
971
+ feats += [x]
972
+
973
+ x = rearrange(x, "n c d h w -> n d h w c")
974
+ x = self.norm(x)
975
+ x = rearrange(x, "n d h w c -> n c d h w")
976
+
977
+ if multi:
978
+ x = feats[:-1] + [x]
979
+ else:
980
+ x = x
981
+
982
+ if require_attn:
983
+ return x, avg_attns
984
+ else:
985
+ return x
986
+
987
+ def train(self, mode=True):
988
+ """Convert the model into training mode while keep layers freezed."""
989
+ super(SwinTransformer3D, self).train(mode)
990
+ self._freeze_stages()
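This attention-returning variant mirrors the exported swin_backbone, but additionally hands back per-window average attention maps (only when the flattened window batch is small, see the B_ < 16 guard) so they can be inspected via forward(..., require_attn=True). Its helpers are easy to poke at in isolation; a small sketch, assuming the repo root is on PYTHONPATH:

import torch
from cover.models.backbone_get_attention import get_window_size, fragment_infos

# The temporal window is clamped to the clip length and its shift zeroed:
ws, ss = get_window_size((4, 56, 56), window_size=(8, 7, 7), shift_size=(4, 3, 3))
print(ws, ss)  # (4, 7, 7) (0, 3, 3)

# Fragment-id grid used by the fragment-aware position bias (device="cpu" to avoid the CUDA default):
fid = fragment_infos(4, 56, 56, fragments=7, device="cpu")
print(fid.shape)  # torch.Size([1, 4, 56, 56, 1])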
cover/models/backbone_v0_1.py ADDED
@@ -0,0 +1,862 @@
1
+ from functools import lru_cache, reduce
2
+ from operator import mul
3
+
4
+ import numpy as np
5
+ import torch
6
+ import torch.nn as nn
7
+ import torch.nn.functional as F
8
+ import torch.utils.checkpoint as checkpoint
9
+ from einops import rearrange
10
+ from timm.models.layers import DropPath, trunc_normal_
11
+
12
+
13
+ class Mlp(nn.Module):
14
+ """Multilayer perceptron."""
15
+
16
+ def __init__(
17
+ self,
18
+ in_features,
19
+ hidden_features=None,
20
+ out_features=None,
21
+ act_layer=nn.GELU,
22
+ drop=0.0,
23
+ ):
24
+ super().__init__()
25
+ out_features = out_features or in_features
26
+ hidden_features = hidden_features or in_features
27
+ self.fc1 = nn.Linear(in_features, hidden_features)
28
+ self.act = act_layer()
29
+ self.fc2 = nn.Linear(hidden_features, out_features)
30
+ self.drop = nn.Dropout(drop)
31
+
32
+ def forward(self, x):
33
+ x = self.fc1(x)
34
+ x = self.act(x)
35
+ x = self.drop(x)
36
+ x = self.fc2(x)
37
+ x = self.drop(x)
38
+ return x
39
+
40
+
41
+ def window_partition(x, window_size):
42
+ """
43
+ Args:
44
+ x: (B, D, H, W, C)
45
+ window_size (tuple[int]): window size
46
+
47
+ Returns:
48
+ windows: (B*num_windows, window_size*window_size, C)
49
+ """
50
+ B, D, H, W, C = x.shape
51
+ x = x.view(
52
+ B,
53
+ D // window_size[0],
54
+ window_size[0],
55
+ H // window_size[1],
56
+ window_size[1],
57
+ W // window_size[2],
58
+ window_size[2],
59
+ C,
60
+ )
61
+ windows = (
62
+ x.permute(0, 1, 3, 5, 2, 4, 6, 7)
63
+ .contiguous()
64
+ .view(-1, reduce(mul, window_size), C)
65
+ )
66
+ return windows
67
+
68
+
69
+ def window_reverse(windows, window_size, B, D, H, W):
70
+ """
71
+ Args:
72
+ windows: (B*num_windows, window_size, window_size, C)
73
+ window_size (tuple[int]): Window size
74
+ H (int): Height of image
75
+ W (int): Width of image
76
+
77
+ Returns:
78
+ x: (B, D, H, W, C)
79
+ """
80
+ x = windows.view(
81
+ B,
82
+ D // window_size[0],
83
+ H // window_size[1],
84
+ W // window_size[2],
85
+ window_size[0],
86
+ window_size[1],
87
+ window_size[2],
88
+ -1,
89
+ )
90
+ x = x.permute(0, 1, 4, 2, 5, 3, 6, 7).contiguous().view(B, D, H, W, -1)
91
+ return x
92
+
93
+
94
+ def get_window_size(x_size, window_size, shift_size=None):
95
+ use_window_size = list(window_size)
96
+ if shift_size is not None:
97
+ use_shift_size = list(shift_size)
98
+ for i in range(len(x_size)):
99
+ if x_size[i] <= window_size[i]:
100
+ use_window_size[i] = x_size[i]
101
+ if shift_size is not None:
102
+ use_shift_size[i] = 0
103
+
104
+ if shift_size is None:
105
+ return tuple(use_window_size)
106
+ else:
107
+ return tuple(use_window_size), tuple(use_shift_size)
108
+
109
+
110
+ class WindowAttention3D(nn.Module):
111
+ """Window based multi-head self attention (W-MSA) module with relative position bias.
112
+ It supports both of shifted and non-shifted window.
113
+ Args:
114
+ dim (int): Number of input channels.
115
+ window_size (tuple[int]): The temporal length, height and width of the window.
116
+ num_heads (int): Number of attention heads.
117
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
118
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
119
+ attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
120
+ proj_drop (float, optional): Dropout ratio of output. Default: 0.0
121
+ """
122
+
123
+ def __init__(
124
+ self,
125
+ dim,
126
+ window_size,
127
+ num_heads,
128
+ qkv_bias=False,
129
+ qk_scale=None,
130
+ attn_drop=0.0,
131
+ proj_drop=0.0,
132
+ ):
133
+
134
+ super().__init__()
135
+ self.dim = dim
136
+ self.window_size = window_size # Wd, Wh, Ww
137
+ self.num_heads = num_heads
138
+ head_dim = dim // num_heads
139
+ self.scale = qk_scale or head_dim ** -0.5
140
+
141
+ # define a parameter table of relative position bias
142
+ self.relative_position_bias_table = nn.Parameter(
143
+ torch.zeros(
144
+ (2 * window_size[0] - 1)
145
+ * (2 * window_size[1] - 1)
146
+ * (2 * window_size[2] - 1),
147
+ num_heads,
148
+ )
149
+ ) # 2*Wd-1 * 2*Wh-1 * 2*Ww-1, nH
150
+
151
+ # get pair-wise relative position index for each token inside the window
152
+ coords_d = torch.arange(self.window_size[0])
153
+ coords_h = torch.arange(self.window_size[1])
154
+ coords_w = torch.arange(self.window_size[2])
155
+ coords = torch.stack(
156
+ torch.meshgrid(coords_d, coords_h, coords_w)
157
+ ) # 3, Wd, Wh, Ww
158
+ coords_flatten = torch.flatten(coords, 1) # 3, Wd*Wh*Ww
159
+ relative_coords = (
160
+ coords_flatten[:, :, None] - coords_flatten[:, None, :]
161
+ ) # 3, Wd*Wh*Ww, Wd*Wh*Ww
162
+ relative_coords = relative_coords.permute(
163
+ 1, 2, 0
164
+ ).contiguous() # Wd*Wh*Ww, Wd*Wh*Ww, 3
165
+ relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0
166
+ relative_coords[:, :, 1] += self.window_size[1] - 1
167
+ relative_coords[:, :, 2] += self.window_size[2] - 1
168
+
169
+ relative_coords[:, :, 0] *= (2 * self.window_size[1] - 1) * (
170
+ 2 * self.window_size[2] - 1
171
+ )
172
+ relative_coords[:, :, 1] *= 2 * self.window_size[2] - 1
173
+ relative_position_index = relative_coords.sum(-1) # Wd*Wh*Ww, Wd*Wh*Ww
174
+ self.register_buffer("relative_position_index", relative_position_index)
175
+
176
+ self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
177
+ self.attn_drop = nn.Dropout(attn_drop)
178
+ self.proj = nn.Linear(dim, dim)
179
+ self.proj_drop = nn.Dropout(proj_drop)
180
+
181
+ trunc_normal_(self.relative_position_bias_table, std=0.02)
182
+ self.softmax = nn.Softmax(dim=-1)
183
+
184
+ def forward(self, x, mask=None):
185
+ """Forward function.
186
+ Args:
187
+ x: input features with shape of (num_windows*B, N, C)
188
+ mask: (0/-inf) mask with shape of (num_windows, N, N) or None
189
+ """
190
+ B_, N, C = x.shape
191
+ qkv = (
192
+ self.qkv(x)
193
+ .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
194
+ .permute(2, 0, 3, 1, 4)
195
+ )
196
+ q, k, v = qkv[0], qkv[1], qkv[2] # B_, nH, N, C
197
+
198
+ q = q * self.scale
199
+ attn = q @ k.transpose(-2, -1)
200
+
201
+ relative_position_bias = self.relative_position_bias_table[
202
+ self.relative_position_index[:N, :N].reshape(-1)
203
+ ].reshape(
204
+ N, N, -1
205
+ ) # Wd*Wh*Ww,Wd*Wh*Ww,nH
206
+ relative_position_bias = relative_position_bias.permute(
207
+ 2, 0, 1
208
+ ).contiguous() # nH, Wd*Wh*Ww, Wd*Wh*Ww
209
+ attn = attn + relative_position_bias.unsqueeze(0) # B_, nH, N, N
210
+
211
+ if mask is not None:
212
+ nW = mask.shape[0]
213
+ attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(
214
+ 1
215
+ ).unsqueeze(0)
216
+ attn = attn.view(-1, self.num_heads, N, N)
217
+ attn = self.softmax(attn)
218
+ else:
219
+ attn = self.softmax(attn)
220
+
221
+ attn = self.attn_drop(attn)
222
+
223
+ x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
224
+ x = self.proj(x)
225
+ x = self.proj_drop(x)
226
+ return x
227
+
228
+
229
+ class SwinTransformerBlock3D(nn.Module):
230
+ """Swin Transformer Block.
231
+
232
+ Args:
233
+ dim (int): Number of input channels.
234
+ num_heads (int): Number of attention heads.
235
+ window_size (tuple[int]): Window size.
236
+ shift_size (tuple[int]): Shift size for SW-MSA.
237
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
238
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
239
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
240
+ drop (float, optional): Dropout rate. Default: 0.0
241
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
242
+ drop_path (float, optional): Stochastic depth rate. Default: 0.0
243
+ act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
244
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
245
+ """
246
+
247
+ def __init__(
248
+ self,
249
+ dim,
250
+ num_heads,
251
+ window_size=(2, 7, 7),
252
+ shift_size=(0, 0, 0),
253
+ mlp_ratio=4.0,
254
+ qkv_bias=True,
255
+ qk_scale=None,
256
+ drop=0.0,
257
+ attn_drop=0.0,
258
+ drop_path=0.0,
259
+ act_layer=nn.GELU,
260
+ norm_layer=nn.LayerNorm,
261
+ use_checkpoint=False,
262
+ jump_attention=False,
263
+ ):
264
+ super().__init__()
265
+ self.dim = dim
266
+ self.num_heads = num_heads
267
+ self.window_size = window_size
268
+ self.shift_size = shift_size
269
+ self.mlp_ratio = mlp_ratio
270
+ self.use_checkpoint = use_checkpoint
271
+ self.jump_attention = jump_attention
272
+
273
+ assert (
274
+ 0 <= self.shift_size[0] < self.window_size[0]
275
+ ), "shift_size must in 0-window_size"
276
+ assert (
277
+ 0 <= self.shift_size[1] < self.window_size[1]
278
+ ), "shift_size must in 0-window_size"
279
+ assert (
280
+ 0 <= self.shift_size[2] < self.window_size[2]
281
+ ), "shift_size must in 0-window_size"
282
+
283
+ self.norm1 = norm_layer(dim)
284
+ self.attn = WindowAttention3D(
285
+ dim,
286
+ window_size=self.window_size,
287
+ num_heads=num_heads,
288
+ qkv_bias=qkv_bias,
289
+ qk_scale=qk_scale,
290
+ attn_drop=attn_drop,
291
+ proj_drop=drop,
292
+ )
293
+
294
+ self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
295
+ self.norm2 = norm_layer(dim)
296
+ mlp_hidden_dim = int(dim * mlp_ratio)
297
+ self.mlp = Mlp(
298
+ in_features=dim,
299
+ hidden_features=mlp_hidden_dim,
300
+ act_layer=act_layer,
301
+ drop=drop,
302
+ )
303
+
304
+ def forward_part1(self, x, mask_matrix):
305
+ B, D, H, W, C = x.shape
306
+ window_size, shift_size = get_window_size(
307
+ (D, H, W), self.window_size, self.shift_size
308
+ )
309
+
310
+ x = self.norm1(x)
311
+ # pad feature maps to multiples of window size
312
+ pad_l = pad_t = pad_d0 = 0
313
+ pad_d1 = (window_size[0] - D % window_size[0]) % window_size[0]
314
+ pad_b = (window_size[1] - H % window_size[1]) % window_size[1]
315
+ pad_r = (window_size[2] - W % window_size[2]) % window_size[2]
316
+ x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1))
317
+ _, Dp, Hp, Wp, _ = x.shape
318
+ # cyclic shift
319
+ if any(i > 0 for i in shift_size):
320
+ shifted_x = torch.roll(
321
+ x,
322
+ shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
323
+ dims=(1, 2, 3),
324
+ )
325
+ attn_mask = mask_matrix
326
+ else:
327
+ shifted_x = x
328
+ attn_mask = None
329
+ # partition windows
330
+ x_windows = window_partition(shifted_x, window_size) # B*nW, Wd*Wh*Ww, C
331
+ # W-MSA/SW-MSA
332
+ attn_windows = self.attn(x_windows, mask=attn_mask) # B*nW, Wd*Wh*Ww, C
333
+ # merge windows
334
+ attn_windows = attn_windows.view(-1, *(window_size + (C,)))
335
+ shifted_x = window_reverse(
336
+ attn_windows, window_size, B, Dp, Hp, Wp
337
+ ) # B D' H' W' C
338
+ # reverse cyclic shift
339
+ if any(i > 0 for i in shift_size):
340
+ x = torch.roll(
341
+ shifted_x,
342
+ shifts=(shift_size[0], shift_size[1], shift_size[2]),
343
+ dims=(1, 2, 3),
344
+ )
345
+ else:
346
+ x = shifted_x
347
+
348
+ if pad_d1 > 0 or pad_r > 0 or pad_b > 0:
349
+ x = x[:, :D, :H, :W, :].contiguous()
350
+ return x
351
+
352
+ def forward_part2(self, x):
353
+ return self.drop_path(self.mlp(self.norm2(x)))
354
+
355
+ def forward(self, x, mask_matrix):
356
+ """Forward function.
357
+
358
+ Args:
359
+ x: Input feature, tensor size (B, D, H, W, C).
360
+ mask_matrix: Attention mask for cyclic shift.
361
+ """
362
+
363
+ shortcut = x
364
+ if not self.jump_attention:
365
+ if self.use_checkpoint:
366
+ x = checkpoint.checkpoint(self.forward_part1, x, mask_matrix)
367
+ else:
368
+ x = self.forward_part1(x, mask_matrix)
369
+ x = shortcut + self.drop_path(x)
370
+
371
+ if self.use_checkpoint:
372
+ x = x + checkpoint.checkpoint(self.forward_part2, x)
373
+ else:
374
+ x = x + self.forward_part2(x)
375
+
376
+ return x
377
+
378
+
379
+ class PatchMerging(nn.Module):
380
+ """Patch Merging Layer
381
+
382
+ Args:
383
+ dim (int): Number of input channels.
384
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
385
+ """
386
+
387
+ def __init__(self, dim, norm_layer=nn.LayerNorm):
388
+ super().__init__()
389
+ self.dim = dim
390
+ self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
391
+ self.norm = norm_layer(4 * dim)
392
+
393
+ def forward(self, x):
394
+ """Forward function.
395
+
396
+ Args:
397
+ x: Input feature, tensor size (B, D, H, W, C).
398
+ """
399
+ B, D, H, W, C = x.shape
400
+
401
+ # padding
402
+ pad_input = (H % 2 == 1) or (W % 2 == 1)
403
+ if pad_input:
404
+ x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2))
405
+
406
+ x0 = x[:, :, 0::2, 0::2, :] # B D H/2 W/2 C
407
+ x1 = x[:, :, 1::2, 0::2, :] # B D H/2 W/2 C
408
+ x2 = x[:, :, 0::2, 1::2, :] # B D H/2 W/2 C
409
+ x3 = x[:, :, 1::2, 1::2, :] # B D H/2 W/2 C
410
+ x = torch.cat([x0, x1, x2, x3], -1) # B D H/2 W/2 4*C
411
+
412
+ x = self.norm(x)
413
+ x = self.reduction(x)
414
+
415
+ return x
416
+
417
+
418
+ # cache each stage results
419
+ @lru_cache()
420
+ def compute_mask(D, H, W, window_size, shift_size, device):
421
+ img_mask = torch.zeros((1, D, H, W, 1), device=device) # 1 Dp Hp Wp 1
422
+ cnt = 0
423
+ for d in (
424
+ slice(-window_size[0]),
425
+ slice(-window_size[0], -shift_size[0]),
426
+ slice(-shift_size[0], None),
427
+ ):
428
+ for h in (
429
+ slice(-window_size[1]),
430
+ slice(-window_size[1], -shift_size[1]),
431
+ slice(-shift_size[1], None),
432
+ ):
433
+ for w in (
434
+ slice(-window_size[2]),
435
+ slice(-window_size[2], -shift_size[2]),
436
+ slice(-shift_size[2], None),
437
+ ):
438
+ img_mask[:, d, h, w, :] = cnt
439
+ cnt += 1
440
+ mask_windows = window_partition(img_mask, window_size) # nW, ws[0]*ws[1]*ws[2], 1
441
+ mask_windows = mask_windows.squeeze(-1) # nW, ws[0]*ws[1]*ws[2]
442
+ attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
443
+ attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
444
+ attn_mask == 0, float(0.0)
445
+ )
446
+ return attn_mask
447
+
448
+
449
+ class BasicLayer(nn.Module):
450
+ """A basic Swin Transformer layer for one stage.
451
+
452
+ Args:
453
+ dim (int): Number of feature channels
454
+ depth (int): Depths of this stage.
455
+ num_heads (int): Number of attention head.
456
+ window_size (tuple[int]): Local window size. Default: (1,7,7).
457
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
458
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
459
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
460
+ drop (float, optional): Dropout rate. Default: 0.0
461
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
462
+ drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
463
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
464
+ downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
465
+ """
466
+
467
+ def __init__(
468
+ self,
469
+ dim,
470
+ depth,
471
+ num_heads,
472
+ window_size=(1, 7, 7),
473
+ mlp_ratio=4.0,
474
+ qkv_bias=False,
475
+ qk_scale=None,
476
+ drop=0.0,
477
+ attn_drop=0.0,
478
+ drop_path=0.0,
479
+ norm_layer=nn.LayerNorm,
480
+ downsample=None,
481
+ use_checkpoint=False,
482
+ jump_attention=False,
483
+ ):
484
+ super().__init__()
485
+ self.window_size = window_size
486
+ self.shift_size = tuple(i // 2 for i in window_size)
487
+ self.depth = depth
488
+ self.use_checkpoint = use_checkpoint
489
+
490
+ # build blocks
491
+ self.blocks = nn.ModuleList(
492
+ [
493
+ SwinTransformerBlock3D(
494
+ dim=dim,
495
+ num_heads=num_heads,
496
+ window_size=window_size,
497
+ shift_size=(0, 0, 0) if (i % 2 == 0) else self.shift_size,
498
+ mlp_ratio=mlp_ratio,
499
+ qkv_bias=qkv_bias,
500
+ qk_scale=qk_scale,
501
+ drop=drop,
502
+ attn_drop=attn_drop,
503
+ drop_path=drop_path[i]
504
+ if isinstance(drop_path, list)
505
+ else drop_path,
506
+ norm_layer=norm_layer,
507
+ use_checkpoint=use_checkpoint,
508
+ jump_attention=jump_attention,
509
+ )
510
+ for i in range(depth)
511
+ ]
512
+ )
513
+
514
+ self.downsample = downsample
515
+ if self.downsample is not None:
516
+ self.downsample = downsample(dim=dim, norm_layer=norm_layer)
517
+
518
+ def forward(self, x):
519
+ """Forward function.
520
+
521
+ Args:
522
+ x: Input feature, tensor size (B, C, D, H, W).
523
+ """
524
+ # calculate attention mask for SW-MSA
525
+ B, C, D, H, W = x.shape
526
+ window_size, shift_size = get_window_size(
527
+ (D, H, W), self.window_size, self.shift_size
528
+ )
529
+ x = rearrange(x, "b c d h w -> b d h w c")
530
+ Dp = int(np.ceil(D / window_size[0])) * window_size[0]
531
+ Hp = int(np.ceil(H / window_size[1])) * window_size[1]
532
+ Wp = int(np.ceil(W / window_size[2])) * window_size[2]
533
+ attn_mask = compute_mask(Dp, Hp, Wp, window_size, shift_size, x.device)
534
+ for blk in self.blocks:
535
+ x = blk(x, attn_mask)
536
+ x = x.view(B, D, H, W, -1)
537
+
538
+ if self.downsample is not None:
539
+ x = self.downsample(x)
540
+ x = rearrange(x, "b d h w c -> b c d h w")
541
+ return x
542
+
543
+
544
+ class PatchEmbed3D(nn.Module):
545
+ """Video to Patch Embedding.
546
+
547
+ Args:
548
+ patch_size (int): Patch token size. Default: (2,4,4).
549
+ in_chans (int): Number of input video channels. Default: 3.
550
+ embed_dim (int): Number of linear projection output channels. Default: 96.
551
+ norm_layer (nn.Module, optional): Normalization layer. Default: None
552
+ """
553
+
554
+ def __init__(self, patch_size=(2, 4, 4), in_chans=3, embed_dim=96, norm_layer=None):
555
+ super().__init__()
556
+ self.patch_size = patch_size
557
+
558
+ self.in_chans = in_chans
559
+ self.embed_dim = embed_dim
560
+
561
+ self.proj = nn.Conv3d(
562
+ in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
563
+ )
564
+ if norm_layer is not None:
565
+ self.norm = norm_layer(embed_dim)
566
+ else:
567
+ self.norm = None
568
+
569
+ def forward(self, x):
570
+ """Forward function."""
571
+ # padding
572
+ _, _, D, H, W = x.size()
573
+ if W % self.patch_size[2] != 0:
574
+ x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2]))
575
+ if H % self.patch_size[1] != 0:
576
+ x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1]))
577
+ if D % self.patch_size[0] != 0:
578
+ x = F.pad(x, (0, 0, 0, 0, 0, self.patch_size[0] - D % self.patch_size[0]))
579
+
580
+ x = self.proj(x) # B C D Wh Ww
581
+ if self.norm is not None:
582
+ D, Wh, Ww = x.size(2), x.size(3), x.size(4)
583
+ x = x.flatten(2).transpose(1, 2)
584
+ x = self.norm(x)
585
+ x = x.transpose(1, 2).view(-1, self.embed_dim, D, Wh, Ww)
586
+
587
+ return x
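
An illustrative shape sketch of the 3D patch embedding above: a clip of shape (B, 3, D, H, W) is projected to (B, embed_dim, D/2, H/4, W/4) by the strided Conv3d.

import torch
import torch.nn as nn

# Illustrative only: default (2, 4, 4) patches on a 16-frame 224x224 clip.
embed = PatchEmbed3D(patch_size=(2, 4, 4), in_chans=3, embed_dim=96, norm_layer=nn.LayerNorm)
clip = torch.randn(1, 3, 16, 224, 224)
print(embed(clip).shape)   # torch.Size([1, 96, 8, 56, 56])
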
588
+
589
+
590
+ class SwinTransformer3D(nn.Module):
591
+ """Swin Transformer backbone.
592
+ A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
593
+ https://arxiv.org/pdf/2103.14030
594
+
595
+ Args:
596
+ patch_size (int | tuple(int)): Patch size. Default: (2,4,4).
597
+ in_chans (int): Number of input image channels. Default: 3.
598
+ embed_dim (int): Number of linear projection output channels. Default: 96.
599
+ depths (tuple[int]): Depths of each Swin Transformer stage.
600
+ num_heads (tuple[int]): Number of attention head of each stage.
601
+ window_size (tuple[int]): Window size. Default: (8,7,7).
602
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
603
+ qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
604
+ qk_scale (float): Override default qk scale of head_dim ** -0.5 if set.
605
+ drop_rate (float): Dropout rate.
606
+ attn_drop_rate (float): Attention dropout rate. Default: 0.
607
+ drop_path_rate (float): Stochastic depth rate. Default: 0.2.
608
+ norm_layer: Normalization layer. Default: nn.LayerNorm.
609
+ patch_norm (bool): If True, add normalization after patch embedding. Default: False.
610
+ frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
611
+ -1 means not freezing any parameters.
612
+ """
613
+
614
+ def __init__(
615
+ self,
616
+ pretrained=None,
617
+ pretrained2d=False,
618
+ patch_size=(2, 4, 4),
619
+ in_chans=3,
620
+ embed_dim=96,
621
+ depths=[2, 2, 6, 2],
622
+ num_heads=[3, 6, 12, 24],
623
+ window_size=(8, 7, 7),
624
+ mlp_ratio=4.0,
625
+ qkv_bias=True,
626
+ qk_scale=None,
627
+ drop_rate=0.0,
628
+ attn_drop_rate=0.0,
629
+ drop_path_rate=0.1,
630
+ norm_layer=nn.LayerNorm,
631
+ patch_norm=True,
632
+ frozen_stages=-1,
633
+ use_checkpoint=True,
634
+ jump_attention=[False, False, False, False],
635
+ ):
636
+ super().__init__()
637
+
638
+ self.pretrained = pretrained
639
+ self.pretrained2d = pretrained2d
640
+ self.num_layers = len(depths)
641
+ self.embed_dim = embed_dim
642
+ self.patch_norm = patch_norm
643
+ self.frozen_stages = frozen_stages
644
+ self.window_size = window_size
645
+ self.patch_size = patch_size
646
+
647
+ # split image into non-overlapping patches
648
+ self.patch_embed = PatchEmbed3D(
649
+ patch_size=patch_size,
650
+ in_chans=in_chans,
651
+ embed_dim=embed_dim,
652
+ norm_layer=norm_layer if self.patch_norm else None,
653
+ )
654
+
655
+ self.pos_drop = nn.Dropout(p=drop_rate)
656
+
657
+ # stochastic depth
658
+ dpr = [
659
+ x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))
660
+ ] # stochastic depth decay rule
661
+
662
+ # build layers
663
+ self.layers = nn.ModuleList()
664
+ for i_layer in range(self.num_layers):
665
+ layer = BasicLayer(
666
+ dim=int(embed_dim * 2 ** i_layer),
667
+ depth=depths[i_layer],
668
+ num_heads=num_heads[i_layer],
669
+ window_size=window_size,
670
+ mlp_ratio=mlp_ratio,
671
+ qkv_bias=qkv_bias,
672
+ qk_scale=qk_scale,
673
+ drop=drop_rate,
674
+ attn_drop=attn_drop_rate,
675
+ drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])],
676
+ norm_layer=norm_layer,
677
+ downsample=PatchMerging if i_layer < self.num_layers - 1 else None,
678
+ use_checkpoint=use_checkpoint,
679
+ jump_attention=jump_attention[i_layer],
680
+ )
681
+ self.layers.append(layer)
682
+
683
+ self.num_features = int(embed_dim * 2 ** (self.num_layers - 1))
684
+
685
+ # add a norm layer for each output
686
+ self.norm = norm_layer(self.num_features)
687
+
688
+ self._freeze_stages()
689
+
690
+ def _freeze_stages(self):
691
+ if self.frozen_stages >= 0:
692
+ self.patch_embed.eval()
693
+ for param in self.patch_embed.parameters():
694
+ param.requires_grad = False
695
+
696
+ if self.frozen_stages >= 1:
697
+ self.pos_drop.eval()
698
+ for i in range(0, self.frozen_stages):
699
+ m = self.layers[i]
700
+ m.eval()
701
+ for param in m.parameters():
702
+ param.requires_grad = False
703
+
704
+ def inflate_weights(self, logger):
705
+ """Inflate the swin2d parameters to swin3d.
706
+
707
+ The differences between swin3d and swin2d mainly lie in an extra
708
+ axis. To utilize the pretrained parameters of the 2d model,
709
+ the weight of swin2d models should be inflated to fit in the shapes of
710
+ the 3d counterpart.
711
+
712
+ Args:
713
+ logger (logging.Logger): The logger used to print
714
+ debugging information.
715
+ """
716
+ checkpoint = torch.load(self.pretrained, map_location="cpu")
717
+ state_dict = checkpoint["model"]
718
+
719
+ # delete relative_position_index since we always re-init it
720
+ relative_position_index_keys = [
721
+ k for k in state_dict.keys() if "relative_position_index" in k
722
+ ]
723
+ for k in relative_position_index_keys:
724
+ del state_dict[k]
725
+
726
+ # delete attn_mask since we always re-init it
727
+ attn_mask_keys = [k for k in state_dict.keys() if "attn_mask" in k]
728
+ for k in attn_mask_keys:
729
+ del state_dict[k]
730
+
731
+ state_dict["patch_embed.proj.weight"] = (
732
+ state_dict["patch_embed.proj.weight"]
733
+ .unsqueeze(2)
734
+ .repeat(1, 1, self.patch_size[0], 1, 1)
735
+ / self.patch_size[0]
736
+ )
737
+
738
+ # bicubic interpolate relative_position_bias_table if not match
739
+ relative_position_bias_table_keys = [
740
+ k for k in state_dict.keys() if "relative_position_bias_table" in k
741
+ ]
742
+ for k in relative_position_bias_table_keys:
743
+ relative_position_bias_table_pretrained = state_dict[k]
744
+ relative_position_bias_table_current = self.state_dict()[k]
745
+ L1, nH1 = relative_position_bias_table_pretrained.size()
746
+ L2, nH2 = relative_position_bias_table_current.size()
747
+ L2 = (2 * self.window_size[1] - 1) * (2 * self.window_size[2] - 1)
748
+ wd = self.window_size[0]
749
+ if nH1 != nH2:
750
+ logger.warning(f"Error in loading {k}, passing")
751
+ else:
752
+ if L1 != L2:
753
+ S1 = int(L1 ** 0.5)
754
+ relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
755
+ relative_position_bias_table_pretrained.permute(1, 0).view(
756
+ 1, nH1, S1, S1
757
+ ),
758
+ size=(
759
+ 2 * self.window_size[1] - 1,
760
+ 2 * self.window_size[2] - 1,
761
+ ),
762
+ mode="bicubic",
763
+ )
764
+ relative_position_bias_table_pretrained = relative_position_bias_table_pretrained_resized.view(
765
+ nH2, L2
766
+ ).permute(
767
+ 1, 0
768
+ )
769
+ state_dict[k] = relative_position_bias_table_pretrained.repeat(
770
+ 2 * wd - 1, 1
771
+ )
772
+
773
+ msg = self.load_state_dict(state_dict, strict=False)
774
+ logger.info(msg)
775
+ logger.info(f"=> loaded successfully '{self.pretrained}'")
776
+ del checkpoint
777
+ torch.cuda.empty_cache()
778
+
779
+ def load_checkpoint(self, load_path, strict=False):
780
+ from collections import OrderedDict
781
+
782
+ model_state_dict = self.state_dict()
783
+ state_dict = torch.load(load_path)
784
+ if "state_dict" in state_dict.keys():
785
+ state_dict = state_dict["state_dict"]
786
+
787
+ clean_dict = OrderedDict()
788
+ for key, value in state_dict.items():
789
+ if "backbone" in key:
790
+ clean_key = key[9:]
791
+ clean_dict[clean_key] = value
792
+
793
+ if not strict:
794
+ for key, value in model_state_dict.items():
795
+ if key in clean_dict:
796
+ if value.shape != clean_dict[key].shape:
797
+ clean_dict.pop(key)
798
+
799
+ self.load_state_dict(clean_dict, strict=strict)
800
+
801
+ def init_weights(self, pretrained=None):
802
+ """Initialize the weights in backbone.
803
+
804
+ Args:
805
+ pretrained (str, optional): Path to pre-trained weights.
806
+ Defaults to None.
807
+ """
808
+
809
+ def _init_weights(m):
810
+ if isinstance(m, nn.Linear):
811
+ trunc_normal_(m.weight, std=0.02)
812
+ if isinstance(m, nn.Linear) and m.bias is not None:
813
+ nn.init.constant_(m.bias, 0)
814
+ elif isinstance(m, nn.LayerNorm):
815
+ nn.init.constant_(m.bias, 0)
816
+ nn.init.constant_(m.weight, 1.0)
817
+
818
+ if pretrained:
819
+ self.pretrained = pretrained
820
+ if isinstance(self.pretrained, str):
821
+ self.apply(_init_weights)
822
+ logger = get_root_logger()
823
+ logger.info(f"load model from: {self.pretrained}")
824
+
825
+ if self.pretrained2d:
826
+ # Inflate 2D model into 3D model.
827
+ self.inflate_weights(logger)
828
+ else:
829
+ # Directly load 3D model.
830
+ self.load_checkpoint(self.pretrained, strict=False) # , logger=logger)
831
+ elif self.pretrained is None:
832
+ self.apply(_init_weights)
833
+ else:
834
+ raise TypeError("pretrained must be a str or None")
835
+
836
+ def forward(self, x, multi=False):
837
+ """Forward function."""
838
+ x = self.patch_embed(x)
839
+
840
+ x = self.pos_drop(x)
841
+
842
+ if multi:
843
+ feats = [x]
844
+
845
+ for layer in self.layers:
846
+ x = layer(x.contiguous())
847
+ if multi:
848
+ feats += [x]
849
+
850
+ x = rearrange(x, "n c d h w -> n d h w c")
851
+ x = self.norm(x)
852
+ x = rearrange(x, "n d h w c -> n c d h w")
853
+
854
+ if multi:
855
+ return feats[:-1] + [x]
856
+ else:
857
+ return x
858
+
859
+ def train(self, mode=True):
860
+ """Convert the model into training mode while keep layers freezed."""
861
+ super(SwinTransformer3D, self).train(mode)
862
+ self._freeze_stages()
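
A hedged usage sketch of the backbone defined above (random weights, shapes only; with the default patch size and four stages the spatial stride is 32 and the temporal stride is 2):

import torch

# Illustrative only: no pretrained weights, checkpointing disabled for simplicity.
backbone = SwinTransformer3D(pretrained=None, use_checkpoint=False)
video = torch.randn(1, 3, 16, 224, 224)   # (B, C, D, H, W)
with torch.no_grad():
    feats = backbone(video)
print(feats.shape)                         # torch.Size([1, 768, 8, 7, 7]) with the defaults
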
cover/models/clip_model.py ADDED
@@ -0,0 +1,640 @@
1
+ import hashlib
2
+ import os
3
+ import urllib
4
+ import warnings
5
+ from tqdm import tqdm
6
+ from typing import Tuple, Union, List
7
+ from collections import OrderedDict
8
+
9
+ import numpy as np
10
+ import torch
11
+ import torch.nn.functional as F
12
+ from torch import nn
13
+
14
+
15
+ _MODELS = {
16
+ "RN50": "https://openaipublic.azureedge.net/clip/models/afeb0e10f9e5a86da6080e35cf09123aca3b358a0c3e3b6c78a7b63bc04b6762/RN50.pt",
17
+ "RN101": "https://openaipublic.azureedge.net/clip/models/8fa8567bab74a42d41c5915025a8e4538c3bdbe8804a470a72f30b0d94fab599/RN101.pt",
18
+ "RN50x4": "https://openaipublic.azureedge.net/clip/models/7e526bd135e493cef0776de27d5f42653e6b4c8bf9e0f653bb11773263205fdd/RN50x4.pt",
19
+ "RN50x16": "https://openaipublic.azureedge.net/clip/models/52378b407f34354e150460fe41077663dd5b39c54cd0bfd2b27167a4a06ec9aa/RN50x16.pt",
20
+ "RN50x64": "https://openaipublic.azureedge.net/clip/models/be1cfb55d75a9666199fb2206c106743da0f6468c9d327f3e0d0a543a9919d9c/RN50x64.pt",
21
+ "ViT-B/32": "https://openaipublic.azureedge.net/clip/models/40d365715913c9da98579312b702a82c18be219cc2a73407c4526f58eba950af/ViT-B-32.pt",
22
+ "ViT-B/16": "https://openaipublic.azureedge.net/clip/models/5806e77cd80f8b59890b7e101eabd078d9fb84e6937f9e85e4ecb61988df416f/ViT-B-16.pt",
23
+ "ViT-L/14": "https://openaipublic.azureedge.net/clip/models/b8cca3fd41ae0c99ba7e8951adf17d267cdb84cd88be6f7c2e0eca1737a03836/ViT-L-14.pt",
24
+ "ViT-L/14@336px": "https://openaipublic.azureedge.net/clip/models/3035c92b350959924f9f00213499208652fc7ea050643e8b385c2dac08641f02/ViT-L-14-336px.pt",
25
+ }
26
+
27
+
28
+ def _download(url: str, root: str):
29
+ os.makedirs(root, exist_ok=True)
30
+ filename = os.path.basename(url)
31
+
32
+ expected_sha256 = url.split("/")[-2]
33
+ download_target = os.path.join(root, filename)
34
+
35
+ if os.path.exists(download_target) and not os.path.isfile(download_target):
36
+ raise RuntimeError(f"{download_target} exists and is not a regular file")
37
+
38
+ if os.path.isfile(download_target):
39
+ if hashlib.sha256(open(download_target, "rb").read()).hexdigest() == expected_sha256:
40
+ return download_target
41
+ else:
42
+ warnings.warn(f"{download_target} exists, but the SHA256 checksum does not match; re-downloading the file")
43
+
44
+ with urllib.request.urlopen(url) as source, open(download_target, "wb") as output:
45
+ with tqdm(total=int(source.info().get("Content-Length")), ncols=80, unit='iB', unit_scale=True, unit_divisor=1024) as loop:
46
+ while True:
47
+ buffer = source.read(8192)
48
+ if not buffer:
49
+ break
50
+
51
+ output.write(buffer)
52
+ loop.update(len(buffer))
53
+
54
+ if hashlib.sha256(open(download_target, "rb").read()).hexdigest() != expected_sha256:
55
+ raise RuntimeError("Model has been downloaded but the SHA256 checksum does not not match")
56
+
57
+ return download_target
58
+
59
+
60
+ def available_models() -> List[str]:
61
+ """Returns the names of available CLIP models"""
62
+ return list(_MODELS.keys())
63
+
64
+
65
+ def load(name: str, device: Union[str, torch.device] = "cuda" if torch.cuda.is_available() else "cpu", jit: bool = False, download_root: str = None):
66
+ """Load a CLIP model
67
+ Parameters
68
+ ----------
69
+ name : str
70
+ A model name listed by `clip.available_models()`, or the path to a model checkpoint containing the state_dict
71
+ device : Union[str, torch.device]
72
+ The device to put the loaded model
73
+ jit : bool
74
+ Whether to load the optimized JIT model or more hackable non-JIT model (default).
75
+ download_root: str
76
+ path to download the model files; by default, it uses "~/.cache/clip"
77
+ Returns
78
+ -------
79
+ model : torch.nn.Module
80
+ The CLIP model
81
+ preprocess : Callable[[PIL.Image], torch.Tensor]
82
+ A torchvision transform that converts a PIL image into a tensor that the returned model can take as its input
83
+ """
84
+ if name in _MODELS:
85
+ model_path = _download(_MODELS[name], download_root or os.path.expanduser("~/.cache/clip"))
86
+ elif os.path.isfile(name):
87
+ model_path = name
88
+ else:
89
+ raise RuntimeError(f"Model {name} not found; available models = {available_models()}")
90
+
91
+ with open(model_path, 'rb') as opened_file:
92
+ try:
93
+ # loading JIT archive
94
+ model = torch.jit.load(opened_file, map_location=device if jit else "cpu").eval()
95
+ state_dict = None
96
+ except RuntimeError:
97
+ # loading saved state dict
98
+ if jit:
99
+ warnings.warn(f"File {model_path} is not a JIT archive. Loading as a state dict instead")
100
+ jit = False
101
+ state_dict = torch.load(opened_file, map_location="cpu")
102
+
103
+ if not jit:
104
+ model = build_model(state_dict or model.state_dict()).to(device)
105
+ if str(device) == "cpu":
106
+ model.float()
107
+ return model
108
+
109
+ # patch the device names
110
+ device_holder = torch.jit.trace(lambda: torch.ones([]).to(torch.device(device)), example_inputs=[])
111
+ device_node = [n for n in device_holder.graph.findAllNodes("prim::Constant") if "Device" in repr(n)][-1]
112
+
113
+ def patch_device(module):
114
+ try:
115
+ graphs = [module.graph] if hasattr(module, "graph") else []
116
+ except RuntimeError:
117
+ graphs = []
118
+
119
+ if hasattr(module, "forward1"):
120
+ graphs.append(module.forward1.graph)
121
+
122
+ for graph in graphs:
123
+ for node in graph.findAllNodes("prim::Constant"):
124
+ if "value" in node.attributeNames() and str(node["value"]).startswith("cuda"):
125
+ node.copyAttributes(device_node)
126
+
127
+ model.apply(patch_device)
128
+ patch_device(model.encode_image)
129
+ patch_device(model.encode_text)
130
+
131
+ # patch dtype to float32 on CPU
132
+ if str(device) == "cpu":
133
+ float_holder = torch.jit.trace(lambda: torch.ones([]).float(), example_inputs=[])
134
+ float_input = list(float_holder.graph.findNode("aten::to").inputs())[1]
135
+ float_node = float_input.node()
136
+
137
+ def patch_float(module):
138
+ try:
139
+ graphs = [module.graph] if hasattr(module, "graph") else []
140
+ except RuntimeError:
141
+ graphs = []
142
+
143
+ if hasattr(module, "forward1"):
144
+ graphs.append(module.forward1.graph)
145
+
146
+ for graph in graphs:
147
+ for node in graph.findAllNodes("aten::to"):
148
+ inputs = list(node.inputs())
149
+ for i in [1, 2]: # dtype can be the second or third argument to aten::to()
150
+ if inputs[i].node()["value"] == 5:
151
+ inputs[i].node().copyAttributes(float_node)
152
+
153
+ model.apply(patch_float)
154
+ patch_float(model.encode_image)
155
+ patch_float(model.encode_text)
156
+
157
+ model.float()
158
+
159
+ return model
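
A hedged usage sketch of load() (the checkpoint is fetched into ~/.cache/clip on first use; only the image tower is exercised here):

import torch

# Illustrative only: non-JIT CPU load, single random image through the image encoder.
model = load("ViT-B/32", device="cpu", jit=False)
image = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    feats, tokens = model.encode_image(image, pos_embedding=False)
print(feats.shape, tokens.shape)   # torch.Size([1, 512]) torch.Size([1, 49, 768])
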
160
+
161
+
162
+ class Bottleneck(nn.Module):
163
+ expansion = 4
164
+
165
+ def __init__(self, inplanes, planes, stride=1):
166
+ super().__init__()
167
+
168
+ # all conv layers have stride 1. an avgpool is performed after the second convolution when stride > 1
169
+ self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False)
170
+ self.bn1 = nn.BatchNorm2d(planes)
171
+
172
+ self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False)
173
+ self.bn2 = nn.BatchNorm2d(planes)
174
+
175
+ self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity()
176
+
177
+ self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False)
178
+ self.bn3 = nn.BatchNorm2d(planes * self.expansion)
179
+
180
+ self.relu = nn.ReLU(inplace=True)
181
+ self.downsample = None
182
+ self.stride = stride
183
+
184
+ if stride > 1 or inplanes != planes * Bottleneck.expansion:
185
+ # downsampling layer is prepended with an avgpool, and the subsequent convolution has stride 1
186
+ self.downsample = nn.Sequential(OrderedDict([
187
+ ("-1", nn.AvgPool2d(stride)),
188
+ ("0", nn.Conv2d(inplanes, planes * self.expansion, 1, stride=1, bias=False)),
189
+ ("1", nn.BatchNorm2d(planes * self.expansion))
190
+ ]))
191
+
192
+ def forward(self, x: torch.Tensor):
193
+ identity = x
194
+
195
+ out = self.relu(self.bn1(self.conv1(x)))
196
+ out = self.relu(self.bn2(self.conv2(out)))
197
+ out = self.avgpool(out)
198
+ out = self.bn3(self.conv3(out))
199
+
200
+ if self.downsample is not None:
201
+ identity = self.downsample(x)
202
+
203
+ out += identity
204
+ out = self.relu(out)
205
+ return out
206
+
207
+
208
+ class AttentionPool2d(nn.Module):
209
+ def __init__(self, spacial_dim: int, embed_dim: int, num_heads: int, output_dim: int = None):
210
+ super().__init__()
211
+ self.positional_embedding = nn.Parameter(torch.randn(spacial_dim ** 2 + 1, embed_dim) / embed_dim ** 0.5)
212
+ self.k_proj = nn.Linear(embed_dim, embed_dim)
213
+ self.q_proj = nn.Linear(embed_dim, embed_dim)
214
+ self.v_proj = nn.Linear(embed_dim, embed_dim)
215
+ self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim)
216
+ self.num_heads = num_heads
217
+ self.spacial_dim = spacial_dim
218
+ self.embed_dim = embed_dim
219
+
220
+ def forward(self, x, return_token=False, pos_embedding=False):
221
+ n, c, h, w = x.shape
222
+ x = x.reshape(x.shape[0], x.shape[1], x.shape[2] * x.shape[3]).permute(2, 0, 1) # NCHW -> (HW)NC
223
+ x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC
224
+ if pos_embedding:
225
+ positional_embedding_resize = F.interpolate(self.positional_embedding.unsqueeze(
226
+ 0).unsqueeze(0), size=(x.size(0), x.size(2)), mode='bicubic').squeeze(0).squeeze(0)
227
+ x = x + positional_embedding_resize[:, None, :].to(x.dtype) # (HW+1)NC
228
+
229
+ x, _ = F.multi_head_attention_forward(
230
+ query=x, key=x, value=x,
231
+ embed_dim_to_check=x.shape[-1],
232
+ num_heads=self.num_heads,
233
+ q_proj_weight=self.q_proj.weight,
234
+ k_proj_weight=self.k_proj.weight,
235
+ v_proj_weight=self.v_proj.weight,
236
+ in_proj_weight=None,
237
+ in_proj_bias=torch.cat([self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]),
238
+ bias_k=None,
239
+ bias_v=None,
240
+ add_zero_attn=False,
241
+ dropout_p=0,
242
+ out_proj_weight=self.c_proj.weight,
243
+ out_proj_bias=self.c_proj.bias,
244
+ use_separate_proj_weight=True,
245
+ training=self.training,
246
+ need_weights=False
247
+ )
248
+
249
+ if return_token:
250
+ return x[0], x[1:]
251
+ else:
252
+ return x[0]
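
The pooling above flattens the feature map into HW tokens, prepends their mean as an extra token, and runs a single multi-head attention step whose output at that mean token is returned (optionally together with the per-position tokens). An illustrative shape check with RN50-style sizes:

import torch

# Illustrative only: last-stage ResNet features pooled to the CLIP embedding.
pool = AttentionPool2d(spacial_dim=7, embed_dim=2048, num_heads=32, output_dim=1024)
feat_map = torch.randn(2, 2048, 7, 7)   # (N, C, H, W)
print(pool(feat_map).shape)             # torch.Size([2, 1024])
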
253
+
254
+
255
+ class ModifiedResNet(nn.Module):
256
+ """
257
+ A ResNet class that is similar to torchvision's but contains the following changes:
258
+ - There are now 3 "stem" convolutions as opposed to 1, with an average pool instead of a max pool.
259
+ - Performs anti-aliasing strided convolutions, where an avgpool is prepended to convolutions with stride > 1
260
+ - The final pooling layer is a QKV attention instead of an average pool
261
+ """
262
+
263
+ def __init__(self, layers, output_dim, heads, input_resolution=224, width=64):
264
+ super().__init__()
265
+ self.output_dim = output_dim
266
+ self.input_resolution = input_resolution
267
+
268
+ # the 3-layer stem
269
+ self.conv1 = nn.Conv2d(3, width // 2, kernel_size=3, stride=2, padding=1, bias=False)
270
+ self.bn1 = nn.BatchNorm2d(width // 2)
271
+ self.conv2 = nn.Conv2d(width // 2, width // 2, kernel_size=3, padding=1, bias=False)
272
+ self.bn2 = nn.BatchNorm2d(width // 2)
273
+ self.conv3 = nn.Conv2d(width // 2, width, kernel_size=3, padding=1, bias=False)
274
+ self.bn3 = nn.BatchNorm2d(width)
275
+ self.avgpool = nn.AvgPool2d(2)
276
+ self.relu = nn.ReLU(inplace=True)
277
+
278
+ # residual layers
279
+ self._inplanes = width # this is a *mutable* variable used during construction
280
+ self.layer1 = self._make_layer(width, layers[0])
281
+ self.layer2 = self._make_layer(width * 2, layers[1], stride=2)
282
+ self.layer3 = self._make_layer(width * 4, layers[2], stride=2)
283
+ self.layer4 = self._make_layer(width * 8, layers[3], stride=2)
284
+
285
+ self.feature_dim_list = [width, width * 4, width * 8, width * 16, width * 32]
286
+
287
+ embed_dim = width * 32 # the ResNet feature dimension
288
+ self.attnpool = AttentionPool2d(input_resolution // 32, embed_dim, heads, output_dim)
289
+
290
+ def _make_layer(self, planes, blocks, stride=1):
291
+ layers = [Bottleneck(self._inplanes, planes, stride)]
292
+
293
+ self._inplanes = planes * Bottleneck.expansion
294
+ for _ in range(1, blocks):
295
+ layers.append(Bottleneck(self._inplanes, planes))
296
+
297
+ return nn.Sequential(*layers)
298
+
299
+ def forward_features(self, x, return_token=False, pos_embedding=False):
300
+ def stem(x):
301
+ for conv, bn in [(self.conv1, self.bn1), (self.conv2, self.bn2), (self.conv3, self.bn3)]:
302
+ x = self.relu(bn(conv(x)))
303
+ x = self.avgpool(x)
304
+ return x
305
+
306
+ x = x.type(self.conv1.weight.dtype)
307
+ x = stem(x)
308
+ feat_list = [x]
309
+ x = self.layer1(x)
310
+ feat_list += [x]
311
+ x = self.layer2(x)
312
+ feat_list += [x]
313
+ x = self.layer3(x)
314
+ feat_list += [x]
315
+ x = self.layer4(x)
316
+ feat_list += [x]
317
+ return feat_list
318
+
319
+ def forward(self, x, return_token=False, pos_embedding=False):
320
+ def stem(x):
321
+ for conv, bn in [(self.conv1, self.bn1), (self.conv2, self.bn2), (self.conv3, self.bn3)]:
322
+ x = self.relu(bn(conv(x)))
323
+ x = self.avgpool(x)
324
+ return x
325
+
326
+ x = x.type(self.conv1.weight.dtype)
327
+ x = stem(x)
328
+ x = self.layer1(x)
329
+ x = self.layer2(x)
330
+ x = self.layer3(x)
331
+ x = self.layer4(x)
332
+
333
+ if return_token:
334
+ x, tokens = self.attnpool(x, return_token, pos_embedding)
335
+ return x, tokens
336
+ else:
337
+ x = self.attnpool(x, return_token, pos_embedding)
338
+ return x
339
+
340
+
341
+ class LayerNorm(nn.LayerNorm):
342
+ """Subclass torch's LayerNorm to handle fp16."""
343
+
344
+ def forward(self, x: torch.Tensor):
345
+ orig_type = x.dtype
346
+ ret = super().forward(x.type(torch.float32))
347
+ return ret.type(orig_type)
348
+
349
+
350
+ class QuickGELU(nn.Module):
351
+ def forward(self, x: torch.Tensor):
352
+ return x * torch.sigmoid(1.702 * x)
353
+
354
+
355
+ class ResidualAttentionBlock(nn.Module):
356
+ def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None):
357
+ super().__init__()
358
+
359
+ self.attn = nn.MultiheadAttention(d_model, n_head)
360
+ self.ln_1 = LayerNorm(d_model)
361
+ self.mlp = nn.Sequential(OrderedDict([
362
+ ("c_fc", nn.Linear(d_model, d_model * 4)),
363
+ ("gelu", QuickGELU()),
364
+ ("c_proj", nn.Linear(d_model * 4, d_model))
365
+ ]))
366
+ self.ln_2 = LayerNorm(d_model)
367
+ self.attn_mask = attn_mask
368
+
369
+ def attention(self, x: torch.Tensor):
370
+ self.attn_mask = self.attn_mask.to(dtype=x.dtype, device=x.device) if self.attn_mask is not None else None
371
+ return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0]
372
+
373
+ def forward(self, x: torch.Tensor):
374
+ x = x + self.attention(self.ln_1(x))
375
+ x = x + self.mlp(self.ln_2(x))
376
+ return x
377
+
378
+
379
+ class Transformer(nn.Module):
380
+ def __init__(self, width: int, layers: int, heads: int, attn_mask: torch.Tensor = None):
381
+ super().__init__()
382
+ self.width = width
383
+ self.layers = layers
384
+ self.resblocks = nn.Sequential(*[ResidualAttentionBlock(width, heads, attn_mask) for _ in range(layers)])
385
+
386
+ def forward(self, x: torch.Tensor):
387
+ return self.resblocks(x)
388
+
389
+
390
+ class VisionTransformer(nn.Module):
391
+ def __init__(self, input_resolution: int, patch_size: int, width: int, layers: int, heads: int, output_dim: int):
392
+ super().__init__()
393
+ self.input_resolution = input_resolution
394
+ self.output_dim = output_dim
395
+ self.conv1 = nn.Conv2d(in_channels=3, out_channels=width,
396
+ kernel_size=patch_size, stride=patch_size, bias=False)
397
+
398
+ scale = width ** -0.5
399
+ self.class_embedding = nn.Parameter(scale * torch.randn(width))
400
+ self.positional_embedding = nn.Parameter(scale * torch.randn((input_resolution // patch_size) ** 2 + 1, width))
401
+ self.ln_pre = LayerNorm(width)
402
+
403
+ self.transformer = Transformer(width, layers, heads)
404
+
405
+ self.ln_post = LayerNorm(width)
406
+ self.proj = nn.Parameter(scale * torch.randn(width, output_dim))
407
+
408
+ def forward(self, x: torch.Tensor, return_token=True, pos_embedding=False):
409
+ x = self.conv1(x) # shape = [*, width, grid, grid]
410
+ x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2]
411
+ x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width]
412
+ x = torch.cat([self.class_embedding.to(x.dtype) + torch.zeros(x.shape[0], 1, x.shape[-1],
413
+ dtype=x.dtype, device=x.device), x], dim=1) # shape = [*, grid ** 2 + 1, width]
414
+
415
+ if pos_embedding:
416
+ positional_embedding_resize = F.interpolate(self.positional_embedding.unsqueeze(
417
+ 0).unsqueeze(0), size=(x.size(1), x.size(2)), mode='bicubic').squeeze(0).squeeze(0)
418
+ x = x + positional_embedding_resize.to(x.dtype)
419
+
420
+ x = self.ln_pre(x)
421
+
422
+ x = x.permute(1, 0, 2) # NLD -> LND
423
+ x = self.transformer(x)
424
+ x = x.permute(1, 0, 2) # LND -> NLD
425
+
426
+ token = self.ln_post(x[:, 1:, :])
427
+
428
+ x = self.ln_post(x[:, 0, :])
429
+
430
+ if self.proj is not None:
431
+ x = x @ self.proj
432
+
433
+ if return_token:
434
+ return x, token
435
+ else:
436
+ return x
437
+
438
+
439
+ class CLIP(nn.Module):
440
+ def __init__(self,
441
+ embed_dim: int,
442
+ # vision
443
+ image_resolution: int,
444
+ vision_layers: Union[Tuple[int, int, int, int], int],
445
+ vision_width: int,
446
+ vision_patch_size: int,
447
+ # text
448
+ context_length: int,
449
+ vocab_size: int,
450
+ transformer_width: int,
451
+ transformer_heads: int,
452
+ transformer_layers: int
453
+ ):
454
+ super().__init__()
455
+
456
+ self.context_length = context_length
457
+
458
+ if isinstance(vision_layers, (tuple, list)):
459
+ vision_heads = vision_width * 32 // 64
460
+ self.visual = ModifiedResNet(
461
+ layers=vision_layers,
462
+ output_dim=embed_dim,
463
+ heads=vision_heads,
464
+ input_resolution=image_resolution,
465
+ width=vision_width
466
+ )
467
+ else:
468
+ vision_heads = vision_width // 64
469
+ self.visual = VisionTransformer(
470
+ input_resolution=image_resolution,
471
+ patch_size=vision_patch_size,
472
+ width=vision_width,
473
+ layers=vision_layers,
474
+ heads=vision_heads,
475
+ output_dim=embed_dim
476
+ )
477
+
478
+ self.transformer = Transformer(
479
+ width=transformer_width,
480
+ layers=transformer_layers,
481
+ heads=transformer_heads,
482
+ attn_mask=self.build_attention_mask()
483
+ )
484
+
485
+ self.vocab_size = vocab_size
486
+ self.token_embedding = nn.Embedding(vocab_size, transformer_width)
487
+ self.positional_embedding = nn.Parameter(torch.empty(self.context_length, transformer_width))
488
+ self.ln_final = LayerNorm(transformer_width)
489
+
490
+ self.text_projection = nn.Parameter(torch.empty(transformer_width, embed_dim))
491
+ self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07))
492
+
493
+ self.initialize_parameters()
494
+
495
+ def initialize_parameters(self):
496
+ nn.init.normal_(self.token_embedding.weight, std=0.02)
497
+ nn.init.normal_(self.positional_embedding, std=0.01)
498
+
499
+ if isinstance(self.visual, ModifiedResNet):
500
+ if self.visual.attnpool is not None:
501
+ std = self.visual.attnpool.c_proj.in_features ** -0.5
502
+ nn.init.normal_(self.visual.attnpool.q_proj.weight, std=std)
503
+ nn.init.normal_(self.visual.attnpool.k_proj.weight, std=std)
504
+ nn.init.normal_(self.visual.attnpool.v_proj.weight, std=std)
505
+ nn.init.normal_(self.visual.attnpool.c_proj.weight, std=std)
506
+
507
+ for resnet_block in [self.visual.layer1, self.visual.layer2, self.visual.layer3, self.visual.layer4]:
508
+ for name, param in resnet_block.named_parameters():
509
+ if name.endswith("bn3.weight"):
510
+ nn.init.zeros_(param)
511
+
512
+ proj_std = (self.transformer.width ** -0.5) * ((2 * self.transformer.layers) ** -0.5)
513
+ attn_std = self.transformer.width ** -0.5
514
+ fc_std = (2 * self.transformer.width) ** -0.5
515
+ for block in self.transformer.resblocks:
516
+ nn.init.normal_(block.attn.in_proj_weight, std=attn_std)
517
+ nn.init.normal_(block.attn.out_proj.weight, std=proj_std)
518
+ nn.init.normal_(block.mlp.c_fc.weight, std=fc_std)
519
+ nn.init.normal_(block.mlp.c_proj.weight, std=proj_std)
520
+
521
+ if self.text_projection is not None:
522
+ nn.init.normal_(self.text_projection, std=self.transformer.width ** -0.5)
523
+
524
+ def build_attention_mask(self):
525
+ # lazily create causal attention mask, with full attention between the vision tokens
526
+ # pytorch uses additive attention mask; fill with -inf
527
+ mask = torch.empty(self.context_length, self.context_length)
528
+ mask.fill_(float("-inf"))
529
+ mask.triu_(1) # zero out the lower diagonal
530
+ return mask
531
+
532
+ @property
533
+ def dtype(self):
534
+ return self.visual.conv1.weight.dtype
535
+
536
+ def encode_image(self, image, pos_embedding):
537
+ return self.visual(image.type(self.dtype), pos_embedding=pos_embedding)
538
+
539
+ def encode_text(self, text):
540
+ x = self.token_embedding(text).type(self.dtype) # [batch_size, n_ctx, d_model]
541
+
542
+ x = x + self.positional_embedding.type(self.dtype)
543
+ x = x.permute(1, 0, 2) # NLD -> LND
544
+ x = self.transformer(x)
545
+ x = x.permute(1, 0, 2) # LND -> NLD
546
+ x = self.ln_final(x).type(self.dtype)
547
+
548
+ # x.shape = [batch_size, n_ctx, transformer.width]
549
+ # take features from the eot embedding (eot_token is the highest number in each sequence)
550
+ x = x[torch.arange(x.shape[0]), text.argmax(dim=-1)] @ self.text_projection
551
+
552
+ return x
553
+
554
+ def forward(self, image, text, pos_embedding=False, text_features=None):
555
+ # only use the image encoder in CLIP
556
+ image_features, token_features = self.encode_image(image, pos_embedding)
557
+
558
+ # normalized features
559
+ image_features = image_features / image_features.norm(dim=-1, keepdim=True)
560
+
561
+ # don't process encode_text
562
+ # if text_features is None:
563
+ # text_features = self.encode_text(text)
564
+ # text_features = text_features / text_features.norm(dim=-1, keepdim=True)
565
+
566
+ # cosine similarity as logits
567
+ # logit_scale = self.logit_scale.exp()
568
+ # logits_per_image = logit_scale * image_features @ text_features.t()
569
+ # logits_per_text = logits_per_image.t()
570
+ logits_per_image = 0
571
+ logits_per_text = 0
572
+
573
+ # shape = [global_batch_size, global_batch_size]
574
+ return logits_per_image, logits_per_text, image_features, token_features
575
+
576
+
577
+ def convert_weights(model: nn.Module):
578
+ """Convert applicable model parameters to fp16"""
579
+
580
+ def _convert_weights_to_fp16(l):
581
+ if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)):
582
+ l.weight.data = l.weight.data.half()
583
+ if l.bias is not None:
584
+ l.bias.data = l.bias.data.half()
585
+
586
+ if isinstance(l, nn.MultiheadAttention):
587
+ for attr in [*[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], "in_proj_bias", "bias_k", "bias_v"]:
588
+ tensor = getattr(l, attr)
589
+ if tensor is not None:
590
+ tensor.data = tensor.data.half()
591
+
592
+ for name in ["text_projection", "proj"]:
593
+ if hasattr(l, name):
594
+ attr = getattr(l, name)
595
+ if attr is not None:
596
+ attr.data = attr.data.half()
597
+
598
+ model.apply(_convert_weights_to_fp16)
599
+
600
+
601
+ def build_model(state_dict: dict):
602
+ vit = "visual.proj" in state_dict
603
+
604
+ if vit:
605
+ vision_width = state_dict["visual.conv1.weight"].shape[0]
606
+ vision_layers = len([k for k in state_dict.keys() if k.startswith(
607
+ "visual.") and k.endswith(".attn.in_proj_weight")])
608
+ vision_patch_size = state_dict["visual.conv1.weight"].shape[-1]
609
+ grid_size = round((state_dict["visual.positional_embedding"].shape[0] - 1) ** 0.5)
610
+ image_resolution = vision_patch_size * grid_size
611
+ else:
612
+ counts: list = [len(set(k.split(".")[2]
613
+ for k in state_dict if k.startswith(f"visual.layer{b}"))) for b in [1, 2, 3, 4]]
614
+ vision_layers = tuple(counts)
615
+ vision_width = state_dict["visual.layer1.0.conv1.weight"].shape[0]
616
+ output_width = round((state_dict["visual.attnpool.positional_embedding"].shape[0] - 1) ** 0.5)
617
+ vision_patch_size = None
618
+ assert output_width ** 2 + 1 == state_dict["visual.attnpool.positional_embedding"].shape[0]
619
+ image_resolution = output_width * 32
620
+
621
+ embed_dim = state_dict["text_projection"].shape[1]
622
+ context_length = state_dict["positional_embedding"].shape[0]
623
+ vocab_size = state_dict["token_embedding.weight"].shape[0]
624
+ transformer_width = state_dict["ln_final.weight"].shape[0]
625
+ transformer_heads = transformer_width // 64
626
+ transformer_layers = len(set(k.split(".")[2] for k in state_dict if k.startswith(f"transformer.resblocks")))
627
+
628
+ model = CLIP(
629
+ embed_dim,
630
+ image_resolution, vision_layers, vision_width, vision_patch_size,
631
+ context_length, vocab_size, transformer_width, transformer_heads, transformer_layers
632
+ )
633
+
634
+ for key in ["input_resolution", "context_length", "vocab_size"]:
635
+ if key in state_dict:
636
+ del state_dict[key]
637
+
638
+ convert_weights(model)
639
+ model.load_state_dict(state_dict)
640
+ return model.eval()
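
build_model recovers the whole architecture from the shapes of the state-dict tensors. A hedged sketch of the ViT branch of that inference on made-up shapes (mirroring the lines above, not an additional API):

import torch

# Illustrative only: infer ViT-B/32-like hyperparameters from tensor shapes.
state_dict = {
    "visual.conv1.weight": torch.zeros(768, 3, 32, 32),
    "visual.positional_embedding": torch.zeros(50, 768),
}
vision_width = state_dict["visual.conv1.weight"].shape[0]         # 768
vision_patch_size = state_dict["visual.conv1.weight"].shape[-1]   # 32
grid_size = round((state_dict["visual.positional_embedding"].shape[0] - 1) ** 0.5)
image_resolution = vision_patch_size * grid_size                  # 32 * 7 = 224
print(vision_width, vision_patch_size, image_resolution)          # 768 32 224
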
cover/models/clipiqa_arch.py ADDED
@@ -0,0 +1,165 @@
1
+ r"""CLIP-IQA metric, proposed by
2
+
3
+ Exploring CLIP for Assessing the Look and Feel of Images.
4
+ Jianyi Wang, Kelvin C.K. Chan, Chen Change Loy.
5
+ AAAI 2023.
6
+
7
+ Ref url: https://github.com/IceClear/CLIP-IQA
8
+ Re-implemented by: Chaofeng Chen (https://github.com/chaofengc) with the following modification:
9
+ - We assemble multiple prompts to improve the results of clipiqa model.
10
+
11
+ """
12
+ import torch
13
+ import torch.nn as nn
14
+ import sys
15
+
16
+ import pyiqa
17
+ from pyiqa.archs.arch_util import load_file_from_url
18
+ from pyiqa.archs.arch_util import load_pretrained_network
19
+
20
+ import clip
21
+ from .constants import OPENAI_CLIP_MEAN, OPENAI_CLIP_STD
22
+ from .clip_model import load
23
+
24
+
25
+ default_model_urls = {
26
+ 'clipiqa+': 'https://github.com/chaofengc/IQA-PyTorch/releases/download/v0.1-weights/CLIP-IQA+_learned_prompts-603f3273.pth',
27
+ 'clipiqa+_rn50_512': 'https://github.com/chaofengc/IQA-PyTorch/releases/download/v0.1-weights/CLIPIQA+_RN50_512-89f5d940.pth',
28
+ 'clipiqa+_vitL14_512': 'https://github.com/chaofengc/IQA-PyTorch/releases/download/v0.1-weights/CLIPIQA+_ViTL14_512-e66488f2.pth',
29
+ }
30
+
31
+
32
+ class PromptLearner(nn.Module):
33
+ """
34
+ Disclaimer:
35
+ This implementation follows the official code in https://github.com/IceClear/CLIP-IQA exactly. We have no idea why some tricks are implemented like this, which include
36
+ 1. Using n_ctx prefix characters "X"
37
+ 2. Appending extra "." at the end
38
+ 3. Inserting the original text embedding in the middle
39
+ """
40
+
41
+ def __init__(self, clip_model, n_ctx=16) -> None:
42
+ super().__init__()
43
+
44
+ # For the following codes about prompts, we follow the official codes to get the same results
45
+ prompt_prefix = " ".join(["X"] * n_ctx) + ' '
46
+ init_prompts = [prompt_prefix + 'Good photo..', prompt_prefix + 'Bad photo..']
47
+ with torch.no_grad():
48
+ txt_token = clip.tokenize(init_prompts)
49
+ self.tokenized_prompts = txt_token
50
+ init_embedding = clip_model.token_embedding(txt_token)
51
+
52
+ init_ctx = init_embedding[:, 1: 1 + n_ctx]
53
+ self.ctx = nn.Parameter(init_ctx)
54
+
55
+ self.n_ctx = n_ctx
56
+
57
+ self.n_cls = len(init_prompts)
58
+ self.name_lens = [3, 3] # hard coded length, which does not include the extra "." at the end
59
+
60
+ self.register_buffer("token_prefix", init_embedding[:, :1, :]) # SOS
61
+ self.register_buffer("token_suffix", init_embedding[:, 1 + n_ctx:, :]) # CLS, EOS
62
+
63
+ def get_prompts_with_middel_class(self,):
64
+
65
+ ctx = self.ctx.to(self.token_prefix)
66
+ if ctx.dim() == 2:
67
+ ctx = ctx.unsqueeze(0).expand(self.n_cls, -1, -1)
68
+
69
+ half_n_ctx = self.n_ctx // 2
70
+ prompts = []
71
+ for i in range(self.n_cls):
72
+ name_len = self.name_lens[i]
73
+ prefix_i = self.token_prefix[i: i + 1, :, :]
74
+ class_i = self.token_suffix[i: i + 1, :name_len, :]
75
+ suffix_i = self.token_suffix[i: i + 1, name_len:, :]
76
+ ctx_i_half1 = ctx[i: i + 1, :half_n_ctx, :]
77
+ ctx_i_half2 = ctx[i: i + 1, half_n_ctx:, :]
78
+ prompt = torch.cat(
79
+ [
80
+ prefix_i, # (1, 1, dim)
81
+ ctx_i_half1, # (1, n_ctx//2, dim)
82
+ class_i, # (1, name_len, dim)
83
+ ctx_i_half2, # (1, n_ctx//2, dim)
84
+ suffix_i, # (1, *, dim)
85
+ ],
86
+ dim=1,
87
+ )
88
+ prompts.append(prompt)
89
+ prompts = torch.cat(prompts, dim=0)
90
+ return prompts
91
+
92
+ def forward(self, clip_model):
93
+ prompts = self.get_prompts_with_middel_class()
94
+ # self.get_prompts_with_middel_class
95
+ x = prompts + clip_model.positional_embedding.type(clip_model.dtype)
96
+ x = x.permute(1, 0, 2) # NLD -> LND
97
+ x = clip_model.transformer(x)
98
+ x = x.permute(1, 0, 2) # LND -> NLD
99
+ x = clip_model.ln_final(x).type(clip_model.dtype)
100
+
101
+ # x.shape = [batch_size, n_ctx, transformer.width]
102
+ # take features from the eot embedding (eot_token is the highest number in each sequence)
103
+ x = x[torch.arange(x.shape[0]), self.tokenized_prompts.argmax(dim=-1)] @ clip_model.text_projection
104
+
105
+ return x
106
+
107
+
108
+ class CLIPIQA(nn.Module):
109
+ def __init__(self,
110
+ model_type='clipiqa+_vitL14_512',
111
+ backbone='ViT-L/14',
112
+ pretrained=True,
113
+ pos_embedding=False,
114
+ ) -> None:
115
+ super().__init__()
116
+
117
+ self.clip_model = [load(backbone, 'cpu')] # avoid saving clip weights
118
+ # Different from original paper, we assemble multiple prompts to improve performance
119
+ self.prompt_pairs = clip.tokenize([
120
+ 'Good image', 'bad image',
121
+ 'Sharp image', 'blurry image',
122
+ 'sharp edges', 'blurry edges',
123
+ 'High resolution image', 'low resolution image',
124
+ 'Noise-free image', 'noisy image',
125
+ ])
126
+
127
+ self.model_type = model_type
128
+ self.pos_embedding = pos_embedding
129
+ if 'clipiqa+' in model_type:
130
+ self.prompt_learner = PromptLearner(self.clip_model[0])
131
+
132
+ self.default_mean = torch.Tensor(OPENAI_CLIP_MEAN).view(1, 3, 1, 1)
133
+ self.default_std = torch.Tensor(OPENAI_CLIP_STD).view(1, 3, 1, 1)
134
+
135
+ for p in self.clip_model[0].parameters():
136
+ p.requires_grad = False
137
+
138
+ if pretrained and 'clipiqa+' in model_type:
139
+ if model_type == 'clipiqa+' and backbone == 'RN50':
140
+ self.prompt_learner.ctx.data = torch.load(load_file_from_url(default_model_urls['clipiqa+']))
141
+ elif model_type in default_model_urls.keys():
142
+ load_pretrained_network(self, default_model_urls[model_type], True, 'params')
143
+ else:
144
+ raise ValueError(f'No pretrained model for {model_type}')
145
+
146
+
147
+ def forward(self, x, multi=False, layer=-1):
148
+ # no need to preprocess image here
149
+ # as the image is already preprocessed
150
+ # x = (x - self.default_mean.to(x)) / self.default_std.to(x)
151
+ clip_model = self.clip_model[0].to(x)
152
+
153
+ if self.model_type == 'clipiqa':
154
+ prompts = self.prompt_pairs.to(x.device)
155
+ logits_per_image, logits_per_text, image_feature, token_feature = clip_model(x, prompts, pos_embedding=self.pos_embedding)
156
+ elif 'clipiqa+' in self.model_type:
157
+ # learned_prompt_feature = self.prompt_learner(clip_model)
158
+ learned_prompt_feature = 0
159
+ logits_per_image, logits_per_text, image_feature, token_feature = clip_model(
160
+ x, None, text_features=learned_prompt_feature, pos_embedding=self.pos_embedding)
161
+
162
+ # probs = logits_per_image.reshape(logits_per_image.shape[0], -1, 2).softmax(dim=-1)
163
+
164
+ # return probs[..., 0].mean(dim=1, keepdim=True), image_feature
165
+ return image_feature, token_feature
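
As used here, the wrapper above acts as a frozen CLIP image encoder: the text branch and quality probabilities are commented out, and only image and token features are returned. A hedged usage sketch (requires the clip and pyiqa dependencies; the CLIP backbone is downloaded on first use):

import torch

# Illustrative only: extract frozen CLIP features for a small batch of frames.
iqa = CLIPIQA(model_type='clipiqa+_vitL14_512', backbone='ViT-L/14', pretrained=False)
frames = torch.randn(2, 3, 224, 224)
with torch.no_grad():
    image_feat, token_feat = iqa(frames)
print(image_feat.shape, token_feat.shape)   # (2, 768) and (2, 256, 1024) for ViT-L/14
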
cover/models/constants.py ADDED
@@ -0,0 +1,8 @@
+ IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
+ IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
+ IMAGENET_INCEPTION_MEAN = (0.5, 0.5, 0.5)
+ IMAGENET_INCEPTION_STD = (0.5, 0.5, 0.5)
+ IMAGENET_DPN_MEAN = (124 / 255, 117 / 255, 104 / 255)
+ IMAGENET_DPN_STD = tuple([1 / (.0167 * 255)] * 3)
+ OPENAI_CLIP_MEAN = (122.77, 116.75, 104.09)
+ OPENAI_CLIP_STD = (68.50, 66.63, 70.32)
cover/models/conv_backbone.py ADDED
@@ -0,0 +1,651 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from timm.models.layers import trunc_normal_, DropPath
5
+ from timm.models.registry import register_model
6
+ from .clipiqa_arch import CLIPIQA
7
+
8
+
9
+ class GRN(nn.Module):
10
+ """ GRN (Global Response Normalization) layer
11
+ """
12
+ def __init__(self, dim):
13
+ super().__init__()
14
+ self.gamma = nn.Parameter(torch.zeros(1, 1, 1, dim))
15
+ self.beta = nn.Parameter(torch.zeros(1, 1, 1, dim))
16
+
17
+ def forward(self, x):
18
+ Gx = torch.norm(x, p=2, dim=(1,2), keepdim=True)
19
+ Nx = Gx / (Gx.mean(dim=-1, keepdim=True) + 1e-6)
20
+ return self.gamma * (x * Nx) + self.beta + x
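
GRN measures each channel's global (spatial) L2 response, normalizes it by the mean response across channels, and rescales features by that ratio; with gamma and beta initialized to zero the layer starts as an identity. A small numeric sketch:

import torch

# Illustrative only: the layer is an identity at initialization.
grn = GRN(dim=64)
x = torch.randn(2, 7, 7, 64)                         # channels-last (N, H, W, C)
Gx = torch.norm(x, p=2, dim=(1, 2), keepdim=True)    # per-channel global response
Nx = Gx / (Gx.mean(dim=-1, keepdim=True) + 1e-6)     # ratio to the mean response
print(torch.allclose(grn(x), x))                     # True: gamma = beta = 0
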
21
+
22
+ class Block(nn.Module):
23
+ r""" ConvNeXt Block. There are two equivalent implementations:
24
+ (1) DwConv -> LayerNorm (channels_first) -> 1x1 Conv -> GELU -> 1x1 Conv; all in (N, C, H, W)
25
+ (2) DwConv -> Permute to (N, H, W, C); LayerNorm (channels_last) -> Linear -> GELU -> Linear; Permute back
26
+ We use (2) as we find it slightly faster in PyTorch
27
+
28
+ Args:
29
+ dim (int): Number of input channels.
30
+ drop_path (float): Stochastic depth rate. Default: 0.0
31
+ layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
32
+ """
33
+ def __init__(self, dim, drop_path=0., layer_scale_init_value=1e-6):
34
+ super().__init__()
35
+ self.dwconv = nn.Conv2d(dim, dim, kernel_size=7, padding=3, groups=dim) # depthwise conv
36
+ self.norm = LayerNorm(dim, eps=1e-6)
37
+ self.pwconv1 = nn.Linear(dim, 4 * dim) # pointwise/1x1 convs, implemented with linear layers
38
+ self.act = nn.GELU()
39
+ self.pwconv2 = nn.Linear(4 * dim, dim)
40
+ self.gamma = nn.Parameter(layer_scale_init_value * torch.ones((dim)),
41
+ requires_grad=True) if layer_scale_init_value > 0 else None
42
+ self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
43
+
44
+ def forward(self, x):
45
+ input = x
46
+ x = self.dwconv(x)
47
+ x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C)
48
+ x = self.norm(x)
49
+ x = self.pwconv1(x)
50
+ x = self.act(x)
51
+ x = self.pwconv2(x)
52
+ if self.gamma is not None:
53
+ x = self.gamma * x
54
+ x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W)
55
+
56
+ x = input + self.drop_path(x)
57
+ return x
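
The docstring's claim that the channels_first (1x1 conv) and channels_last (linear) formulations are equivalent can be checked directly; an illustrative sketch with shared weights:

import torch
import torch.nn as nn

# Illustrative only: a 1x1 Conv2d in (N, C, H, W) equals a Linear in (N, H, W, C).
conv1x1 = nn.Conv2d(96, 384, kernel_size=1)
linear = nn.Linear(96, 384)
linear.weight.data = conv1x1.weight.data.view(384, 96)   # share parameters
linear.bias.data = conv1x1.bias.data
x = torch.randn(2, 96, 14, 14)
a = conv1x1(x)
b = linear(x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)
print(torch.allclose(a, b, atol=1e-5))                    # True
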
58
+
59
+ class ConvNeXt(nn.Module):
60
+ r""" ConvNeXt
61
+ A PyTorch impl of : `A ConvNet for the 2020s` -
62
+ https://arxiv.org/pdf/2201.03545.pdf
63
+ Args:
64
+ in_chans (int): Number of input image channels. Default: 3
65
+ num_classes (int): Number of classes for classification head. Default: 1000
66
+ depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3]
67
+ dims (int): Feature dimension at each stage. Default: [96, 192, 384, 768]
68
+ drop_path_rate (float): Stochastic depth rate. Default: 0.
69
+ layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
70
+ head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1.
71
+ """
72
+ def __init__(self, in_chans=3, num_classes=1000,
73
+ depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], drop_path_rate=0.,
74
+ layer_scale_init_value=1e-6, head_init_scale=1.,
75
+ ):
76
+ super().__init__()
77
+
78
+ self.downsample_layers = nn.ModuleList() # stem and 3 intermediate downsampling conv layers
79
+ stem = nn.Sequential(
80
+ nn.Conv2d(in_chans, dims[0], kernel_size=4, stride=4),
81
+ LayerNorm(dims[0], eps=1e-6, data_format="channels_first")
82
+ )
83
+ self.downsample_layers.append(stem)
84
+ for i in range(3):
85
+ downsample_layer = nn.Sequential(
86
+ LayerNorm(dims[i], eps=1e-6, data_format="channels_first"),
87
+ nn.Conv2d(dims[i], dims[i+1], kernel_size=2, stride=2),
88
+ )
89
+ self.downsample_layers.append(downsample_layer)
90
+
91
+ self.stages = nn.ModuleList() # 4 feature resolution stages, each consisting of multiple residual blocks
92
+ dp_rates=[x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))]
93
+ cur = 0
94
+ for i in range(4):
95
+ stage = nn.Sequential(
96
+ *[Block(dim=dims[i], drop_path=dp_rates[cur + j],
97
+ layer_scale_init_value=layer_scale_init_value) for j in range(depths[i])]
98
+ )
99
+ self.stages.append(stage)
100
+ cur += depths[i]
101
+
102
+ self.norm = nn.LayerNorm(dims[-1], eps=1e-6) # final norm layer
103
+ self.head = nn.Linear(dims[-1], num_classes)
104
+
105
+ self.apply(self._init_weights)
106
+ self.head.weight.data.mul_(head_init_scale)
107
+ self.head.bias.data.mul_(head_init_scale)
108
+
109
+ def _init_weights(self, m):
110
+ if isinstance(m, (nn.Conv2d, nn.Linear)):
111
+ trunc_normal_(m.weight, std=.02)
112
+ nn.init.constant_(m.bias, 0)
113
+
114
+ def forward_features(self, x):
115
+ for i in range(4):
116
+ x = self.downsample_layers[i](x)
117
+ x = self.stages[i](x)
118
+ return self.norm(x.mean([-2, -1])) # global average pooling, (N, C, H, W) -> (N, C)
119
+
120
+ def forward(self, x):
121
+ x = self.forward_features(x)
122
+ x = self.head(x)
123
+ return x
124
+
125
+ class LayerNorm(nn.Module):
126
+ r""" LayerNorm that supports two data formats: channels_last (default) or channels_first.
127
+ The ordering of the dimensions in the inputs. channels_last corresponds to inputs with
128
+ shape (batch_size, height, width, channels) while channels_first corresponds to inputs
129
+ with shape (batch_size, channels, height, width).
130
+ """
131
+ def __init__(self, normalized_shape, eps=1e-6, data_format="channels_last"):
132
+ super().__init__()
133
+ self.weight = nn.Parameter(torch.ones(normalized_shape))
134
+ self.bias = nn.Parameter(torch.zeros(normalized_shape))
135
+ self.eps = eps
136
+ self.data_format = data_format
137
+ if self.data_format not in ["channels_last", "channels_first"]:
138
+ raise NotImplementedError
139
+ self.normalized_shape = (normalized_shape, )
140
+
141
+ def forward(self, x):
142
+ if self.data_format == "channels_last":
143
+ return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
144
+ elif self.data_format == "channels_first":
145
+ u = x.mean(1, keepdim=True)
146
+ s = (x - u).pow(2).mean(1, keepdim=True)
147
+ x = (x - u) / torch.sqrt(s + self.eps)
148
+ if len(x.shape) == 4:
149
+ x = self.weight[:, None, None] * x + self.bias[:, None, None]
150
+ elif len(x.shape) == 5:
151
+ x = self.weight[:, None, None, None] * x + self.bias[:, None, None, None]
152
+ return x
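
The channels_first branch above hand-rolls layer normalization over the channel dimension; it should agree with F.layer_norm applied in channels_last layout. An illustrative check, assuming the LayerNorm class above:

import torch
import torch.nn.functional as F

# Illustrative only: channels_first LayerNorm vs. the built-in channels_last path.
ln = LayerNorm(64, eps=1e-6, data_format="channels_first")
x = torch.randn(2, 64, 14, 14)   # (N, C, H, W)
a = ln(x)
b = F.layer_norm(x.permute(0, 2, 3, 1), (64,), ln.weight, ln.bias, 1e-6).permute(0, 3, 1, 2)
print(torch.allclose(a, b, atol=1e-5))   # True
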
153
+
154
+
155
+ class Block3D(nn.Module):
156
+ r""" ConvNeXt Block. There are two equivalent implementations:
157
+ (1) DwConv -> LayerNorm (channels_first) -> 1x1 Conv -> GELU -> 1x1 Conv; all in (N, C, H, W)
158
+ (2) DwConv -> Permute to (N, H, W, C); LayerNorm (channels_last) -> Linear -> GELU -> Linear; Permute back
159
+ We use (2) as we find it slightly faster in PyTorch
160
+
161
+ Args:
162
+ dim (int): Number of input channels.
163
+ drop_path (float): Stochastic depth rate. Default: 0.0
164
+ layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
165
+ """
166
+ def __init__(self, dim, drop_path=0., inflate_len=3, layer_scale_init_value=1e-6):
167
+ super().__init__()
168
+ self.dwconv = nn.Conv3d(dim, dim, kernel_size=(inflate_len,7,7), padding=(inflate_len // 2,3,3), groups=dim) # depthwise conv
169
+ self.norm = LayerNorm(dim, eps=1e-6)
170
+ self.pwconv1 = nn.Linear(dim, 4 * dim) # pointwise/1x1 convs, implemented with linear layers
171
+ self.act = nn.GELU()
172
+ self.pwconv2 = nn.Linear(4 * dim, dim)
173
+ self.gamma = nn.Parameter(layer_scale_init_value * torch.ones((dim)),
174
+ requires_grad=True) if layer_scale_init_value > 0 else None
175
+ self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
176
+
177
+ def forward(self, x):
178
+ input = x
179
+ x = self.dwconv(x)
180
+ x = x.permute(0, 2, 3, 4, 1) # (N, C, D, H, W) -> (N, D, H, W, C)
181
+ x = self.norm(x)
182
+ x = self.pwconv1(x)
183
+ x = self.act(x)
184
+ x = self.pwconv2(x)
185
+ if self.gamma is not None:
186
+ x = self.gamma * x
187
+ x = x.permute(0, 4, 1, 2, 3) # (N, D, H, W, C) -> (N, C, D, H, W)
188
+
189
+ x = input + self.drop_path(x)
190
+ return x
191
+
192
+ class BlockV2(nn.Module):
193
+ """ ConvNeXtV2 Block.
194
+
195
+ Args:
196
+ dim (int): Number of input channels.
197
+ drop_path (float): Stochastic depth rate. Default: 0.0
198
+ """
199
+ def __init__(self, dim, drop_path=0.):
200
+ super().__init__()
201
+ self.dwconv = nn.Conv2d(dim, dim, kernel_size=7, padding=3, groups=dim) # depthwise conv
202
+ self.norm = LayerNorm(dim, eps=1e-6)
203
+ self.pwconv1 = nn.Linear(dim, 4 * dim) # pointwise/1x1 convs, implemented with linear layers
204
+ self.act = nn.GELU()
205
+ self.grn = GRN(4 * dim)
206
+ self.pwconv2 = nn.Linear(4 * dim, dim)
207
+ self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
208
+
209
+ def forward(self, x):
210
+ input = x
211
+ x = self.dwconv(x)
212
+ x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C)
213
+ x = self.norm(x)
214
+ x = self.pwconv1(x)
215
+ x = self.act(x)
216
+ x = self.grn(x)
217
+ x = self.pwconv2(x)
218
+ x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W)
219
+
220
+ x = input + self.drop_path(x)
221
+ return x
222
+
223
+ class BlockV23D(nn.Module):
224
+ """ ConvNeXtV2 Block.
225
+
226
+ Args:
227
+ dim (int): Number of input channels.
228
+ drop_path (float): Stochastic depth rate. Default: 0.0
229
+ """
230
+ def __init__(self, dim, drop_path=0., inflate_len=3,):
231
+ super().__init__()
232
+ self.dwconv = nn.Conv3d(dim, dim, kernel_size=(inflate_len,7,7), padding=(inflate_len // 2,3,3), groups=dim) # depthwise conv
233
+ self.norm = LayerNorm(dim, eps=1e-6)
234
+ self.pwconv1 = nn.Linear(dim, 4 * dim) # pointwise/1x1 convs, implemented with linear layers
235
+ self.act = nn.GELU()
236
+ self.grn = GRN(4 * dim)
237
+ self.pwconv2 = nn.Linear(4 * dim, dim)
238
+ self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
239
+
240
+ def forward(self, x):
241
+ input = x
242
+ x = self.dwconv(x)
243
+ x = x.permute(0, 2, 3, 4, 1) # (N, C, T, H, W) -> (N, T, H, W, C)
244
+ x = self.norm(x)
245
+ x = self.pwconv1(x)
246
+ x = self.act(x)
247
+ x = self.grn(x)
248
+ x = self.pwconv2(x)
249
+ x = x.permute(0, 4, 1, 2, 3) # (N, T, H, W, C) -> (N, C, T, H, W)
250
+
251
+ x = input + self.drop_path(x)
252
+ return x
253
+
254
+ class ConvNeXtV2(nn.Module):
255
+ """ ConvNeXt V2
256
+
257
+ Args:
258
+ in_chans (int): Number of input image channels. Default: 3
259
+ num_classes (int): Number of classes for classification head. Default: 1000
260
+ depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3]
261
+ dims (int): Feature dimension at each stage. Default: [96, 192, 384, 768]
262
+ drop_path_rate (float): Stochastic depth rate. Default: 0.
263
+ head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1.
264
+ """
265
+ def __init__(self, in_chans=3, num_classes=1000,
266
+ depths=[3, 3, 9, 3], dims=[96, 192, 384, 768],
267
+ drop_path_rate=0., head_init_scale=1.
268
+ ):
269
+ super().__init__()
270
+ self.depths = depths
271
+ self.downsample_layers = nn.ModuleList() # stem and 3 intermediate downsampling conv layers
272
+ stem = nn.Sequential(
273
+ nn.Conv2d(in_chans, dims[0], kernel_size=4, stride=4),
274
+ LayerNorm(dims[0], eps=1e-6, data_format="channels_first")
275
+ )
276
+ self.downsample_layers.append(stem)
277
+ for i in range(3):
278
+ downsample_layer = nn.Sequential(
279
+ LayerNorm(dims[i], eps=1e-6, data_format="channels_first"),
280
+ nn.Conv2d(dims[i], dims[i+1], kernel_size=2, stride=2),
281
+ )
282
+ self.downsample_layers.append(downsample_layer)
283
+
284
+ self.stages = nn.ModuleList() # 4 feature resolution stages, each consisting of multiple residual blocks
285
+ dp_rates=[x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))]
286
+ cur = 0
287
+ for i in range(4):
288
+ stage = nn.Sequential(
289
+ *[BlockV2(dim=dims[i], drop_path=dp_rates[cur + j]) for j in range(depths[i])]
290
+ )
291
+ self.stages.append(stage)
292
+ cur += depths[i]
293
+
294
+ self.norm = nn.LayerNorm(dims[-1], eps=1e-6) # final norm layer
295
+ self.head = nn.Linear(dims[-1], num_classes)
296
+
297
+ self.apply(self._init_weights)
298
+ self.head.weight.data.mul_(head_init_scale)
299
+ self.head.bias.data.mul_(head_init_scale)
300
+
301
+ def _init_weights(self, m):
302
+ if isinstance(m, (nn.Conv2d, nn.Linear)):
303
+ trunc_normal_(m.weight, std=.02)
304
+ nn.init.constant_(m.bias, 0)
305
+
306
+ def forward_features(self, x):
307
+ for i in range(4):
308
+ x = self.downsample_layers[i](x)
309
+ x = self.stages[i](x)
310
+ return self.norm(x.mean([-2, -1])) # global average pooling, (N, C, H, W) -> (N, C)
311
+
312
+ def forward(self, x):
313
+ x = self.forward_features(x)
314
+ x = self.head(x)
315
+ return x
316
+
317
+ def convnextv2_atto(**kwargs):
318
+ model = ConvNeXtV2(depths=[2, 2, 6, 2], dims=[40, 80, 160, 320], **kwargs)
319
+ return model
320
+
321
+ def convnextv2_femto(**kwargs):
322
+ model = ConvNeXtV2(depths=[2, 2, 6, 2], dims=[48, 96, 192, 384], **kwargs)
323
+ return model
324
+
325
+ def convnext_pico(**kwargs):
326
+ model = ConvNeXtV2(depths=[2, 2, 6, 2], dims=[64, 128, 256, 512], **kwargs)
327
+ return model
328
+
329
+ def convnextv2_nano(**kwargs):
330
+ model = ConvNeXtV2(depths=[2, 2, 8, 2], dims=[80, 160, 320, 640], **kwargs)
331
+ return model
332
+
333
+ def convnextv2_tiny(**kwargs):
334
+ model = ConvNeXtV2(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], **kwargs)
335
+ return model
336
+
337
+ def convnextv2_base(**kwargs):
338
+ model = ConvNeXtV2(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], **kwargs)
339
+ return model
340
+
341
+ def convnextv2_large(**kwargs):
342
+ model = ConvNeXtV2(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], **kwargs)
343
+ return model
344
+
345
+ def convnextv2_huge(**kwargs):
346
+ model = ConvNeXtV2(depths=[3, 3, 27, 3], dims=[352, 704, 1408, 2816], **kwargs)
347
+ return model
348
+
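# A minimal usage sketch for the 2D ConvNeXtV2 factories above (illustration only;
# the batch size and class count are assumptions). All variants share the same API
# and differ only in depths/dims.
def _sketch_convnextv2_2d():
    import torch
    model = convnextv2_femto(num_classes=10)
    logits = model(torch.randn(2, 3, 224, 224))   # (N, 3, H, W) -> (N, num_classes)
    assert logits.shape == (2, 10)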
349
+ class ConvNeXt3D(nn.Module):
350
+ r""" ConvNeXt
351
+ A PyTorch impl of : `A ConvNet for the 2020s` -
352
+ https://arxiv.org/pdf/2201.03545.pdf
353
+ Args:
354
+ in_chans (int): Number of input image channels. Default: 3
355
+ num_classes (int): Kept for interface compatibility; this 3D backbone has no classification head and returns feature maps. Default: 1000
356
+ depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3]
357
+ dims (int): Feature dimension at each stage. Default: [96, 192, 384, 768]
358
+ drop_path_rate (float): Stochastic depth rate. Default: 0.
359
+ layer_scale_init_value (float): Init value for Layer Scale. Default: 1e-6.
360
+ head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1.
361
+ """
362
+ def __init__(self, in_chans=3, num_classes=1000,
363
+ inflate_strategy='131',
364
+ depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], drop_path_rate=0.,
365
+ layer_scale_init_value=1e-6, head_init_scale=1.,
366
+ ):
367
+ super().__init__()
368
+
369
+ self.downsample_layers = nn.ModuleList() # stem and 3 intermediate downsampling conv layers
370
+ stem = nn.Sequential(
371
+ nn.Conv3d(in_chans, dims[0], kernel_size=(2,4,4), stride=(2,4,4)),
372
+ LayerNorm(dims[0], eps=1e-6, data_format="channels_first")
373
+ )
374
+ self.downsample_layers.append(stem)
375
+ for i in range(3):
376
+ downsample_layer = nn.Sequential(
377
+ LayerNorm(dims[i], eps=1e-6, data_format="channels_first"),
378
+ nn.Conv3d(dims[i], dims[i+1], kernel_size=(1,2,2), stride=(1,2,2)),
379
+ )
380
+ self.downsample_layers.append(downsample_layer)
381
+
382
+ self.stages = nn.ModuleList() # 4 feature resolution stages, each consisting of multiple residual blocks
383
+ dp_rates=[x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))]
384
+ cur = 0
385
+ for i in range(4):
386
+ stage = nn.Sequential(
387
+ *[Block3D(dim=dims[i], inflate_len=int(inflate_strategy[j%len(inflate_strategy)]),
388
+ drop_path=dp_rates[cur + j],
389
+ layer_scale_init_value=layer_scale_init_value) for j in range(depths[i])]
390
+ )
391
+ self.stages.append(stage)
392
+ cur += depths[i]
393
+
394
+ self.norm = nn.LayerNorm(dims[-1], eps=1e-6) # final norm layer
395
+
396
+ self.apply(self._init_weights)
397
+
398
+ def inflate_weights(self, s_state_dict):
399
+ t_state_dict = self.state_dict()
400
+ from collections import OrderedDict
401
+ for key in t_state_dict.keys():
402
+ if key not in s_state_dict:
403
+ print("key missing from pretrained 2D checkpoint:", key)
404
+ continue
405
+ if t_state_dict[key].shape != s_state_dict[key].shape:
406
+ t = t_state_dict[key].shape[2]
407
+ s_state_dict[key] = s_state_dict[key].unsqueeze(2).repeat(1,1,t,1,1) / t
408
+ self.load_state_dict(s_state_dict, strict=False)
409
+
410
+ def _init_weights(self, m):
411
+ if isinstance(m, (nn.Conv3d, nn.Linear)):
412
+ trunc_normal_(m.weight, std=.02)
413
+ nn.init.constant_(m.bias, 0)
414
+
415
+ def forward_features(self, x, return_spatial=False, multi=False, layer=-1):
416
+ if multi:
417
+ xs = []
418
+ for i in range(4):
419
+ x = self.downsample_layers[i](x)
420
+ x = self.stages[i](x)
421
+ if multi:
422
+ xs.append(x)
423
+ if return_spatial:
424
+ if multi:
425
+ shape = xs[-1].shape[2:]
426
+ return torch.cat([F.interpolate(x,size=shape, mode="trilinear") for x in xs[:-1]], 1) #+ [self.norm(x.permute(0, 2, 3, 4, 1)).permute(0, 4, 1, 2, 3)], 1)
427
+ elif layer > -1:
428
+ return xs[layer]
429
+ else:
430
+ return self.norm(x.permute(0, 2, 3, 4, 1)).permute(0, 4, 1, 2, 3)
431
+ return self.norm(x.mean([-3, -2, -1])) # global average pooling, (N, C, T, H, W) -> (N, C)
432
+
433
+ def forward(self, x, multi=False, layer=-1):
434
+ x = self.forward_features(x, True, multi=multi, layer=layer)
435
+ return x
436
+
437
+
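# A minimal sketch of what ConvNeXt3D.forward returns (illustration only; the clip size
# is an assumption). forward() always calls forward_features with return_spatial=True,
# so the backbone yields a spatial feature map rather than a pooled vector.
def _sketch_convnext3d_features():
    import torch
    backbone = ConvNeXt3D(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768])
    feat = backbone(torch.randn(1, 3, 16, 224, 224))   # (N, 3, T, H, W)
    assert feat.shape == (1, 768, 8, 7, 7)             # stem halves T; H and W shrink by 32x overall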
438
+ class ConvNeXtV23D(nn.Module):
439
+ """ ConvNeXt V2
440
+
441
+ Args:
442
+ in_chans (int): Number of input image channels. Default: 3
443
+ num_classes (int): Number of classes for classification head. Default: 1000
444
+ depths (tuple(int)): Number of blocks at each stage. Default: [3, 3, 9, 3]
445
+ dims (int): Feature dimension at each stage. Default: [96, 192, 384, 768]
446
+ drop_path_rate (float): Stochastic depth rate. Default: 0.
447
+ head_init_scale (float): Init scaling value for classifier weights and biases. Default: 1.
448
+ """
449
+ def __init__(self, in_chans=3, num_classes=1000,
450
+ inflate_strategy='131',
451
+ depths=[3, 3, 9, 3], dims=[96, 192, 384, 768],
452
+ drop_path_rate=0., head_init_scale=1.
453
+ ):
454
+ super().__init__()
455
+ self.depths = depths
456
+ self.downsample_layers = nn.ModuleList() # stem and 3 intermediate downsampling conv layers
457
+ stem = nn.Sequential(
458
+ nn.Conv3d(in_chans, dims[0], kernel_size=(2,4,4), stride=(2,4,4)),
459
+ LayerNorm(dims[0], eps=1e-6, data_format="channels_first")
460
+ )
461
+ self.downsample_layers.append(stem)
462
+ for i in range(3):
463
+ downsample_layer = nn.Sequential(
464
+ LayerNorm(dims[i], eps=1e-6, data_format="channels_first"),
465
+ nn.Conv3d(dims[i], dims[i+1], kernel_size=(1,2,2), stride=(1,2,2)),
466
+ )
467
+ self.downsample_layers.append(downsample_layer)
468
+
469
+ self.stages = nn.ModuleList() # 4 feature resolution stages, each consisting of multiple residual blocks
470
+ dp_rates=[x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))]
471
+ cur = 0
472
+ for i in range(4):
473
+ stage = nn.Sequential(
474
+ *[BlockV23D(dim=dims[i], drop_path=dp_rates[cur + j],
475
+ inflate_len=int(inflate_strategy[j%len(inflate_strategy)]),
476
+ ) for j in range(depths[i])]
477
+ )
478
+ self.stages.append(stage)
479
+ cur += depths[i]
480
+
481
+ self.norm = nn.LayerNorm(dims[-1], eps=1e-6) # final norm layer
482
+ self.head = nn.Linear(dims[-1], num_classes)
483
+
484
+ self.apply(self._init_weights)
485
+ self.head.weight.data.mul_(head_init_scale)
486
+ self.head.bias.data.mul_(head_init_scale)
487
+
488
+ def inflate_weights(self, pretrained_path):
489
+ t_state_dict = self.state_dict()
490
+ s_state_dict = torch.load(pretrained_path)["model"]
491
+ from collections import OrderedDict
492
+ for key in t_state_dict.keys():
493
+ if key not in s_state_dict:
494
+ print("key missing from pretrained 2D checkpoint:", key)
495
+ continue
496
+ if t_state_dict[key].shape != s_state_dict[key].shape:
497
+ print(t_state_dict[key].shape, s_state_dict[key].shape)
498
+ t = t_state_dict[key].shape[2]
499
+ s_state_dict[key] = s_state_dict[key].unsqueeze(2).repeat(1,1,t,1,1) / t
500
+ self.load_state_dict(s_state_dict, strict=False)
501
+
502
+ def _init_weights(self, m):
503
+ if isinstance(m, (nn.Conv3d, nn.Linear)):
504
+ trunc_normal_(m.weight, std=.02)
505
+ nn.init.constant_(m.bias, 0)
506
+
507
+ def forward_features(self, x, return_spatial=False, multi=False, layer=-1):
508
+ if multi:
509
+ xs = []
510
+ for i in range(4):
511
+ x = self.downsample_layers[i](x)
512
+ x = self.stages[i](x)
513
+ if multi:
514
+ xs.append(x)
515
+ if return_spatial:
516
+ if multi:
517
+ shape = xs[-1].shape[2:]
518
+ return torch.cat([F.interpolate(x,size=shape, mode="trilinear") for x in xs[:-1]], 1) #+ [self.norm(x.permute(0, 2, 3, 4, 1)).permute(0, 4, 1, 2, 3)], 1)
519
+ elif layer > -1:
520
+ return xs[layer]
521
+ else:
522
+ return self.norm(x.permute(0, 2, 3, 4, 1)).permute(0, 4, 1, 2, 3)
523
+ return self.norm(x.mean([-3, -2, -1])) # global average pooling, (N, C, T, H, W) -> (N, C)
524
+
525
+ def forward(self, x, multi=False, layer=-1):
526
+ x = self.forward_features(x, True, multi=multi, layer=layer)
527
+ return x
528
+
529
+
530
+ model_urls = {
531
+ "convnext_tiny_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_tiny_1k_224_ema.pth",
532
+ "convnext_small_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_small_1k_224_ema.pth",
533
+ "convnext_base_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_base_1k_224_ema.pth",
534
+ "convnext_large_1k": "https://dl.fbaipublicfiles.com/convnext/convnext_large_1k_224_ema.pth",
535
+ "convnext_tiny_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_tiny_22k_224.pth",
536
+ "convnext_small_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_small_22k_224.pth",
537
+ "convnext_base_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_base_22k_224.pth",
538
+ "convnext_large_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_large_22k_224.pth",
539
+ "convnext_xlarge_22k": "https://dl.fbaipublicfiles.com/convnext/convnext_xlarge_22k_224.pth",
540
+ }
541
+
542
+ def convnext_tiny(pretrained=False,in_22k=False, **kwargs):
543
+ model = ConvNeXt(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], **kwargs)
544
+ if pretrained:
545
+ url = model_urls['convnext_tiny_22k'] if in_22k else model_urls['convnext_tiny_1k']
546
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu", check_hash=True)
547
+ model.load_state_dict(checkpoint["model"])
548
+ return model
549
+
550
+ def convnext_small(pretrained=False,in_22k=False, **kwargs):
551
+ model = ConvNeXt(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768], **kwargs)
552
+ if pretrained:
553
+ url = model_urls['convnext_small_22k'] if in_22k else model_urls['convnext_small_1k']
554
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
555
+ model.load_state_dict(checkpoint["model"])
556
+ return model
557
+
558
+ def convnext_base(pretrained=False, in_22k=False, **kwargs):
559
+ model = ConvNeXt(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], **kwargs)
560
+ if pretrained:
561
+ url = model_urls['convnext_base_22k'] if in_22k else model_urls['convnext_base_1k']
562
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
563
+ model.load_state_dict(checkpoint["model"])
564
+ return model
565
+
566
+
567
+ def convnext_large(pretrained=False, in_22k=False, **kwargs):
568
+ model = ConvNeXt(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], **kwargs)
569
+ if pretrained:
570
+ url = model_urls['convnext_large_22k'] if in_22k else model_urls['convnext_large_1k']
571
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
572
+ model.load_state_dict(checkpoint["model"])
573
+ return model
574
+
575
+ def convnext_xlarge(pretrained=False, in_22k=False, **kwargs):
576
+ model = ConvNeXt(depths=[3, 3, 27, 3], dims=[256, 512, 1024, 2048], **kwargs)
577
+ if pretrained:
578
+ assert in_22k, "only ImageNet-22K pre-trained ConvNeXt-XL is available; please set in_22k=True"
579
+ url = model_urls['convnext_xlarge_22k']
580
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu")
581
+ model.load_state_dict(checkpoint["model"])
582
+
583
+ return model
584
+
585
+ def convnext_3d_tiny(pretrained=False, in_22k=False, **kwargs):
586
+ print("Using Imagenet 22K pretrain", in_22k)
587
+ model = ConvNeXt3D(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], **kwargs)
588
+ if pretrained:
589
+ url = model_urls['convnext_tiny_22k'] if in_22k else model_urls['convnext_tiny_1k']
590
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu", check_hash=True)
591
+ model.inflate_weights(checkpoint["model"])
592
+ return model
593
+
594
+ def convnext_3d_small(pretrained=False, in_22k=False, **kwargs):
595
+ model = ConvNeXt3D(depths=[3, 3, 27, 3], dims=[96, 192, 384, 768], **kwargs)
596
+ if pretrained:
597
+ url = model_urls['convnext_small_22k'] if in_22k else model_urls['convnext_small_1k']
598
+ checkpoint = torch.hub.load_state_dict_from_url(url=url, map_location="cpu", check_hash=True)
599
+ model.inflate_weights(checkpoint["model"])
600
+
601
+ return model
602
+
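# A minimal sketch of the 2D-to-3D weight inflation used by inflate_weights above
# (illustration only; the kernel shape is an assumption): a 2D kernel is repeated
# along the new temporal axis and divided by its length, so a temporally constant
# input initially produces the same response as the 2D model.
def _sketch_kernel_inflation():
    import torch
    w2d = torch.randn(96, 3, 4, 4)                     # (out, in, kH, kW) from a 2D checkpoint
    t = 2                                              # temporal kernel size of the 3D stem
    w3d = w2d.unsqueeze(2).repeat(1, 1, t, 1, 1) / t   # (out, in, kT, kH, kW)
    assert torch.allclose(w3d.sum(2), w2d)             # summing over time recovers the 2D kernel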
603
+ def convnextv2_3d_atto(**kwargs):
604
+ model = ConvNeXtV23D(depths=[2, 2, 6, 2], dims=[40, 80, 160, 320], **kwargs)
605
+
606
+ return model
607
+
608
+ def convnextv2_3d_femto(pretrained="../pretrained/convnextv2_femto_1k_224_ema.pt", **kwargs):
609
+ model = ConvNeXtV23D(depths=[2, 2, 6, 2], dims=[48, 96, 192, 384], **kwargs)
610
+ # model.inflate_weights(pretrained)  # inflation is disabled, so the default `pretrained` path above is currently unused
611
+ return model
612
+
613
+ def convnextv2_3d_pico(pretrained="../pretrained/convnextv2_pico_1k_224_ema.pt", **kwargs):
614
+ model = ConvNeXtV23D(depths=[2, 2, 6, 2], dims=[64, 128, 256, 512], **kwargs)
615
+ # model.inflate_weights(pretrained)  # inflation is disabled, so the default `pretrained` path above is currently unused
616
+ return model
617
+
618
+ def convnextv2_3d_nano(pretrained="../pretrained/convnextv2_nano_1k_224_ema.pt", **kwargs):
619
+ model = ConvNeXtV23D(depths=[2, 2, 8, 2], dims=[80, 160, 320, 640], **kwargs)
620
+ # model.inflate_weights(pretrained)  # inflation is disabled, so the default `pretrained` path above is currently unused
621
+ return model
622
+
623
+ def convnextv2_3d_tiny(**kwargs):  # renamed: the original shadowed the 2D convnextv2_tiny defined above
624
+ model = ConvNeXtV23D(depths=[3, 3, 9, 3], dims=[96, 192, 384, 768], **kwargs)
625
+ return model
626
+
627
+ def convnextv2_3d_base(**kwargs):  # renamed: the original shadowed the 2D convnextv2_base defined above
628
+ model = ConvNeXtV23D(depths=[3, 3, 27, 3], dims=[128, 256, 512, 1024], **kwargs)
629
+ return model
630
+
631
+ def convnextv2_3d_large(**kwargs):  # renamed: the original shadowed the 2D convnextv2_large defined above
632
+ model = ConvNeXtV23D(depths=[3, 3, 27, 3], dims=[192, 384, 768, 1536], **kwargs)
633
+ return model
634
+
635
+ def convnextv2_3d_huge(**kwargs):  # renamed for consistency; the original redefined convnextv2_huge with the 2D class, which looks like a copy-paste slip
636
+ model = ConvNeXtV23D(depths=[3, 3, 27, 3], dims=[352, 704, 1408, 2816], **kwargs)
637
+ return model
638
+
639
+ def clip_vitL14(pretrained, **kwargs):
640
+ model = CLIPIQA(model_type='clipiqa+_vitL14_512', backbone='ViT-L/14', pretrained=pretrained)
641
+ return model
642
+
643
+ if __name__ == "__main__":
644
+
645
+ device = "cuda" if torch.cuda.is_available() else "cpu"
646
+ model = convnext_3d_tiny(True).to(device)
647
+ print(model)
648
+ from thop import profile
649
+ print(profile(model, (torch.randn(4,3,32,224,224).to(device),))[0] / 1e9)
650
+
651
+
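# A CPU-only smoke test mirroring the profiling block above (illustration only; it avoids
# the thop dependency and the CUDA requirement, and does not download pretrained weights).
def _sketch_smoke_test_conv_backbone():
    import torch
    model = convnext_3d_tiny(pretrained=False).eval()
    with torch.no_grad():
        feat = model(torch.randn(1, 3, 32, 224, 224))
    assert feat.shape == (1, 768, 16, 7, 7)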
cover/models/evaluator.py ADDED
@@ -0,0 +1,374 @@
1
+ import time
2
+ from functools import partial, reduce
3
+
4
+ import torch
5
+ import torch.nn as nn
6
+ from torch.nn.functional import adaptive_avg_pool3d
7
+
8
+ from .conv_backbone import convnext_3d_small, convnext_3d_tiny, convnextv2_3d_pico, convnextv2_3d_femto, clip_vitL14
9
+ from .head import IQAHead, VARHead, VQAHead
10
+ from .swin_backbone import SwinTransformer2D as ImageBackbone
11
+ from .swin_backbone import SwinTransformer3D as VideoBackbone
12
+ from .swin_backbone import swin_3d_small, swin_3d_tiny
13
+
14
+
15
+ class BaseEvaluator(nn.Module):
16
+ def __init__(
17
+ self, backbone=dict(), vqa_head=dict(),
18
+ ):
19
+ super().__init__()
20
+ self.backbone = VideoBackbone(**backbone)
21
+ self.vqa_head = VQAHead(**vqa_head)
22
+
23
+ def forward(self, vclip, inference=True, **kwargs):
24
+ if inference:
25
+ self.eval()
26
+ with torch.no_grad():
27
+ feat = self.backbone(vclip)
28
+ score = self.vqa_head(feat)
29
+ self.train()
30
+ return score
31
+ else:
32
+ feat = self.backbone(vclip)
33
+ score = self.vqa_head(feat)
34
+ return score
35
+
36
+ def forward_with_attention(self, vclip):
37
+ self.eval()
38
+ with torch.no_grad():
39
+ feat, avg_attns = self.backbone(vclip, require_attn=True)
40
+ score = self.vqa_head(feat)
41
+ return score, avg_attns
42
+
43
+
44
+ class COVER(nn.Module):
45
+ def __init__(
46
+ self,
47
+ backbone_size="divided",
48
+ backbone_preserve_keys="fragments,resize",
49
+ multi=False,
50
+ layer=-1,
51
+ backbone=dict(
52
+ resize={"window_size": (4, 4, 4)}, fragments={"window_size": (4, 4, 4)}
53
+ ),
54
+ divide_head=False,
55
+ vqa_head=dict(in_channels=768),
56
+ var=False,
57
+ ):
58
+ self.backbone_preserve_keys = backbone_preserve_keys.split(",")
59
+ self.multi = multi
60
+ self.layer = layer
61
+ super().__init__()
62
+ for key, hypers in backbone.items():
63
+ print(backbone_size)
64
+ if key not in self.backbone_preserve_keys:
65
+ continue
66
+ if backbone_size == "divided":
67
+ t_backbone_size = hypers["type"]
68
+ else:
69
+ t_backbone_size = backbone_size
70
+ if t_backbone_size == "swin_tiny":
71
+ b = swin_3d_tiny(**backbone[key])
72
+ elif t_backbone_size == "swin_tiny_grpb":
73
+ # to reproduce fast-vqa
74
+ b = VideoBackbone()
75
+ elif t_backbone_size == "swin_tiny_grpb_m":
76
+ # to reproduce fast-vqa-m
77
+ b = VideoBackbone(window_size=(4, 4, 4), frag_biases=[0, 0, 0, 0])
78
+ elif t_backbone_size == "swin_small":
79
+ b = swin_3d_small(**backbone[key])
80
+ elif t_backbone_size == "conv_tiny":
81
+ b = convnext_3d_tiny(pretrained=True)
82
+ elif t_backbone_size == "conv_small":
83
+ b = convnext_3d_small(pretrained=True)
84
+ elif t_backbone_size == "conv_femto":
85
+ b = convnextv2_3d_femto(pretrained=True)
86
+ elif t_backbone_size == "conv_pico":
87
+ b = convnextv2_3d_pico(pretrained=True)
88
+ elif t_backbone_size == "xclip":
89
+ raise NotImplementedError
90
+ elif t_backbone_size == "clip_iqa+":
91
+ b = clip_vitL14(pretrained=True)
92
+ else:
93
+ raise NotImplementedError
94
+ print("Setting backbone:", key + "_backbone")
95
+ setattr(self, key + "_backbone", b)
96
+ if divide_head:
97
+ for key in backbone:
98
+ pre_pool = False #if key == "technical" else True
99
+ if key not in self.backbone_preserve_keys:
100
+ continue
101
+ b = VQAHead(pre_pool=pre_pool, **vqa_head)
102
+ print("Setting head:", key + "_head")
103
+ setattr(self, key + "_head", b)
104
+ else:
105
+ if var:
106
+ self.vqa_head = VARHead(**vqa_head)
107
+ print(b)
108
+ else:
109
+ self.vqa_head = VQAHead(**vqa_head)
110
+ self.smtc_gate_tech = CrossGatingBlock(x_features=768, num_channels=768, block_size=1,
111
+ grid_size=1, upsample_y=False, dropout_rate=0.1, use_bias=True, use_global_mlp=False)
112
+ self.smtc_gate_aesc = CrossGatingBlock(x_features=768, num_channels=768, block_size=1,
113
+ grid_size=1, upsample_y=False, dropout_rate=0.1, use_bias=True, use_global_mlp=False)
114
+
115
+ def forward(
116
+ self,
117
+ vclips,
118
+ inference=True,
119
+ return_pooled_feats=False,
120
+ return_raw_feats=False,
121
+ reduce_scores=False,
122
+ pooled=False,
123
+ **kwargs
124
+ ):
125
+ assert not (return_pooled_feats and return_raw_feats), "Please only choose one kind of features to return"
126
+ if inference:
127
+ self.eval()
128
+ with torch.no_grad():
129
+ scores = []
130
+ feats = {}
131
+ # the 'semantic' view must come first in `vclips`: its features gate the
+ # technical/aesthetic branches below, so dict insertion order matters here
+ for key in vclips:
132
+ if key == 'technical' or key == 'aesthetic':
133
+ feat = getattr(self, key.split("_")[0] + "_backbone")(
134
+ vclips[key], multi=self.multi, layer=self.layer, **kwargs
135
+ )
136
+ if key == 'technical':
137
+ feat_gated = self.smtc_gate_tech(feats['semantic'], feat)
138
+ elif key == 'aesthetic':
139
+ feat_gated = self.smtc_gate_aesc(feats['semantic'], feat)
140
+ if hasattr(self, key.split("_")[0] + "_head"):
141
+ scores += [getattr(self, key.split("_")[0] + "_head")(feat_gated)]
142
+ else:
143
+ scores += [getattr(self, "vqa_head")(feat_gated)]
144
+ elif key == 'semantic':
145
+ x = vclips[key].squeeze()  # inference path assumes a batch size of 1 for the semantic view
146
+ x = x.permute(1,0,2,3)
147
+ feat, _ = getattr(self, key.split("_")[0] + "_backbone")(
148
+ x, multi=self.multi, layer=self.layer, **kwargs
149
+ )
150
+ # for image feature from clipiqa+ VIT14
151
+ # image feature shape (t, c) -> (16, 768)
152
+ feat = feat.permute(1,0).contiguous() # (c, t) -> (768, 16)
153
+ feat = feat.unsqueeze(-1).unsqueeze(-1) # (c, t, w, h) -> (768, 16, 1, 1)
154
+ feat_expand = feat.expand(-1, -1, 7, 7) # (c, t, w, h) -> (768, 16, 7, 7)
155
+ feat_expand = feat_expand.unsqueeze(0) # (b, c, t, w, h) -> (1, 768, 16, 7, 7)
156
+ if hasattr(self, key.split("_")[0] + "_head"):
157
+ score = getattr(self, key.split("_")[0] + "_head")(feat_expand)
158
+ else:
159
+ score = getattr(self, "vqa_head")(feat_expand)
160
+ scores += [score]
161
+ feats[key] = feat_expand
162
+ if reduce_scores:
163
+ if len(scores) > 1:
164
+ scores = reduce(lambda x, y: x + y, scores)
165
+ else:
166
+ scores = scores[0]
167
+ if pooled:
168
+ scores = torch.mean(scores, (1, 2, 3, 4))
169
+ self.train()
170
+ if return_pooled_feats or return_raw_feats:
171
+ return scores, feats
172
+ return scores
173
+ else:
174
+ self.train()
175
+ scores = []
176
+ feats = {}
177
+ # the 'semantic' view must come first in `vclips`: its features gate the
+ # technical/aesthetic branches below, so dict insertion order matters here
+ for key in vclips:
178
+ if key == 'technical' or key == 'aesthetic':
179
+ feat = getattr(self, key.split("_")[0] + "_backbone")(
180
+ vclips[key], multi=self.multi, layer=self.layer, **kwargs
181
+ )
182
+ if key == 'technical':
183
+ feat_gated = self.smtc_gate_tech(feats['semantic'], feat)
184
+ elif key == 'aesthetic':
185
+ feat_gated = self.smtc_gate_aesc(feats['semantic'], feat)
186
+ if hasattr(self, key.split("_")[0] + "_head"):
187
+ scores += [getattr(self, key.split("_")[0] + "_head")(feat_gated)]
188
+ else:
189
+ scores += [getattr(self, "vqa_head")(feat_gated)]
190
+ feats[key] = feat
191
+ elif key == 'semantic':
192
+ scores_semantic_list = []
193
+ feats_semantic_list = []
194
+ for batch_idx in range(vclips[key].shape[0]):
195
+ x = vclips[key][batch_idx].squeeze()
196
+ x = x.permute(1,0,2,3)
197
+ feat, _ = getattr(self, key.split("_")[0] + "_backbone")(
198
+ x, multi=self.multi, layer=self.layer, **kwargs
199
+ )
200
+ # for image feature from clipiqa+ VIT14
201
+ # image feature shape (t, c) -> (16, 768)
202
+ feat = feat.permute(1,0).contiguous() # (c, t) -> (768, 16)
203
+ feat = feat.unsqueeze(-1).unsqueeze(-1) # (c, t, w, h) -> (768, 16, 1, 1)
204
+ feat_expand = feat.expand(-1, -1, 7, 7) # (c, t, w, h) -> (768, 16, 7, 7)
205
+ feats_semantic_list.append(feat_expand)
206
+ if hasattr(self, key.split("_")[0] + "_head"):
207
+ feat_expand = feat_expand.unsqueeze(0) # (b, c, t, w, h) -> (1, 768, 16, 7, 7)
208
+ score = getattr(self, key.split("_")[0] + "_head")(feat_expand)
209
+ score = score.squeeze(0)
210
+ scores_semantic_list.append(score)
211
+ else:
212
+ feat_expand = feat_expand.unsqueeze(0) # (b, c, t, w, h) -> (1, 768, 16, 7, 7)
213
+ score = getattr(self, "vqa_head")(feat_expand)
214
+ score = score.squeeze(0)
215
+ scores_semantic_list.append(score)
216
+ scores_semantic_tensor = torch.stack(scores_semantic_list)
217
+ feats[key] = torch.stack(feats_semantic_list)
218
+ scores += [scores_semantic_tensor]
219
+ if return_pooled_feats:
220
+ feats[key] = feat.mean((-3, -2, -1))
221
+ if reduce_scores:
222
+ if len(scores) > 1:
223
+ scores = reduce(lambda x, y: x + y, scores)
224
+ else:
225
+ scores = scores[0]
226
+ if pooled:
227
+ print(scores.shape)
228
+ scores = torch.mean(scores, (1, 2, 3, 4))
229
+ print(scores.shape)
230
+
231
+ if return_pooled_feats:
232
+ return scores, feats
233
+ return scores
234
+
235
+ def forward_head(
236
+ self,
237
+ feats,
238
+ inference=True,
239
+ reduce_scores=False,
240
+ pooled=False,
241
+ **kwargs
242
+ ):
243
+ if inference:
244
+ self.eval()
245
+ with torch.no_grad():
246
+ scores = []
247
+ feats = {}
248
+ for key in feats:
249
+ feat = feats[key]
250
+ if hasattr(self, key.split("_")[0] + "_head"):
251
+ scores += [getattr(self, key.split("_")[0] + "_head")(feat)]
252
+ else:
253
+ scores += [getattr(self, "vqa_head")(feat)]
254
+ if reduce_scores:
255
+ if len(scores) > 1:
256
+ scores = reduce(lambda x, y: x + y, scores)
257
+ else:
258
+ scores = scores[0]
259
+ if pooled:
260
+ scores = torch.mean(scores, (1, 2, 3, 4))
261
+ self.train()
262
+ return scores
263
+ else:
264
+ self.train()
265
+ scores = []
266
+ feats = {}
267
+ for key in vclips:
268
+ feat = getattr(self, key.split("_")[0] + "_backbone")(
269
+ vclips[key], multi=self.multi, layer=self.layer, **kwargs
270
+ )
271
+ if hasattr(self, key.split("_")[0] + "_head"):
272
+ scores += [getattr(self, key.split("_")[0] + "_head")(feat)]
273
+ else:
274
+ scores += [getattr(self, "vqa_head")(feat)]
275
+ if return_pooled_feats:
276
+ feats[key] = feat
277
+ if reduce_scores:
278
+ if len(scores) > 1:
279
+ scores = reduce(lambda x, y: x + y, scores)
280
+ else:
281
+ scores = scores[0]
282
+ if pooled:
283
+ print(scores.shape)
284
+ scores = torch.mean(scores, (1, 2, 3, 4))
285
+ print(scores.shape)
286
+
287
+ if return_pooled_feats:
288
+ return scores, feats
289
+ return scores
290
+
291
+ class MinimumCOVER(nn.Module):
292
+ def __init__(self):
293
+ super().__init__()
294
+ self.technical_backbone = VideoBackbone()
295
+ self.aesthetic_backbone = convnext_3d_tiny(pretrained=True)
296
+ self.technical_head = VQAHead(pre_pool=False, in_channels=768)
297
+ self.aesthetic_head = VQAHead(pre_pool=False, in_channels=768)
298
+
299
+
300
+ def forward(self,aesthetic_view, technical_view):
301
+ self.eval()
302
+ with torch.no_grad():
303
+ aesthetic_score = self.aesthetic_head(self.aesthetic_backbone(aesthetic_view))
304
+ technical_score = self.technical_head(self.technical_backbone(technical_view))
305
+
306
+ aesthetic_score_pooled = torch.mean(aesthetic_score, (1,2,3,4))
307
+ technical_score_pooled = torch.mean(technical_score, (1,2,3,4))
308
+ return [aesthetic_score_pooled, technical_score_pooled]
309
+
310
+
311
+
312
+ class BaseImageEvaluator(nn.Module):
313
+ def __init__(
314
+ self, backbone=dict(), iqa_head=dict(),
315
+ ):
316
+ super().__init__()
317
+ self.backbone = ImageBackbone(**backbone)
318
+ self.iqa_head = IQAHead(**iqa_head)
319
+
320
+ def forward(self, image, inference=True, **kwargs):
321
+ if inference:
322
+ self.eval()
323
+ with torch.no_grad():
324
+ feat = self.backbone(image)
325
+ score = self.iqa_head(feat)
326
+ self.train()
327
+ return score
328
+ else:
329
+ feat = self.backbone(image)
330
+ score = self.iqa_head(feat)
331
+ return score
332
+
333
+ def forward_with_attention(self, image):
334
+ self.eval()
335
+ with torch.no_grad():
336
+ feat, avg_attns = self.backbone(image, require_attn=True)
337
+ score = self.iqa_head(feat)
338
+ return score, avg_attns
339
+
340
+ class CrossGatingBlock(nn.Module): #input shape: n, c, h, w
341
+ """Cross-gating MLP block."""
342
+ def __init__(self, x_features, num_channels, block_size, grid_size, cin_y=0,upsample_y=True, use_bias=True, use_global_mlp=True, dropout_rate=0):
343
+ super().__init__()
344
+ self.cin_y = cin_y
345
+ self.x_features = x_features
346
+ self.num_channels = num_channels
347
+ self.block_size = block_size
348
+ self.grid_size = grid_size
349
+ self.upsample_y = upsample_y
350
+ self.use_bias = use_bias
351
+ self.use_global_mlp = use_global_mlp
352
+ self.drop = dropout_rate
353
+ self.Conv_0 = nn.Linear(self.x_features, self.num_channels)
354
+ self.Conv_1 = nn.Linear(self.num_channels, self.num_channels)
355
+ self.in_project_x = nn.Linear(self.num_channels, self.num_channels, bias=self.use_bias)
356
+ self.gelu1 = nn.GELU(approximate='tanh')
357
+ self.out_project_y = nn.Linear(self.num_channels, self.num_channels, bias=self.use_bias)
358
+ self.dropout1 = nn.Dropout(self.drop)
359
+ def forward(self, x, y):  # inputs: (n, c, t, h, w)
360
+ # Upscale Y signal, y is the gating signal.
361
+ assert y.shape == x.shape
362
+ x = x.permute(0,2,3,4,1).contiguous() #n,t,h,w,c
363
+ y = y.permute(0,2,3,4,1).contiguous() #n,t,h,w,c
364
+ x = self.Conv_0(x)
365
+ y = self.Conv_1(y)
366
+ shortcut_y = y
367
+ x = self.in_project_x(x)
368
+ gx = self.gelu1(x)
369
+ # Apply cross gating
370
+ y = y * gx # gating y using x
371
+ y = self.out_project_y(y)
372
+ y = self.dropout1(y)
373
+ y = y + shortcut_y # y = y * x + y
374
+ return y.permute(0,4,1,2,3).contiguous() #n,c,t,h,w
cover/models/head.py ADDED
@@ -0,0 +1,101 @@
1
+ import math
2
+
3
+ import numpy as np
4
+ import torch
5
+ import torch.nn as nn
6
+ from torch.nn import functional as F
7
+ from torchvision.ops import roi_align, roi_pool
8
+
9
+
10
+ class VQAHead(nn.Module):
11
+ """MLP Regression Head for VQA.
12
+ Args:
13
+ in_channels: input channels for MLP
14
+ hidden_channels: hidden channels for MLP
15
+ dropout_ratio: the dropout ratio for features before the MLP (default 0.5)
16
+ pre_pool: whether to pre-pool the features before the MLP (True for Aesthetic Attributes, False for Technical Attributes)
17
+ """
18
+
19
+ def __init__(
20
+ self, in_channels=768, hidden_channels=64, dropout_ratio=0.5, pre_pool=False, **kwargs
21
+ ):
22
+ super().__init__()
23
+ self.dropout_ratio = dropout_ratio
24
+ self.in_channels = in_channels
25
+ self.hidden_channels = hidden_channels
26
+ self.pre_pool = pre_pool
27
+ if self.dropout_ratio != 0:
28
+ self.dropout = nn.Dropout(p=self.dropout_ratio)
29
+ else:
30
+ self.dropout = None
31
+ self.fc_hid = nn.Conv3d(self.in_channels, self.hidden_channels, (1, 1, 1))
32
+ self.fc_last = nn.Conv3d(self.hidden_channels, 1, (1, 1, 1))
33
+ self.gelu = nn.GELU()
34
+
35
+ self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1))
36
+
37
+ def forward(self, x, rois=None):
38
+ if self.pre_pool:
39
+ x = self.avg_pool(x)
40
+ x = self.dropout(x)
41
+ qlt_score = self.fc_last(self.dropout(self.gelu(self.fc_hid(x))))
42
+ return qlt_score
43
+
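# A minimal usage sketch of VQAHead (illustration only; the feature shape is an assumption):
# with pre_pool=False the head returns a dense score map, with pre_pool=True a single
# pooled score per clip.
def _sketch_vqa_head():
    import torch
    feat = torch.randn(2, 768, 16, 7, 7)                    # (N, C, T', H', W') backbone features
    dense = VQAHead(in_channels=768, pre_pool=False)(feat)  # (2, 1, 16, 7, 7)
    pooled = VQAHead(in_channels=768, pre_pool=True)(feat)  # (2, 1, 1, 1, 1)
    assert dense.shape == (2, 1, 16, 7, 7) and pooled.shape == (2, 1, 1, 1, 1)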
44
+
45
+
46
+
47
+
48
+ class VARHead(nn.Module):
49
+ """MLP Regression Head for Video Action Recognition.
50
+ Args:
51
+ in_channels: input channels for MLP
52
+ hidden_channels: hidden channels for MLP
53
+ dropout_ratio: the dropout ratio for features before the MLP (default 0.5)
54
+ """
55
+
56
+ def __init__(self, in_channels=768, out_channels=400, dropout_ratio=0.5, **kwargs):
57
+ super().__init__()
58
+ self.dropout_ratio = dropout_ratio
59
+ self.in_channels = in_channels
60
+ self.out_channels = out_channels
61
+ if self.dropout_ratio != 0:
62
+ self.dropout = nn.Dropout(p=self.dropout_ratio)
63
+ else:
64
+ self.dropout = None
65
+ self.fc = nn.Conv3d(self.in_channels, self.out_channels, (1, 1, 1))
66
+ self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1))
67
+
68
+ def forward(self, x, rois=None):
69
+ x = self.dropout(x)
70
+ x = self.avg_pool(x)
71
+ out = self.fc(x)
72
+ return out
73
+
74
+
75
+ class IQAHead(nn.Module):
76
+ """MLP Regression Head for IQA.
77
+ Args:
78
+ in_channels: input channels for MLP
79
+ hidden_channels: hidden channels for MLP
80
+ dropout_ratio: the dropout ratio for features before the MLP (default 0.5)
81
+ """
82
+
83
+ def __init__(
84
+ self, in_channels=768, hidden_channels=64, dropout_ratio=0.5, **kwargs
85
+ ):
86
+ super().__init__()
87
+ self.dropout_ratio = dropout_ratio
88
+ self.in_channels = in_channels
89
+ self.hidden_channels = hidden_channels
90
+ if self.dropout_ratio != 0:
91
+ self.dropout = nn.Dropout(p=self.dropout_ratio)
92
+ else:
93
+ self.dropout = None
94
+ self.fc_hid = nn.Linear(self.in_channels, self.hidden_channels)
95
+ self.fc_last = nn.Linear(self.hidden_channels, 1)
96
+ self.gelu = nn.GELU()
97
+
98
+ def forward(self, x):
99
+ x = self.dropout(x)
100
+ qlt_score = self.fc_last(self.dropout(self.gelu(self.fc_hid(x))))
101
+ return qlt_score
cover/models/swin_backbone.py ADDED
@@ -0,0 +1,1097 @@
1
+ import math
2
+ from functools import lru_cache, reduce
3
+ from operator import mul
4
+
5
+ import numpy as np
6
+ import torch
7
+ import torch.nn as nn
8
+ import torch.nn.functional as F
9
+ import torch.utils.checkpoint as checkpoint
10
+ from einops import rearrange
11
+ from timm.models.layers import DropPath, trunc_normal_
12
+
13
+
14
+ def fragment_infos(D, H, W, fragments=7, device="cuda"):
15
+ m = torch.arange(fragments).unsqueeze(-1).float()
16
+ m = (m + m.t() * fragments).reshape(1, 1, 1, fragments, fragments)
17
+ m = F.interpolate(m.to(device), size=(D, H, W)).permute(0, 2, 3, 4, 1)
18
+ return m.long()
19
+
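# A minimal sketch of fragment_infos (illustration only; sizes are assumptions): it labels
# every voxel of a (D, H, W) feature volume with the id of the 7x7 spatial fragment it
# belongs to, giving ids 0..48.
def _sketch_fragment_infos():
    m = fragment_infos(4, 14, 14, fragments=7, device="cpu")
    assert m.shape == (1, 4, 14, 14, 1) and m.max().item() == 48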
20
+
21
+ @lru_cache
22
+ def global_position_index(
23
+ D,
24
+ H,
25
+ W,
26
+ fragments=(1, 7, 7),
27
+ window_size=(8, 7, 7),
28
+ shift_size=(0, 0, 0),
29
+ device="cuda",
30
+ ):
31
+ frags_d = torch.arange(fragments[0])
32
+ frags_h = torch.arange(fragments[1])
33
+ frags_w = torch.arange(fragments[2])
34
+ frags = torch.stack(
35
+ torch.meshgrid(frags_d, frags_h, frags_w)
36
+ ).float() # 3, Fd, Fh, Fw
37
+ coords = (
38
+ torch.nn.functional.interpolate(frags[None].to(device), size=(D, H, W))
39
+ .long()
40
+ .permute(0, 2, 3, 4, 1)
41
+ )
42
+ # print(shift_size)
43
+ coords = torch.roll(
44
+ coords, shifts=(-shift_size[0], -shift_size[1], -shift_size[2]), dims=(1, 2, 3)
45
+ )
46
+ window_coords = window_partition(coords, window_size)
47
+ relative_coords = (
48
+ window_coords[:, None, :] - window_coords[:, :, None]
49
+ ) # Wd*Wh*Ww, Wd*Wh*Ww, 3
50
+ return relative_coords # relative_coords
51
+
52
+
53
+ @lru_cache
54
+ def get_adaptive_window_size(
55
+ base_window_size, input_x_size, base_x_size,
56
+ ):
57
+ tw, hw, ww = base_window_size
58
+ tx_, hx_, wx_ = input_x_size
59
+ tx, hx, wx = base_x_size
60
+ print((tw * tx_) // tx, (hw * hx_) // hx, (ww * wx_) // wx)
61
+ return (tw * tx_) // tx, (hw * hx_) // hx, (ww * wx_) // wx
62
+
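# A minimal sketch of get_adaptive_window_size (illustration only; sizes are assumptions):
# each base window dimension is rescaled by the ratio of the actual input size to the
# base input size, e.g. an (8, 7, 7) window tuned for (32, 224, 224) inputs becomes
# (4, 14, 14) for a (16, 448, 448) input.
def _sketch_adaptive_window():
    assert get_adaptive_window_size((8, 7, 7), (16, 448, 448), (32, 224, 224)) == (4, 14, 14)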
63
+
64
+ class Mlp(nn.Module):
65
+ """Multilayer perceptron."""
66
+
67
+ def __init__(
68
+ self,
69
+ in_features,
70
+ hidden_features=None,
71
+ out_features=None,
72
+ act_layer=nn.GELU,
73
+ drop=0.0,
74
+ ):
75
+ super().__init__()
76
+ out_features = out_features or in_features
77
+ hidden_features = hidden_features or in_features
78
+ self.fc1 = nn.Linear(in_features, hidden_features)
79
+ self.act = act_layer()
80
+ self.fc2 = nn.Linear(hidden_features, out_features)
81
+ self.drop = nn.Dropout(drop)
82
+
83
+ def forward(self, x):
84
+ x = self.fc1(x)
85
+ x = self.act(x)
86
+ x = self.drop(x)
87
+ x = self.fc2(x)
88
+ x = self.drop(x)
89
+ return x
90
+
91
+
92
+ def window_partition(x, window_size):
93
+ """
94
+ Args:
95
+ x: (B, D, H, W, C)
96
+ window_size (tuple[int]): window size
97
+
98
+ Returns:
99
+ windows: (B*num_windows, window_size[0]*window_size[1]*window_size[2], C)
100
+ """
101
+ B, D, H, W, C = x.shape
102
+ x = x.view(
103
+ B,
104
+ D // window_size[0],
105
+ window_size[0],
106
+ H // window_size[1],
107
+ window_size[1],
108
+ W // window_size[2],
109
+ window_size[2],
110
+ C,
111
+ )
112
+ windows = (
113
+ x.permute(0, 1, 3, 5, 2, 4, 6, 7)
114
+ .contiguous()
115
+ .view(-1, reduce(mul, window_size), C)
116
+ )
117
+ return windows
118
+
119
+
120
+ def window_reverse(windows, window_size, B, D, H, W):
121
+ """
122
+ Args:
123
+ windows: (B*num_windows, window_size[0]*window_size[1]*window_size[2], C)
124
+ window_size (tuple[int]): Window size
125
+ H (int): Height of image
126
+ W (int): Width of image
127
+
128
+ Returns:
129
+ x: (B, D, H, W, C)
130
+ """
131
+ x = windows.view(
132
+ B,
133
+ D // window_size[0],
134
+ H // window_size[1],
135
+ W // window_size[2],
136
+ window_size[0],
137
+ window_size[1],
138
+ window_size[2],
139
+ -1,
140
+ )
141
+ x = x.permute(0, 1, 4, 2, 5, 3, 6, 7).contiguous().view(B, D, H, W, -1)
142
+ return x
143
+
144
+
145
+ def get_window_size(x_size, window_size, shift_size=None):
146
+ use_window_size = list(window_size)
147
+ if shift_size is not None:
148
+ use_shift_size = list(shift_size)
149
+ for i in range(len(x_size)):
150
+ if x_size[i] <= window_size[i]:
151
+ use_window_size[i] = x_size[i]
152
+ if shift_size is not None:
153
+ use_shift_size[i] = 0
154
+
155
+ if shift_size is None:
156
+ return tuple(use_window_size)
157
+ else:
158
+ return tuple(use_window_size), tuple(use_shift_size)
159
+
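# A minimal round-trip sketch for window_partition / window_reverse (illustration only;
# the tensor sizes are assumptions): when D, H, W divide evenly into the window size,
# the two functions are exact inverses.
def _sketch_window_roundtrip():
    import torch
    x = torch.randn(2, 8, 56, 56, 96)                      # (B, D, H, W, C)
    ws = (4, 7, 7)
    windows = window_partition(x, ws)                      # (B*num_windows, 4*7*7, C)
    restored = window_reverse(windows, ws, 2, 8, 56, 56)   # back to (B, D, H, W, C)
    assert torch.equal(restored, x)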
160
+
161
+ class WindowAttention3D(nn.Module):
162
+ """Window based multi-head self attention (W-MSA) module with relative position bias.
163
+ It supports both shifted and non-shifted windows.
164
+ Args:
165
+ dim (int): Number of input channels.
166
+ window_size (tuple[int]): The temporal length, height and width of the window.
167
+ num_heads (int): Number of attention heads.
168
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
169
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set
170
+ attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
171
+ proj_drop (float, optional): Dropout ratio of output. Default: 0.0
172
+ """
173
+
174
+ def __init__(
175
+ self,
176
+ dim,
177
+ window_size,
178
+ num_heads,
179
+ qkv_bias=False,
180
+ qk_scale=None,
181
+ attn_drop=0.0,
182
+ proj_drop=0.0,
183
+ frag_bias=False,
184
+ ):
185
+
186
+ super().__init__()
187
+ self.dim = dim
188
+ self.window_size = window_size # Wd, Wh, Ww
189
+ self.num_heads = num_heads
190
+ head_dim = dim // num_heads
191
+ self.scale = qk_scale or head_dim ** -0.5
192
+
193
+ # define a parameter table of relative position bias
194
+ self.relative_position_bias_table = nn.Parameter(
195
+ torch.zeros(
196
+ (2 * window_size[0] - 1)
197
+ * (2 * window_size[1] - 1)
198
+ * (2 * window_size[2] - 1),
199
+ num_heads,
200
+ )
201
+ ) # 2*Wd-1 * 2*Wh-1 * 2*Ww-1, nH
202
+ if frag_bias:
203
+ self.fragment_position_bias_table = nn.Parameter(
204
+ torch.zeros(
205
+ (2 * window_size[0] - 1)
206
+ * (2 * window_size[1] - 1)
207
+ * (2 * window_size[2] - 1),
208
+ num_heads,
209
+ )
210
+ )
211
+
212
+ # get pair-wise relative position index for each token inside the window
213
+ coords_d = torch.arange(self.window_size[0])
214
+ coords_h = torch.arange(self.window_size[1])
215
+ coords_w = torch.arange(self.window_size[2])
216
+ coords = torch.stack(
217
+ torch.meshgrid(coords_d, coords_h, coords_w)
218
+ ) # 3, Wd, Wh, Ww
219
+ coords_flatten = torch.flatten(coords, 1) # 3, Wd*Wh*Ww
220
+ relative_coords = (
221
+ coords_flatten[:, :, None] - coords_flatten[:, None, :]
222
+ ) # 3, Wd*Wh*Ww, Wd*Wh*Ww
223
+ relative_coords = relative_coords.permute(
224
+ 1, 2, 0
225
+ ).contiguous() # Wd*Wh*Ww, Wd*Wh*Ww, 3
226
+ relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0
227
+ relative_coords[:, :, 1] += self.window_size[1] - 1
228
+ relative_coords[:, :, 2] += self.window_size[2] - 1
229
+
230
+ relative_coords[:, :, 0] *= (2 * self.window_size[1] - 1) * (
231
+ 2 * self.window_size[2] - 1
232
+ )
233
+ relative_coords[:, :, 1] *= 2 * self.window_size[2] - 1
234
+ relative_position_index = relative_coords.sum(-1) # Wd*Wh*Ww, Wd*Wh*Ww
235
+ self.register_buffer("relative_position_index", relative_position_index)
236
+
237
+ self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
238
+ self.attn_drop = nn.Dropout(attn_drop)
239
+ self.proj = nn.Linear(dim, dim)
240
+ self.proj_drop = nn.Dropout(proj_drop)
241
+
242
+ trunc_normal_(self.relative_position_bias_table, std=0.02)
243
+ self.softmax = nn.Softmax(dim=-1)
244
+
245
+ def forward(self, x, mask=None, fmask=None, resized_window_size=None):
246
+ """Forward function.
247
+ Args:
248
+ x: input features with shape of (num_windows*B, N, C)
249
+ mask: (0/-inf) mask with shape of (num_windows, N, N) or None
250
+ """
251
+ # print(x.shape)
252
+ B_, N, C = x.shape
253
+ qkv = (
254
+ self.qkv(x)
255
+ .reshape(B_, N, 3, self.num_heads, C // self.num_heads)
256
+ .permute(2, 0, 3, 1, 4)
257
+ )
258
+ q, k, v = qkv[0], qkv[1], qkv[2] # B_, nH, N, C
259
+
260
+ q = q * self.scale
261
+ attn = q @ k.transpose(-2, -1)
262
+
263
+ if resized_window_size is None:
264
+ rpi = self.relative_position_index[:N, :N]
265
+ else:
266
+ relative_position_index = self.relative_position_index.reshape(
267
+ *self.window_size, *self.window_size
268
+ )
269
+ d, h, w = resized_window_size
270
+
271
+ rpi = relative_position_index[:d, :h, :w, :d, :h, :w]
272
+ relative_position_bias = self.relative_position_bias_table[
273
+ rpi.reshape(-1)
274
+ ].reshape(
275
+ N, N, -1
276
+ ) # Wd*Wh*Ww,Wd*Wh*Ww,nH
277
+ relative_position_bias = relative_position_bias.permute(
278
+ 2, 0, 1
279
+ ).contiguous() # nH, Wd*Wh*Ww, Wd*Wh*Ww
280
+ if hasattr(self, "fragment_position_bias_table"):
281
+ fragment_position_bias = self.fragment_position_bias_table[
282
+ rpi.reshape(-1)
283
+ ].reshape(
284
+ N, N, -1
285
+ ) # Wd*Wh*Ww,Wd*Wh*Ww,nH
286
+ fragment_position_bias = fragment_position_bias.permute(
287
+ 2, 0, 1
288
+ ).contiguous() # nH, Wd*Wh*Ww, Wd*Wh*Ww
289
+
290
+ ### Mask Position Bias
291
+ if fmask is not None:
292
+ # fgate = torch.where(fmask - fmask.transpose(-1, -2) == 0, 1, 0).float()
293
+ fgate = fmask.abs().sum(-1)
294
+ nW = fmask.shape[0]
295
+ relative_position_bias = relative_position_bias.unsqueeze(0)
296
+ fgate = fgate.unsqueeze(1)
297
+ # print(fgate.shape, relative_position_bias.shape)
298
+ if hasattr(self, "fragment_position_bias_table"):
299
+ relative_position_bias = (
300
+ relative_position_bias * fgate
301
+ + fragment_position_bias * (1 - fgate)
302
+ )
303
+
304
+ attn = attn.view(
305
+ B_ // nW, nW, self.num_heads, N, N
306
+ ) + relative_position_bias.unsqueeze(0)
307
+ attn = attn.view(-1, self.num_heads, N, N)
308
+ else:
309
+ attn = attn + relative_position_bias.unsqueeze(0) # B_, nH, N, N
310
+
311
+ if mask is not None:
312
+ nW = mask.shape[0]
313
+ attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(
314
+ 1
315
+ ).unsqueeze(0)
316
+ attn = attn.view(-1, self.num_heads, N, N)
317
+ attn = self.softmax(attn)
318
+ else:
319
+ attn = self.softmax(attn)
320
+ attn = self.attn_drop(attn)
321
+
322
+ x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
323
+ x = self.proj(x)
324
+ x = self.proj_drop(x)
325
+
326
+ return x
327
+
328
+
329
+ class SwinTransformerBlock3D(nn.Module):
330
+ """Swin Transformer Block.
331
+
332
+ Args:
333
+ dim (int): Number of input channels.
334
+ num_heads (int): Number of attention heads.
335
+ window_size (tuple[int]): Window size.
336
+ shift_size (tuple[int]): Shift size for SW-MSA.
337
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim.
338
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
339
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
340
+ drop (float, optional): Dropout rate. Default: 0.0
341
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
342
+ drop_path (float, optional): Stochastic depth rate. Default: 0.0
343
+ act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
344
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
345
+ """
346
+
347
+ def __init__(
348
+ self,
349
+ dim,
350
+ num_heads,
351
+ window_size=(2, 7, 7),
352
+ shift_size=(0, 0, 0),
353
+ mlp_ratio=4.0,
354
+ qkv_bias=True,
355
+ qk_scale=None,
356
+ drop=0.0,
357
+ attn_drop=0.0,
358
+ drop_path=0.0,
359
+ act_layer=nn.GELU,
360
+ norm_layer=nn.LayerNorm,
361
+ use_checkpoint=False,
362
+ jump_attention=False,
363
+ frag_bias=False,
364
+ ):
365
+ super().__init__()
366
+ self.dim = dim
367
+ self.num_heads = num_heads
368
+ self.window_size = window_size
369
+ self.shift_size = shift_size
370
+ self.mlp_ratio = mlp_ratio
371
+ self.use_checkpoint = use_checkpoint
372
+ self.jump_attention = jump_attention
373
+ self.frag_bias = frag_bias
374
+
375
+ assert (
376
+ 0 <= self.shift_size[0] < self.window_size[0]
377
+ ), "shift_size must in 0-window_size"
378
+ assert (
379
+ 0 <= self.shift_size[1] < self.window_size[1]
380
+ ), "shift_size must in 0-window_size"
381
+ assert (
382
+ 0 <= self.shift_size[2] < self.window_size[2]
383
+ ), "shift_size must in 0-window_size"
384
+
385
+ self.norm1 = norm_layer(dim)
386
+ self.attn = WindowAttention3D(
387
+ dim,
388
+ window_size=self.window_size,
389
+ num_heads=num_heads,
390
+ qkv_bias=qkv_bias,
391
+ qk_scale=qk_scale,
392
+ attn_drop=attn_drop,
393
+ proj_drop=drop,
394
+ frag_bias=frag_bias,
395
+ )
396
+
397
+ self.drop_path = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()
398
+ self.norm2 = norm_layer(dim)
399
+ mlp_hidden_dim = int(dim * mlp_ratio)
400
+ self.mlp = Mlp(
401
+ in_features=dim,
402
+ hidden_features=mlp_hidden_dim,
403
+ act_layer=act_layer,
404
+ drop=drop,
405
+ )
406
+
407
+ def forward_part1(self, x, mask_matrix, resized_window_size=None):
408
+ B, D, H, W, C = x.shape
409
+ window_size, shift_size = get_window_size(
410
+ (D, H, W),
411
+ self.window_size if resized_window_size is None else resized_window_size,
412
+ self.shift_size,
413
+ )
414
+
415
+ x = self.norm1(x)
416
+ # pad feature maps to multiples of window size
417
+ pad_l = pad_t = pad_d0 = 0
418
+ pad_d1 = (window_size[0] - D % window_size[0]) % window_size[0]
419
+ pad_b = (window_size[1] - H % window_size[1]) % window_size[1]
420
+ pad_r = (window_size[2] - W % window_size[2]) % window_size[2]
421
+
422
+ x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1))
423
+ _, Dp, Hp, Wp, _ = x.shape
424
+ if False: # not hasattr(self, 'finfo_windows'):
425
+ finfo = fragment_infos(Dp, Hp, Wp)
426
+
427
+ # cyclic shift
428
+ if any(i > 0 for i in shift_size):
429
+ shifted_x = torch.roll(
430
+ x,
431
+ shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
432
+ dims=(1, 2, 3),
433
+ )
434
+ if False: # not hasattr(self, 'finfo_windows'):
435
+ shifted_finfo = torch.roll(
436
+ finfo,
437
+ shifts=(-shift_size[0], -shift_size[1], -shift_size[2]),
438
+ dims=(1, 2, 3),
439
+ )
440
+ attn_mask = mask_matrix
441
+ else:
442
+ shifted_x = x
443
+ if False: # not hasattr(self, 'finfo_windows'):
444
+ shifted_finfo = finfo
445
+ attn_mask = None
446
+ # partition windows
447
+ x_windows = window_partition(shifted_x, window_size) # B*nW, Wd*Wh*Ww, C
448
+ if False: # not hasattr(self, 'finfo_windows'):
449
+ self.finfo_windows = window_partition(shifted_finfo, window_size)
450
+ # W-MSA/SW-MSA
451
+ # print(shift_size)
452
+ gpi = global_position_index(
453
+ Dp,
454
+ Hp,
455
+ Wp,
456
+ fragments=(1,) + window_size[1:],
457
+ window_size=window_size,
458
+ shift_size=shift_size,
459
+ device=x.device,
460
+ )
461
+ attn_windows = self.attn(
462
+ x_windows,
463
+ mask=attn_mask,
464
+ fmask=gpi,
465
+ resized_window_size=window_size
466
+ if resized_window_size is not None
467
+ else None,
468
+ ) # self.finfo_windows) # B*nW, Wd*Wh*Ww, C
469
+ # merge windows
470
+ attn_windows = attn_windows.view(-1, *(window_size + (C,)))
471
+ shifted_x = window_reverse(
472
+ attn_windows, window_size, B, Dp, Hp, Wp
473
+ ) # B D' H' W' C
474
+ # reverse cyclic shift
475
+ if any(i > 0 for i in shift_size):
476
+ x = torch.roll(
477
+ shifted_x,
478
+ shifts=(shift_size[0], shift_size[1], shift_size[2]),
479
+ dims=(1, 2, 3),
480
+ )
481
+ else:
482
+ x = shifted_x
483
+
484
+ if pad_d1 > 0 or pad_r > 0 or pad_b > 0:
485
+ x = x[:, :D, :H, :W, :].contiguous()
486
+ return x
487
+
488
+ def forward_part2(self, x):
489
+ return self.drop_path(self.mlp(self.norm2(x)))
490
+
491
+ def forward(self, x, mask_matrix, resized_window_size=None):
492
+ """Forward function.
493
+
494
+ Args:
495
+ x: Input feature, tensor size (B, D, H, W, C).
496
+ mask_matrix: Attention mask for cyclic shift.
497
+ """
498
+
499
+ shortcut = x
500
+ if not self.jump_attention:
501
+ if self.use_checkpoint:
502
+ x = checkpoint.checkpoint(
503
+ self.forward_part1, x, mask_matrix, resized_window_size
504
+ )
505
+ else:
506
+ x = self.forward_part1(x, mask_matrix, resized_window_size)
507
+ x = shortcut + self.drop_path(x)
508
+
509
+ if self.use_checkpoint:
510
+ x = x + checkpoint.checkpoint(self.forward_part2, x)
511
+ else:
512
+ x = x + self.forward_part2(x)
513
+
514
+ return x
515
+
516
+
517
+ class PatchMerging(nn.Module):
518
+ """Patch Merging Layer
519
+
520
+ Args:
521
+ dim (int): Number of input channels.
522
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
523
+ """
524
+
525
+ def __init__(self, dim, norm_layer=nn.LayerNorm):
526
+ super().__init__()
527
+ self.dim = dim
528
+ self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
529
+ self.norm = norm_layer(4 * dim)
530
+
531
+ def forward(self, x):
532
+ """Forward function.
533
+
534
+ Args:
535
+ x: Input feature, tensor size (B, D, H, W, C).
536
+ """
537
+ B, D, H, W, C = x.shape
538
+
539
+ # padding
540
+ pad_input = (H % 2 == 1) or (W % 2 == 1)
541
+ if pad_input:
542
+ x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2))
543
+
544
+ x0 = x[:, :, 0::2, 0::2, :] # B D H/2 W/2 C
545
+ x1 = x[:, :, 1::2, 0::2, :] # B D H/2 W/2 C
546
+ x2 = x[:, :, 0::2, 1::2, :] # B D H/2 W/2 C
547
+ x3 = x[:, :, 1::2, 1::2, :] # B D H/2 W/2 C
548
+ x = torch.cat([x0, x1, x2, x3], -1) # B D H/2 W/2 4*C
549
+
550
+ x = self.norm(x)
551
+ x = self.reduction(x)
552
+
553
+ return x
554
+
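# A minimal sketch of PatchMerging (illustration only; sizes are assumptions): it halves
# H and W, doubles the channel count, and leaves the temporal dimension D untouched.
def _sketch_patch_merging():
    import torch
    merge = PatchMerging(dim=96)
    x = torch.randn(1, 8, 56, 56, 96)            # (B, D, H, W, C)
    assert merge(x).shape == (1, 8, 28, 28, 192)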
555
+
556
+ # cache each stage results
557
+ @lru_cache()
558
+ def compute_mask(D, H, W, window_size, shift_size, device):
559
+ img_mask = torch.zeros((1, D, H, W, 1), device=device) # 1 Dp Hp Wp 1
560
+ cnt = 0
561
+ for d in (
562
+ slice(-window_size[0]),
563
+ slice(-window_size[0], -shift_size[0]),
564
+ slice(-shift_size[0], None),
565
+ ):
566
+ for h in (
567
+ slice(-window_size[1]),
568
+ slice(-window_size[1], -shift_size[1]),
569
+ slice(-shift_size[1], None),
570
+ ):
571
+ for w in (
572
+ slice(-window_size[2]),
573
+ slice(-window_size[2], -shift_size[2]),
574
+ slice(-shift_size[2], None),
575
+ ):
576
+ img_mask[:, d, h, w, :] = cnt
577
+ cnt += 1
578
+ mask_windows = window_partition(img_mask, window_size) # nW, ws[0]*ws[1]*ws[2], 1
579
+ mask_windows = mask_windows.squeeze(-1) # nW, ws[0]*ws[1]*ws[2]
580
+ attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
581
+ attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(
582
+ attn_mask == 0, float(0.0)
583
+ )
584
+ return attn_mask
585
+
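+ # compute_mask builds the additive attention mask for shifted-window attention:
+ # entries are 0 where two tokens fall in the same shifted region and -100
+ # (effectively -inf after softmax) otherwise. The lru_cache decorator memoizes
+ # the mask per (D, H, W, window_size, shift_size, device) combination.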
586
+
587
+ class BasicLayer(nn.Module):
588
+ """A basic Swin Transformer layer for one stage.
589
+
590
+ Args:
591
+ dim (int): Number of feature channels
592
+ depth (int): Depth of this stage.
593
+ num_heads (int): Number of attention heads.
594
+ window_size (tuple[int]): Local window size. Default: (1,7,7).
595
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
596
+ qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
597
+ qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set.
598
+ drop (float, optional): Dropout rate. Default: 0.0
599
+ attn_drop (float, optional): Attention dropout rate. Default: 0.0
600
+ drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0
601
+ norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
602
+ downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None
603
+ """
604
+
605
+ def __init__(
606
+ self,
607
+ dim,
608
+ depth,
609
+ num_heads,
610
+ window_size=(1, 7, 7),
611
+ mlp_ratio=4.0,
612
+ qkv_bias=False,
613
+ qk_scale=None,
614
+ drop=0.0,
615
+ attn_drop=0.0,
616
+ drop_path=0.0,
617
+ norm_layer=nn.LayerNorm,
618
+ downsample=None,
619
+ use_checkpoint=False,
620
+ jump_attention=False,
621
+ frag_bias=False,
622
+ ):
623
+ super().__init__()
624
+ self.window_size = window_size
625
+ self.shift_size = tuple(i // 2 for i in window_size)
626
+ self.depth = depth
627
+ self.use_checkpoint = use_checkpoint
628
+ # print(window_size)
629
+ # build blocks
630
+ self.blocks = nn.ModuleList(
631
+ [
632
+ SwinTransformerBlock3D(
633
+ dim=dim,
634
+ num_heads=num_heads,
635
+ window_size=window_size,
636
+ shift_size=(0, 0, 0) if (i % 2 == 0) else self.shift_size,
637
+ mlp_ratio=mlp_ratio,
638
+ qkv_bias=qkv_bias,
639
+ qk_scale=qk_scale,
640
+ drop=drop,
641
+ attn_drop=attn_drop,
642
+ drop_path=drop_path[i]
643
+ if isinstance(drop_path, list)
644
+ else drop_path,
645
+ norm_layer=norm_layer,
646
+ use_checkpoint=use_checkpoint,
647
+ jump_attention=jump_attention,
648
+ frag_bias=frag_bias,
649
+ )
650
+ for i in range(depth)
651
+ ]
652
+ )
653
+
654
+ self.downsample = downsample
655
+ if self.downsample is not None:
656
+ self.downsample = downsample(dim=dim, norm_layer=norm_layer)
657
+
658
+ def forward(self, x, resized_window_size=None):
659
+ """Forward function.
660
+
661
+ Args:
662
+ x: Input feature, tensor size (B, C, D, H, W).
663
+ """
664
+ # calculate attention mask for SW-MSA
665
+ B, C, D, H, W = x.shape
666
+
667
+ window_size, shift_size = get_window_size(
668
+ (D, H, W),
669
+ self.window_size if resized_window_size is None else resized_window_size,
670
+ self.shift_size,
671
+ )
672
+ # print(window_size)
673
+ x = rearrange(x, "b c d h w -> b d h w c")
674
+ Dp = int(np.ceil(D / window_size[0])) * window_size[0]
675
+ Hp = int(np.ceil(H / window_size[1])) * window_size[1]
676
+ Wp = int(np.ceil(W / window_size[2])) * window_size[2]
677
+ attn_mask = compute_mask(Dp, Hp, Wp, window_size, shift_size, x.device)
678
+ for blk in self.blocks:
679
+ x = blk(x, attn_mask, resized_window_size=resized_window_size)
680
+ x = x.view(B, D, H, W, -1)
681
+
682
+ if self.downsample is not None:
683
+ x = self.downsample(x)
684
+ x = rearrange(x, "b d h w c -> b c d h w")
685
+ return x
686
+
687
+
688
+ class PatchEmbed3D(nn.Module):
689
+ """Video to Patch Embedding.
690
+
691
+ Args:
692
+ patch_size (tuple[int]): Patch token size. Default: (2,4,4).
693
+ in_chans (int): Number of input video channels. Default: 3.
694
+ embed_dim (int): Number of linear projection output channels. Default: 96.
695
+ norm_layer (nn.Module, optional): Normalization layer. Default: None
696
+ """
697
+
698
+ def __init__(self, patch_size=(2, 4, 4), in_chans=3, embed_dim=96, norm_layer=None):
699
+ super().__init__()
700
+ self.patch_size = patch_size
701
+
702
+ self.in_chans = in_chans
703
+ self.embed_dim = embed_dim
704
+
705
+ self.proj = nn.Conv3d(
706
+ in_chans, embed_dim, kernel_size=patch_size, stride=patch_size
707
+ )
708
+ if norm_layer is not None:
709
+ self.norm = norm_layer(embed_dim)
710
+ else:
711
+ self.norm = None
712
+
713
+ def forward(self, x):
714
+ """Forward function."""
715
+ # padding
716
+ _, _, D, H, W = x.size()
717
+ if W % self.patch_size[2] != 0:
718
+ x = F.pad(x, (0, self.patch_size[2] - W % self.patch_size[2]))
719
+ if H % self.patch_size[1] != 0:
720
+ x = F.pad(x, (0, 0, 0, self.patch_size[1] - H % self.patch_size[1]))
721
+ if D % self.patch_size[0] != 0:
722
+ x = F.pad(x, (0, 0, 0, 0, 0, self.patch_size[0] - D % self.patch_size[0]))
723
+
724
+ x = self.proj(x) # B C D Wh Ww
725
+ if self.norm is not None:
726
+ D, Wh, Ww = x.size(2), x.size(3), x.size(4)
727
+ x = x.flatten(2).transpose(1, 2)
728
+ x = self.norm(x)
729
+ x = x.transpose(1, 2).view(-1, self.embed_dim, D, Wh, Ww)
730
+
731
+ return x
732
+
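+ # Shape sketch for PatchEmbed3D (illustrative, assuming the default patch_size
+ # (2, 4, 4) and embed_dim=96): a clip of size (B, 3, 32, 224, 224) is projected
+ # to (B, 96, 16, 56, 56) before entering the Swin stages.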
733
+
734
+ class SwinTransformer3D(nn.Module):
735
+ """Swin Transformer backbone.
736
+ A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` -
737
+ https://arxiv.org/pdf/2103.14030
738
+
739
+ Args:
740
+ patch_size (int | tuple(int)): Patch size. Default: (2,4,4).
741
+ in_chans (int): Number of input image channels. Default: 3.
742
+ embed_dim (int): Number of linear projection output channels. Default: 96.
743
+ depths (tuple[int]): Depths of each Swin Transformer stage.
744
+ num_heads (tuple[int]): Number of attention heads of each stage.
745
+ window_size (tuple[int]): Window size. Default: (8,7,7).
746
+ mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4.
747
+ qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True
748
+ qk_scale (float): Override default qk scale of head_dim ** -0.5 if set.
749
+ drop_rate (float): Dropout rate.
750
+ attn_drop_rate (float): Attention dropout rate. Default: 0.
751
+ drop_path_rate (float): Stochastic depth rate. Default: 0.2.
752
+ norm_layer: Normalization layer. Default: nn.LayerNorm.
753
+ patch_norm (bool): If True, add normalization after patch embedding. Default: False.
754
+ frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
755
+ -1 means not freezing any parameters.
756
+ """
757
+
758
+ def __init__(
759
+ self,
760
+ pretrained=None,
761
+ pretrained2d=False,
762
+ patch_size=(2, 4, 4),
763
+ in_chans=3,
764
+ embed_dim=96,
765
+ depths=[2, 2, 6, 2],
766
+ num_heads=[3, 6, 12, 24],
767
+ window_size=(8, 7, 7),
768
+ mlp_ratio=4.0,
769
+ qkv_bias=True,
770
+ qk_scale=None,
771
+ drop_rate=0.0,
772
+ attn_drop_rate=0.0,
773
+ drop_path_rate=0.1,
774
+ norm_layer=nn.LayerNorm,
775
+ patch_norm=True,
776
+ frozen_stages=-1,
777
+ use_checkpoint=True,
778
+ jump_attention=[False, False, False, False],
779
+ frag_biases=[True, True, True, False],
780
+ base_x_size=(32, 224, 224),
781
+ ):
782
+ super().__init__()
783
+
784
+ self.pretrained = pretrained
785
+ self.pretrained2d = pretrained2d
786
+ self.num_layers = len(depths)
787
+ self.embed_dim = embed_dim
788
+ self.patch_norm = patch_norm
789
+ self.frozen_stages = frozen_stages
790
+ self.window_size = window_size
791
+ self.patch_size = patch_size
792
+ self.base_x_size = base_x_size
793
+
794
+ # split image into non-overlapping patches
795
+ self.patch_embed = PatchEmbed3D(
796
+ patch_size=patch_size,
797
+ in_chans=in_chans,
798
+ embed_dim=embed_dim,
799
+ norm_layer=norm_layer if self.patch_norm else None,
800
+ )
801
+
802
+ self.pos_drop = nn.Dropout(p=drop_rate)
803
+
804
+ # stochastic depth
805
+ dpr = [
806
+ x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))
807
+ ] # stochastic depth decay rule
808
+
809
+ # build layers
810
+ self.layers = nn.ModuleList()
811
+ for i_layer in range(self.num_layers):
812
+ layer = BasicLayer(
813
+ dim=int(embed_dim * 2 ** i_layer),
814
+ depth=depths[i_layer],
815
+ num_heads=num_heads[i_layer],
816
+ window_size=window_size[i_layer]
817
+ if isinstance(window_size, list)
818
+ else window_size,
819
+ mlp_ratio=mlp_ratio,
820
+ qkv_bias=qkv_bias,
821
+ qk_scale=qk_scale,
822
+ drop=drop_rate,
823
+ attn_drop=attn_drop_rate,
824
+ drop_path=dpr[sum(depths[:i_layer]) : sum(depths[: i_layer + 1])],
825
+ norm_layer=norm_layer,
826
+ downsample=PatchMerging if i_layer < self.num_layers - 1 else None,
827
+ use_checkpoint=use_checkpoint,
828
+ jump_attention=jump_attention[i_layer],
829
+ frag_bias=frag_biases[i_layer],
830
+ )
831
+ self.layers.append(layer)
832
+
833
+ self.num_features = int(embed_dim * 2 ** (self.num_layers - 1))
834
+
835
+ # add a norm layer for each output
836
+ self.norm = norm_layer(self.num_features)
837
+
838
+ self._freeze_stages()
839
+
840
+ self.init_weights()
841
+
842
+ def _freeze_stages(self):
843
+ if self.frozen_stages >= 0:
844
+ self.patch_embed.eval()
845
+ for param in self.patch_embed.parameters():
846
+ param.requires_grad = False
847
+
848
+ if self.frozen_stages >= 1:
849
+ self.pos_drop.eval()
850
+ for i in range(0, self.frozen_stages):
851
+ m = self.layers[i]
852
+ m.eval()
853
+ for param in m.parameters():
854
+ param.requires_grad = False
855
+
856
+ def inflate_weights(self):
857
+ """Inflate the swin2d parameters to swin3d.
858
+
859
+ The differences between swin3d and swin2d mainly lie in an extra
860
+ axis. To utilize the pretrained parameters of the 2d model,
861
+ the weights of the swin2d model are inflated to fit the shapes of
862
+ the 3d counterpart.
863
+
864
+ Args:
865
+ logger (logging.Logger): The logger used to print
866
+ debugging information.
867
+ """
868
+ checkpoint = torch.load(self.pretrained, map_location="cpu")
869
+ state_dict = checkpoint["model"]
870
+
871
+ # delete relative_position_index since we always re-init it
872
+ relative_position_index_keys = [
873
+ k for k in state_dict.keys() if "relative_position_index" in k
874
+ ]
875
+ for k in relative_position_index_keys:
876
+ del state_dict[k]
877
+
878
+ # delete attn_mask since we always re-init it
879
+ attn_mask_keys = [k for k in state_dict.keys() if "attn_mask" in k]
880
+ for k in attn_mask_keys:
881
+ del state_dict[k]
882
+
883
+ state_dict["patch_embed.proj.weight"] = (
884
+ state_dict["patch_embed.proj.weight"]
885
+ .unsqueeze(2)
886
+ .repeat(1, 1, self.patch_size[0], 1, 1)
887
+ / self.patch_size[0]
888
+ )
889
+
890
+ # bicubic interpolate relative_position_bias_table if not match
891
+ relative_position_bias_table_keys = [
892
+ k for k in state_dict.keys() if "relative_position_bias_table" in k
893
+ ]
894
+ for k in relative_position_bias_table_keys:
895
+ relative_position_bias_table_pretrained = state_dict[k]
896
+ relative_position_bias_table_current = self.state_dict()[k]
897
+ L1, nH1 = relative_position_bias_table_pretrained.size()
898
+ L2, nH2 = relative_position_bias_table_current.size()
899
+ L2 = (2 * self.window_size[1] - 1) * (2 * self.window_size[2] - 1)
900
+ wd = self.window_size[0]
901
+ if nH1 != nH2:
902
+ print(f"Error in loading {k}, passing")
903
+ else:
904
+ if L1 != L2:
905
+ S1 = int(L1 ** 0.5)
906
+ relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
907
+ relative_position_bias_table_pretrained.permute(1, 0).view(
908
+ 1, nH1, S1, S1
909
+ ),
910
+ size=(
911
+ 2 * self.window_size[1] - 1,
912
+ 2 * self.window_size[2] - 1,
913
+ ),
914
+ mode="bicubic",
915
+ )
916
+ relative_position_bias_table_pretrained = relative_position_bias_table_pretrained_resized.view(
917
+ nH2, L2
918
+ ).permute(
919
+ 1, 0
920
+ )
921
+ state_dict[k] = relative_position_bias_table_pretrained.repeat(
922
+ 2 * wd - 1, 1
923
+ )
924
+
925
+ msg = self.load_state_dict(state_dict, strict=False)
926
+ print(msg)
927
+ print(f"=> loaded successfully '{self.pretrained}'")
928
+ del checkpoint
929
+ torch.cuda.empty_cache()
930
+
931
+ def load_swin(self, load_path, strict=False):
932
+ print("loading swin lah")
933
+ from collections import OrderedDict
934
+
935
+ model_state_dict = self.state_dict()
936
+ state_dict = torch.load(load_path)["state_dict"]
937
+
938
+ clean_dict = OrderedDict()
939
+ for key, value in state_dict.items():
940
+ if "backbone" in key:
941
+ clean_key = key[9:]
942
+ clean_dict[clean_key] = value
943
+ if "relative_position_bias_table" in clean_key:
944
+ forked_key = clean_key.replace(
945
+ "relative_position_bias_table", "fragment_position_bias_table"
946
+ )
947
+ if forked_key in clean_dict:
948
+ print("load_swin_error?")
949
+ else:
950
+ clean_dict[forked_key] = value
951
+
952
+ # bicubic interpolate relative_position_bias_table if not match
953
+ relative_position_bias_table_keys = [
954
+ k for k in clean_dict.keys() if "relative_position_bias_table" in k
955
+ ]
956
+ for k in relative_position_bias_table_keys:
957
+ print(k)
958
+ relative_position_bias_table_pretrained = clean_dict[k]
959
+ relative_position_bias_table_current = model_state_dict[k]
960
+ L1, nH1 = relative_position_bias_table_pretrained.size()
961
+ L2, nH2 = relative_position_bias_table_current.size()
962
+ if isinstance(self.window_size, list):
963
+ i_layer = int(k.split(".")[1])
964
+ L2 = (2 * self.window_size[i_layer][1] - 1) * (
965
+ 2 * self.window_size[i_layer][2] - 1
966
+ )
967
+ wd = self.window_size[i_layer][0]
968
+ else:
969
+ L2 = (2 * self.window_size[1] - 1) * (2 * self.window_size[2] - 1)
970
+ wd = self.window_size[0]
971
+ if nH1 != nH2:
972
+ print(f"Error in loading {k}, passing")
973
+ else:
974
+ if L1 != L2:
975
+ S1 = int((L1 / 15) ** 0.5)
976
+ print(
977
+ relative_position_bias_table_pretrained.shape, 15, nH1, S1, S1
978
+ )
979
+ relative_position_bias_table_pretrained_resized = torch.nn.functional.interpolate(
980
+ relative_position_bias_table_pretrained.permute(1, 0)
981
+ .view(nH1, 15, S1, S1)
982
+ .transpose(0, 1),
983
+ size=(
984
+ 2 * self.window_size[i_layer][1] - 1,
985
+ 2 * self.window_size[i_layer][2] - 1,
986
+ ),
987
+ mode="bicubic",
988
+ )
989
+ relative_position_bias_table_pretrained = relative_position_bias_table_pretrained_resized.transpose(
990
+ 0, 1
991
+ ).view(
992
+ nH2, 15, L2
993
+ )
994
+ clean_dict[k] = relative_position_bias_table_pretrained # .repeat(2*wd-1,1)
995
+
996
+ ## Clean Mismatched Keys
997
+ for key, value in model_state_dict.items():
998
+ if key in clean_dict:
999
+ if value.shape != clean_dict[key].shape:
1000
+ print(key)
1001
+ clean_dict.pop(key)
1002
+
1003
+ self.load_state_dict(clean_dict, strict=strict)
1004
+
1005
+ def init_weights(self, pretrained=None):
1006
+ print(self.pretrained, self.pretrained2d)
1007
+ """Initialize the weights in backbone.
1008
+
1009
+ Args:
1010
+ pretrained (str, optional): Path to pre-trained weights.
1011
+ Defaults to None.
1012
+ """
1013
+
1014
+ def _init_weights(m):
1015
+ if isinstance(m, nn.Linear):
1016
+ trunc_normal_(m.weight, std=0.02)
1017
+ if isinstance(m, nn.Linear) and m.bias is not None:
1018
+ nn.init.constant_(m.bias, 0)
1019
+ elif isinstance(m, nn.LayerNorm):
1020
+ nn.init.constant_(m.bias, 0)
1021
+ nn.init.constant_(m.weight, 1.0)
1022
+
1023
+ if pretrained:
1024
+ self.pretrained = pretrained
1025
+ if isinstance(self.pretrained, str):
1026
+ self.apply(_init_weights)
1027
+ # logger = get_root_logger()
1028
+ # logger.info(f"load model from: {self.pretrained}")
1029
+
1030
+ if self.pretrained2d:
1031
+ # Inflate 2D model into 3D model.
1032
+ self.inflate_weights()
1033
+ else:
1034
+ # Directly load 3D model.
1035
+ self.load_swin(self.pretrained, strict=False) # , logger=logger)
1036
+ elif self.pretrained is None:
1037
+ self.apply(_init_weights)
1038
+ else:
1039
+ raise TypeError("pretrained must be a str or None")
1040
+
1041
+ def forward(self, x, multi=False, layer=-1, adaptive_window_size=False):
1042
+
1043
+ """Forward function."""
1044
+ if adaptive_window_size:
1045
+ resized_window_size = get_adaptive_window_size(
1046
+ self.window_size, x.shape[2:], self.base_x_size
1047
+ )
1048
+ else:
1049
+ resized_window_size = None
1050
+
1051
+ x = self.patch_embed(x)
1052
+
1053
+ x = self.pos_drop(x)
1054
+ feats = [x]
1055
+
1056
+ for l, mlayer in enumerate(self.layers):
1057
+ x = mlayer(x.contiguous(), resized_window_size)
1058
+ feats += [x]
1059
+
1060
+ x = rearrange(x, "n c d h w -> n d h w c")
1061
+ x = self.norm(x)
1062
+ x = rearrange(x, "n d h w c -> n c d h w")
1063
+
1064
+ if multi:
1065
+ shape = x.shape[2:]
1066
+ return torch.cat(
1067
+ [F.interpolate(xi, size=shape, mode="trilinear") for xi in feats[:-1]],
1068
+ 1,
1069
+ )
1070
+ elif layer > -1:
1071
+ print("something", len(feats))
1072
+ return feats[layer]
1073
+ else:
1074
+ return x
1075
+
1076
+ def train(self, mode=True):
1077
+ """Convert the model into training mode while keep layers freezed."""
1078
+ super(SwinTransformer3D, self).train(mode)
1079
+ self._freeze_stages()
1080
+
1081
+
1082
+ def swin_3d_tiny(**kwargs):
1083
+ ## Original Swin-3D Tiny with reduced windows
1084
+ return SwinTransformer3D(depths=[2, 2, 6, 2], frag_biases=[0, 0, 0, 0], **kwargs)
1085
+
1086
+
1087
+ def swin_3d_small(**kwargs):
1088
+ # Original Swin-3D Small with reduced windows
1089
+ return SwinTransformer3D(depths=[2, 2, 18, 2], frag_biases=[0, 0, 0, 0], **kwargs)
1090
+
1091
+
1092
+ class SwinTransformer2D(nn.Sequential):
1093
+ def __init__(self):
1094
+ ## Only backbone for Swin Transformer 2D
1095
+ from timm.models import swin_tiny_patch4_window7_224
1096
+
1097
+ super().__init__(*list(swin_tiny_patch4_window7_224().children())[:-2])
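+
+
+ # Minimal usage sketch (illustrative; random weights and the defaults
+ # patch_size=(2,4,4), embed_dim=96 are assumed):
+ #   model = swin_3d_tiny()
+ #   feats = model(torch.randn(1, 3, 32, 224, 224))  # -> (1, 768, 16, 7, 7)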
cover/models/xclip_backbone.py ADDED
@@ -0,0 +1,902 @@
1
+ import copy
2
+ import math
3
+ from collections import OrderedDict
4
+ from typing import Tuple, Union
5
+
6
+ import clip
7
+ import numpy as np
8
+ import torch
9
+ import torch.nn.functional as F
10
+ from einops import rearrange
11
+ from timm.models.layers import trunc_normal_
12
+ from torch import nn
13
+ from torch.utils.checkpoint import checkpoint_sequential
14
+
15
+
16
+ def drop_path(x, drop_prob: float = 0.0, training: bool = False):
17
+ """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
18
+ This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,
19
+ the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
20
+ See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for
21
+ changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use
22
+ 'survival rate' as the argument.
23
+ """
24
+ if drop_prob == 0.0 or not training:
25
+ return x
26
+ keep_prob = 1 - drop_prob
27
+ shape = (x.shape[0],) + (1,) * (
28
+ x.ndim - 1
29
+ ) # work with diff dim tensors, not just 2D ConvNets
30
+ random_tensor = keep_prob + torch.rand(shape, dtype=x.dtype, device=x.device)
31
+ random_tensor.floor_() # binarize
32
+ output = x.div(keep_prob) * random_tensor
33
+ return output
34
+
35
+
36
+ class DropPath(nn.Module):
37
+ """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""
38
+
39
+ def __init__(self, drop_prob=None):
40
+ super(DropPath, self).__init__()
41
+ self.drop_prob = drop_prob
42
+
43
+ def forward(self, x):
44
+ return drop_path(x, self.drop_prob, self.training)
45
+
46
+
47
+ class LayerNorm(nn.LayerNorm):
48
+ """Subclass torch's LayerNorm to handle fp16."""
49
+
50
+ def forward(self, x: torch.Tensor):
51
+ # orig_type = x.dtype
52
+ # ret = super().forward(x.type(torch.float32))
53
+ # return ret.type(orig_type)
54
+ return super().forward(x)
55
+
56
+
57
+ class QuickGELU(nn.Module):
58
+ def forward(self, x: torch.Tensor):
59
+ return x * torch.sigmoid(1.702 * x)
60
+
61
+
62
+ class ResidualAttentionBlock(nn.Module):
63
+ def __init__(
64
+ self, d_model: int, n_head: int, attn_mask: torch.Tensor = None,
65
+ ):
66
+ super().__init__()
67
+
68
+ self.attn = nn.MultiheadAttention(d_model, n_head,)
69
+ self.ln_1 = LayerNorm(d_model)
70
+
71
+ self.mlp = nn.Sequential(
72
+ OrderedDict(
73
+ [
74
+ ("c_fc", nn.Linear(d_model, d_model * 4)),
75
+ ("gelu", QuickGELU()),
76
+ ("c_proj", nn.Linear(d_model * 4, d_model)),
77
+ ]
78
+ )
79
+ )
80
+ self.ln_2 = LayerNorm(d_model)
81
+ self.attn_mask = attn_mask
82
+
83
+ def attention(self, x: torch.Tensor):
84
+ self.attn_mask = (
85
+ self.attn_mask.to(dtype=x.dtype, device=x.device)
86
+ if self.attn_mask is not None
87
+ else None
88
+ )
89
+ return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0]
90
+
91
+ def forward(self, x: torch.Tensor):
92
+ x = x + self.attention(self.ln_1(x))
93
+ x = x + self.mlp(self.ln_2(x))
94
+ return x
95
+
96
+
97
+ class Transformer(nn.Module):
98
+ def __init__(
99
+ self, width: int, layers: int, heads: int, attn_mask: torch.Tensor = None
100
+ ):
101
+ super().__init__()
102
+ self.width = width
103
+ self.layers = layers
104
+ self.resblocks = nn.Sequential(
105
+ *[ResidualAttentionBlock(width, heads, attn_mask) for _ in range(layers)]
106
+ )
107
+
108
+ def forward(self, x: torch.Tensor):
109
+ return self.resblocks(x)
110
+
111
+
112
+ class VisionTransformer(nn.Module):
113
+ def __init__(
114
+ self,
115
+ input_resolution: int,
116
+ patch_size: int,
117
+ width: int,
118
+ layers: int,
119
+ heads: int,
120
+ output_dim: int,
121
+ ):
122
+ super().__init__()
123
+ self.input_resolution = input_resolution
124
+ self.output_dim = output_dim
125
+ self.conv1 = nn.Conv2d(
126
+ in_channels=3,
127
+ out_channels=width,
128
+ kernel_size=patch_size,
129
+ stride=patch_size,
130
+ bias=False,
131
+ )
132
+
133
+ scale = width ** -0.5
134
+ self.class_embedding = nn.Parameter(scale * torch.randn(width))
135
+ self.positional_embedding = nn.Parameter(
136
+ scale * torch.randn((input_resolution // patch_size) ** 2 + 1, width)
137
+ )
138
+ self.ln_pre = LayerNorm(width)
139
+
140
+ self.transformer = Transformer(width, layers, heads)
141
+
142
+ self.ln_post = LayerNorm(width)
143
+ self.proj = nn.Parameter(scale * torch.randn(width, output_dim))
144
+
145
+ def forward(self, x: torch.Tensor):
146
+ x = self.conv1(x) # shape = [*, width, grid, grid]
147
+ x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2]
148
+ x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width]
149
+ x = torch.cat(
150
+ [
151
+ self.class_embedding.to(x.dtype)
152
+ + torch.zeros(
153
+ x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device
154
+ ),
155
+ x,
156
+ ],
157
+ dim=1,
158
+ ) # shape = [*, grid ** 2 + 1, width]
159
+ x = x + self.positional_embedding.to(x.dtype)
160
+ x = self.ln_pre(x)
161
+
162
+ x = x.permute(1, 0, 2) # NLD -> LND
163
+ x = self.transformer(x)
164
+ x = x.permute(1, 0, 2) # LND -> NLD
165
+
166
+ x = self.ln_post(x[:, 0, :])
167
+
168
+ if self.proj is not None:
169
+ x = x @ self.proj
170
+ return x
171
+
172
+
173
+ class CLIP(nn.Module):
174
+ def __init__(
175
+ self,
176
+ embed_dim: int,
177
+ # vision
178
+ image_resolution: int,
179
+ vision_layers: Union[Tuple[int, int, int, int], int],
180
+ vision_width: int,
181
+ vision_patch_size: int,
182
+ # text
183
+ context_length: int,
184
+ vocab_size: int,
185
+ transformer_width: int,
186
+ transformer_heads: int,
187
+ transformer_layers: int,
188
+ ):
189
+ super().__init__()
190
+
191
+ self.context_length = context_length
192
+
193
+ # vision_heads = vision_width // 64
194
+ # self.visual = VisionTransformer(
195
+ # input_resolution=image_resolution,
196
+ # patch_size=vision_patch_size,
197
+ # width=vision_width,
198
+ # layers=vision_layers,
199
+ # heads=vision_heads,
200
+ # output_dim=embed_dim
201
+ # )
202
+
203
+ # self.transformer = Transformer(
204
+ # width=transformer_width,
205
+ # layers=transformer_layers,
206
+ # heads=transformer_heads,
207
+ # attn_mask=self.build_attention_mask()
208
+ # )
209
+
210
+ # self.vocab_size = vocab_size
211
+ # self.token_embedding = nn.Embedding(vocab_size, transformer_width)
212
+ # self.positional_embedding = nn.Parameter(torch.empty(self.context_length, transformer_width))
213
+ # self.ln_final = LayerNorm(transformer_width)
214
+
215
+ # self.text_projection = nn.Parameter(torch.empty(transformer_width, embed_dim))
216
+ # self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07))
217
+
218
+ # self.initialize_parameters()
219
+
220
+ def initialize_parameters(self):
221
+ nn.init.normal_(self.token_embedding.weight, std=0.02)
222
+ nn.init.normal_(self.positional_embedding, std=0.01)
223
+
224
+ proj_std = (self.transformer.width ** -0.5) * (
225
+ (2 * self.transformer.layers) ** -0.5
226
+ )
227
+ attn_std = self.transformer.width ** -0.5
228
+ fc_std = (2 * self.transformer.width) ** -0.5
229
+ for block in self.transformer.resblocks:
230
+ nn.init.normal_(block.attn.in_proj_weight, std=attn_std)
231
+ nn.init.normal_(block.attn.out_proj.weight, std=proj_std)
232
+ nn.init.normal_(block.mlp.c_fc.weight, std=fc_std)
233
+ nn.init.normal_(block.mlp.c_proj.weight, std=proj_std)
234
+
235
+ if self.text_projection is not None:
236
+ nn.init.normal_(self.text_projection, std=self.transformer.width ** -0.5)
237
+
238
+ def build_attention_mask(self):
239
+ # lazily create causal attention mask, with full attention between the vision tokens
240
+ # pytorch uses additive attention mask; fill with -inf
241
+ mask = torch.empty(self.context_length, self.context_length)
242
+ mask.fill_(float("-inf"))
243
+ mask.triu_(1) # zero out the lower diagonal
244
+ return mask
245
+
246
+ @property
247
+ def dtype(self):
248
+ return self.visual.conv1.weight.dtype
249
+
250
+ def encode_image(self, image):
251
+ return self.visual(image.type(self.dtype))
252
+
253
+ def encode_text(self, text):
254
+ x = self.token_embedding(text).type(self.dtype) # [batch_size, n_ctx, d_model]
255
+
256
+ x = x + self.positional_embedding.type(self.dtype)
257
+ x = x.permute(1, 0, 2) # NLD -> LND
258
+ x = self.transformer(x)
259
+ x = x.permute(1, 0, 2) # LND -> NLD
260
+ x = self.ln_final(x).type(self.dtype)
261
+
262
+ # x.shape = [batch_size, n_ctx, transformer.width]
263
+ # take features from the eot embedding (eot_token is the highest number in each sequence)
264
+ x = x[torch.arange(x.shape[0]), text.argmax(dim=-1)] @ self.text_projection
265
+
266
+ return x
267
+
268
+ def forward(self, image, text):
269
+ image_features = self.encode_image(image)
270
+ text_features = self.encode_text(text)
271
+
272
+ # normalized features
273
+ image_features = image_features / image_features.norm(dim=1, keepdim=True)
274
+ text_features = text_features / text_features.norm(dim=1, keepdim=True)
275
+
276
+ # cosine similarity as logits
277
+ logit_scale = self.logit_scale.exp()
278
+ logits_per_image = logit_scale * image_features @ text_features.t()
279
+ logits_per_text = logits_per_image.t()
280
+
281
+ # shape = [global_batch_size, global_batch_size]
282
+ return logits_per_image, logits_per_text
283
+
284
+
285
+ class CrossFramelAttentionBlock(nn.Module):
286
+ def __init__(
287
+ self,
288
+ d_model: int,
289
+ n_head: int,
290
+ attn_mask: torch.Tensor = None,
291
+ droppath=0.0,
292
+ T=0,
293
+ ):
294
+ super().__init__()
295
+ self.T = T
296
+
297
+ self.message_fc = nn.Linear(d_model, d_model)
298
+ self.message_ln = LayerNorm(d_model)
299
+ self.message_attn = nn.MultiheadAttention(d_model, n_head,)
300
+
301
+ self.attn = nn.MultiheadAttention(d_model, n_head,)
302
+ self.ln_1 = LayerNorm(d_model)
303
+
304
+ self.drop_path = DropPath(droppath) if droppath > 0.0 else nn.Identity()
305
+ self.mlp = nn.Sequential(
306
+ OrderedDict(
307
+ [
308
+ ("c_fc", nn.Linear(d_model, d_model * 4)),
309
+ ("gelu", QuickGELU()),
310
+ ("c_proj", nn.Linear(d_model * 4, d_model)),
311
+ ]
312
+ )
313
+ )
314
+ self.ln_2 = LayerNorm(d_model)
315
+ self.attn_mask = attn_mask
316
+
317
+ def attention(self, x: torch.Tensor):
318
+ self.attn_mask = (
319
+ self.attn_mask.to(dtype=x.dtype, device=x.device)
320
+ if self.attn_mask is not None
321
+ else None
322
+ )
323
+ return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0]
324
+
325
+ def forward(self, x):
326
+ l, bt, d = x.size()
327
+ b = bt // self.T
328
+ x = x.view(l, b, self.T, d)
329
+
330
+ msg_token = self.message_fc(x[0, :, :, :])
331
+ msg_token = msg_token.view(b, self.T, 1, d)
332
+
333
+ msg_token = msg_token.permute(1, 2, 0, 3).view(self.T, b, d)
334
+ msg_token = msg_token + self.drop_path(
335
+ self.message_attn(
336
+ self.message_ln(msg_token),
337
+ self.message_ln(msg_token),
338
+ self.message_ln(msg_token),
339
+ need_weights=False,
340
+ )[0]
341
+ )
342
+ msg_token = msg_token.view(self.T, 1, b, d).permute(1, 2, 0, 3)
343
+
344
+ x = torch.cat([x, msg_token], dim=0)
345
+
346
+ x = x.view(l + 1, -1, d)
347
+ x = x + self.drop_path(self.attention(self.ln_1(x)))
348
+ x = x[:l, :, :]
349
+ x = x + self.drop_path(self.mlp(self.ln_2(x)))
350
+ return x
351
+
352
+
353
+ class Transformer(nn.Module):
354
+ def __init__(
355
+ self,
356
+ width: int,
357
+ layers: int,
358
+ heads: int,
359
+ attn_mask: torch.Tensor = None,
360
+ droppath=None,
361
+ use_checkpoint=False,
362
+ T=8,
363
+ ):
364
+ super().__init__()
365
+ self.use_checkpoint = use_checkpoint
366
+ if droppath is None:
367
+ droppath = [0.0 for i in range(layers)]
368
+ self.width = width
369
+ self.layers = layers
370
+
371
+ self.resblocks = nn.Sequential(
372
+ *[
373
+ CrossFramelAttentionBlock(width, heads, attn_mask, droppath[i], T)
374
+ for i in range(layers)
375
+ ]
376
+ )
377
+
378
+ def forward(self, x: torch.Tensor):
379
+ if not self.use_checkpoint:
380
+ return self.resblocks(x)
381
+ else:
382
+ return checkpoint_sequential(self.resblocks, 3, x)
383
+
384
+
385
+ class CrossFrameCommunicationTransformer(nn.Module):
386
+ def __init__(
387
+ self,
388
+ input_resolution: int,
389
+ patch_size: int,
390
+ width: int,
391
+ layers: int,
392
+ heads: int,
393
+ output_dim: int,
394
+ droppath=None,
395
+ T=8,
396
+ use_checkpoint=False,
397
+ ):
398
+ super().__init__()
399
+ self.input_resolution = input_resolution
400
+ self.output_dim = output_dim
401
+
402
+ self.conv1 = nn.Conv2d(
403
+ in_channels=3,
404
+ out_channels=width,
405
+ kernel_size=patch_size,
406
+ stride=patch_size,
407
+ bias=False,
408
+ )
409
+
410
+ scale = width ** -0.5
411
+ self.class_embedding = nn.Parameter(scale * torch.randn(width))
412
+ self.positional_embedding = nn.Parameter(
413
+ scale * torch.randn((input_resolution // patch_size) ** 2 + 1, width)
414
+ )
415
+ self.ln_pre = LayerNorm(width)
416
+
417
+ ## Attention Blocks
418
+ self.transformer = Transformer(
419
+ width, layers, heads, droppath=droppath, use_checkpoint=use_checkpoint, T=T,
420
+ )
421
+ self.ln_post = LayerNorm(width)
422
+ self.proj = nn.Parameter(scale * torch.randn(width, output_dim))
423
+
424
+ def init_weights(self):
425
+ self.apply(self._init_weights)
426
+
427
+ def _init_weights(self, m):
428
+ if isinstance(m, nn.Linear):
429
+ trunc_normal_(m.weight, std=0.02)
430
+ if isinstance(m, nn.Linear) and m.bias is not None:
431
+ nn.init.constant_(m.bias, 0)
432
+ elif isinstance(m, nn.LayerNorm):
433
+ nn.init.constant_(m.bias, 0)
434
+ nn.init.constant_(m.weight, 1.0)
435
+
436
+ def forward(self, x: torch.Tensor):
437
+ x = self.conv1(x) # shape = [*, width, grid, grid]
438
+ x = x.reshape(x.shape[0], x.shape[1], -1) # shape = [*, width, grid ** 2]
439
+ x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width]
440
+ x = torch.cat(
441
+ [
442
+ self.class_embedding.to(x.dtype)
443
+ + torch.zeros(
444
+ x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device
445
+ ),
446
+ x,
447
+ ],
448
+ dim=1,
449
+ ) # shape = [*, grid ** 2 + 1, width]
450
+ x = x + self.positional_embedding.to(x.dtype)
451
+
452
+ x = self.ln_pre(x)
453
+
454
+ x = x.permute(1, 0, 2)
455
+ x = self.transformer(x)
456
+ x = x.permute(1, 0, 2)
457
+
458
+ cls_x = self.ln_post(x[:, 0, :])
459
+
460
+ if self.proj is not None:
461
+ cls_x = cls_x @ self.proj
462
+
463
+ return cls_x, x[:, 1:, :]
464
+
465
+
466
+ class MulitHeadAttention(nn.Module):
467
+ def __init__(
468
+ self,
469
+ dim,
470
+ num_heads=8,
471
+ qkv_bias=False,
472
+ qk_scale=None,
473
+ attn_drop=0.0,
474
+ proj_drop=0.0,
475
+ ):
476
+ super().__init__()
477
+ self.num_heads = num_heads
478
+ head_dim = dim // num_heads
479
+
480
+ self.scale = qk_scale or head_dim ** -0.5
481
+
482
+ self.q_proj = nn.Linear(dim, dim, bias=qkv_bias)
483
+ self.k_proj = nn.Linear(dim, dim, bias=qkv_bias)
484
+ self.v_proj = nn.Linear(dim, dim, bias=qkv_bias)
485
+
486
+ self.attn_drop = nn.Dropout(attn_drop)
487
+ self.proj = nn.Linear(dim, dim)
488
+ self.proj_drop = nn.Dropout(proj_drop)
489
+
490
+ def forward(self, q, k, v):
491
+ B, N, C = q.shape
492
+ B, M, C = k.shape
493
+ q = (
494
+ self.q_proj(q)
495
+ .reshape(B, N, self.num_heads, C // self.num_heads)
496
+ .permute(0, 2, 1, 3)
497
+ )
498
+ k = (
499
+ self.k_proj(k)
500
+ .reshape(B, M, self.num_heads, C // self.num_heads)
501
+ .permute(0, 2, 1, 3)
502
+ )
503
+ v = (
504
+ self.v_proj(v)
505
+ .reshape(B, M, self.num_heads, C // self.num_heads)
506
+ .permute(0, 2, 1, 3)
507
+ )
508
+
509
+ attn = (q @ k.transpose(-2, -1)) * self.scale
510
+ attn = attn.softmax(dim=-1)
511
+ attn = self.attn_drop(attn)
512
+
513
+ x = (attn @ v).transpose(1, 2).reshape(B, N, C)
514
+ x = self.proj(x)
515
+ x = self.proj_drop(x)
516
+ return x
517
+
518
+
519
+ class PromptGeneratorLayer(nn.Module):
520
+ def __init__(
521
+ self, d_model, nhead, dropout=0.0,
522
+ ):
523
+ super().__init__()
524
+ self.cross_attn = MulitHeadAttention(d_model, nhead, proj_drop=dropout)
525
+
526
+ self.norm1 = nn.LayerNorm(d_model)
527
+ self.norm3 = nn.LayerNorm(d_model)
528
+
529
+ self.dropout = nn.Dropout(dropout)
530
+
531
+ self.mlp = nn.Sequential(
532
+ nn.Linear(d_model, d_model * 4),
533
+ QuickGELU(),
534
+ nn.Dropout(dropout),
535
+ nn.Linear(d_model * 4, d_model),
536
+ )
537
+
538
+ def forward(self, x, visual):
539
+ q = k = v = self.norm1(x)
540
+ x = x + self.cross_attn(q, visual, visual)
541
+ x = x + self.dropout(self.mlp(self.norm3(x)))
542
+ return x
543
+
544
+
545
+ class VideoSpecificPrompt(nn.Module):
546
+ def __init__(
547
+ self, layers=2, embed_dim=512, alpha=0.1,
548
+ ):
549
+ super().__init__()
550
+ self.norm = nn.LayerNorm(embed_dim)
551
+ self.decoder = nn.ModuleList(
552
+ [PromptGeneratorLayer(embed_dim, embed_dim // 64) for _ in range(layers)]
553
+ )
554
+ self.alpha = nn.Parameter(torch.ones(embed_dim) * alpha)
555
+ self.apply(self._init_weights)
556
+
557
+ def _init_weights(self, m):
558
+ if isinstance(m, nn.Linear):
559
+ trunc_normal_(m.weight, std=0.02)
560
+ if isinstance(m, nn.Linear) and m.bias is not None:
561
+ nn.init.constant_(m.bias, 0)
562
+ elif isinstance(m, nn.LayerNorm):
563
+ nn.init.constant_(m.bias, 0)
564
+ nn.init.constant_(m.weight, 1.0)
565
+
566
+ def forward(self, text, visual):
567
+ B, N, C = visual.shape
568
+ visual = self.norm(visual)
569
+ for layer in self.decoder:
570
+ text = layer(text, visual)
+ # scale the generated video-specific prompt by the learnable alpha
+ return self.alpha * text
571
+
572
+
573
+ from collections import OrderedDict
574
+
575
+ from timm.models.layers import trunc_normal_
576
+
577
+
578
+ class ResidualAttentionBlock(nn.Module):
579
+ def __init__(self, d_model: int, n_head: int, attn_mask: torch.Tensor = None):
580
+ super().__init__()
581
+
582
+ self.attn = nn.MultiheadAttention(d_model, n_head)
583
+ self.ln_1 = nn.LayerNorm(d_model)
584
+ self.mlp = nn.Sequential(
585
+ OrderedDict(
586
+ [
587
+ ("c_fc", nn.Linear(d_model, d_model * 4)),
588
+ ("gelu", QuickGELU()),
589
+ ("c_proj", nn.Linear(d_model * 4, d_model)),
590
+ ]
591
+ )
592
+ )
593
+ self.ln_2 = nn.LayerNorm(d_model)
594
+ self.attn_mask = attn_mask
595
+
596
+ def attention(self, x: torch.Tensor):
597
+ self.attn_mask = (
598
+ self.attn_mask.to(dtype=x.dtype, device=x.device)
599
+ if self.attn_mask is not None
600
+ else None
601
+ )
602
+ return self.attn(x, x, x, need_weights=False, attn_mask=self.attn_mask)[0]
603
+
604
+ def forward(self, x: torch.Tensor):
605
+ x = x + self.attention(self.ln_1(x))
606
+ x = x + self.mlp(self.ln_2(x))
607
+ return x
608
+
609
+
610
+ class MultiframeIntegrationTransformer(nn.Module):
611
+ def __init__(
612
+ self, T, embed_dim=512, layers=1,
613
+ ):
614
+ super().__init__()
615
+ self.T = T
616
+ transformer_heads = embed_dim // 64
617
+ self.positional_embedding = nn.Parameter(torch.empty(1, T, embed_dim))
618
+ trunc_normal_(self.positional_embedding, std=0.02)
619
+ self.resblocks = nn.Sequential(
620
+ *[
621
+ ResidualAttentionBlock(d_model=embed_dim, n_head=transformer_heads)
622
+ for _ in range(layers)
623
+ ]
624
+ )
625
+
626
+ self.apply(self._init_weights)
627
+
628
+ def _init_weights(self, m):
629
+ if isinstance(m, (nn.Linear,)):
630
+ trunc_normal_(m.weight, std=0.02)
631
+ if m.bias is not None:
632
+ nn.init.zeros_(m.bias)
633
+ elif isinstance(m, nn.LayerNorm):
634
+ nn.init.zeros_(m.bias)
635
+ nn.init.ones_(m.weight)
636
+
637
+ def forward(self, x):
638
+ ori_x = x
639
+ x = x + self.positional_embedding
640
+ x = x.permute(1, 0, 2)
641
+ x = self.resblocks(x)
642
+ x = x.permute(1, 0, 2)
643
+ x = x.type(ori_x.dtype) + ori_x
644
+
645
+ return x.mean(dim=1, keepdim=False)
646
+
647
+
648
+ class XCLIP(CLIP):
649
+ def __init__(
650
+ self,
651
+ embed_dim: int,
652
+ # vision
653
+ image_resolution: int,
654
+ vision_layers: Union[Tuple[int, int, int, int], int],
655
+ vision_width: int,
656
+ vision_patch_size: int,
657
+ # text
658
+ context_length: int,
659
+ vocab_size: int,
660
+ transformer_width: int,
661
+ transformer_heads: int,
662
+ transformer_layers: int,
663
+ # video
664
+ T=8,
665
+ droppath=0.0,
666
+ mit_layers=1,
667
+ # prompt
668
+ prompts_alpha=1e-4,
669
+ prompts_layers=1,
670
+ # other
671
+ use_cache=True,
672
+ use_checkpoint=False,
673
+ ):
674
+ super().__init__(
675
+ embed_dim,
676
+ image_resolution,
677
+ vision_layers,
678
+ vision_width,
679
+ vision_patch_size,
680
+ context_length,
681
+ vocab_size,
682
+ transformer_width,
683
+ transformer_heads,
684
+ transformer_layers,
685
+ )
686
+
687
+ self.prompts_generator = VideoSpecificPrompt(
688
+ layers=prompts_layers, embed_dim=embed_dim, alpha=prompts_alpha,
689
+ )
690
+ self.use_cache = use_cache
691
+ self.mit = MultiframeIntegrationTransformer(
692
+ T=T, embed_dim=embed_dim, layers=mit_layers,
693
+ )
694
+
695
+ dpr = (
696
+ [x.item() for x in torch.linspace(0, droppath, vision_layers)]
697
+ if droppath > 0.0
698
+ else None
699
+ )
700
+
701
+ vision_heads = vision_width // 64
702
+ self.visual = CrossFrameCommunicationTransformer(
703
+ input_resolution=image_resolution,
704
+ patch_size=vision_patch_size,
705
+ width=vision_width,
706
+ layers=vision_layers,
707
+ heads=vision_heads,
708
+ output_dim=embed_dim,
709
+ droppath=dpr,
710
+ T=T,
711
+ use_checkpoint=use_checkpoint,
712
+ )
713
+
714
+ self.transformer = Transformer(
715
+ width=transformer_width,
716
+ layers=transformer_layers,
717
+ heads=transformer_heads,
718
+ attn_mask=self.build_attention_mask(),
719
+ )
720
+ self.vocab_size = vocab_size
721
+ self.token_embedding = nn.Embedding(vocab_size, transformer_width)
722
+ self.positional_embedding = nn.Parameter(
723
+ torch.empty(self.context_length, transformer_width)
724
+ )
725
+ self.ln_final = LayerNorm(transformer_width)
726
+ self.text_projection = nn.Parameter(torch.empty(transformer_width, embed_dim))
727
+ self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07))
728
+
729
+ self.cache_text_features = None
730
+ self.prompts_visual_ln = LayerNorm(vision_width)
731
+ self.prompts_visual_proj = nn.Parameter(torch.randn(vision_width, embed_dim))
732
+
733
+ self.initialize_parameters()
734
+
735
+ @torch.jit.ignore
736
+ def no_weight_decay_keywords(self):
737
+ return {"positional_embedding"}
738
+
739
+ def encode_image(self, image):
740
+ return self.visual(image)
741
+
742
+ def encode_text(self, text):
743
+ x = self.token_embedding(text)
744
+ eos_indx = text.argmax(dim=-1)
745
+ K, N1, C = x.shape
746
+
747
+ x = x + self.positional_embedding
748
+ x = x.permute(1, 0, 2) # NLD -> LND
749
+ x = self.transformer(x)
750
+ x = x.permute(1, 0, 2) # LND -> NLD
751
+ x = self.ln_final(x)
752
+ # x.shape = [batch_size, n_ctx, transformer.width]
753
+ # take features from the eot embedding (eot_token is the highest number in each sequence)
754
+ x = x[torch.arange(x.shape[0]), eos_indx] @ self.text_projection
755
+ x = x.reshape(K, -1)
756
+ return x
757
+
758
+ def encode_video(self, image):
759
+ b, t, c, h, w = image.size()
760
+ image = image.reshape(-1, c, h, w)
761
+
762
+ cls_features, img_features = self.encode_image(image)
763
+ img_features = self.prompts_visual_ln(img_features)
764
+ img_features = img_features @ self.prompts_visual_proj
765
+
766
+ cls_features = cls_features.view(b, t, -1)
767
+ img_features = img_features.view(b, t, -1, cls_features.shape[-1])
768
+
769
+ video_features = self.mit(cls_features)
770
+
771
+ return video_features, img_features
772
+
773
+ def forward(self, image, **kwargs):
774
+ image = rearrange(image, "b c t h w -> b t c h w")
775
+ video_features, _ = self.encode_video(image)
776
+ return video_features.reshape(*video_features.shape, 1, 1, 1)
777
+
778
+ def cache_text(self, text):
779
+ self.eval()
780
+ with torch.no_grad():
781
+ if self.cache_text_features is None:
782
+ self.cache_text_features = self.encode_text(text)
783
+ self.train()
784
+ return self.cache_text_features
785
+
786
+ def forward_original(self, image, text):
787
+ b = image.shape[0]
788
+ video_features, img_features = self.encode_video(image)
789
+ img_features = img_features.mean(dim=1, keepdim=False)
790
+
791
+ if self.use_cache:
792
+ text_features = self.cache_text(text)
793
+ else:
794
+ text_features = self.encode_text(text)
795
+
796
+ text_features = text_features.unsqueeze(0).expand(b, -1, -1)
797
+ text_features = text_features + self.prompts_generator(
798
+ text_features, img_features
799
+ )
800
+
801
+ video_features = video_features / video_features.norm(dim=-1, keepdim=True)
802
+ text_features = text_features / text_features.norm(dim=-1, keepdim=True)
803
+ logit_scale = self.logit_scale.exp()
804
+ logits = torch.einsum("bd,bkd->bk", video_features, logit_scale * text_features)
805
+
806
+ return logits
807
+
808
+
809
+ def build_x_clip_model(
810
+ pretrained_path="./pretrained_weights/k400_32_8.pth",
811
+ droppath=0.0,
812
+ use_checkpoint=False,
813
+ logger=None,
814
+ prompts_alpha=1e-1,
815
+ prompts_layers=2,
816
+ use_cache=True,
817
+ mit_layers=4,
818
+ **kwargs,
819
+ ):
820
+ state_dict = torch.load(pretrained_path, map_location="cpu")["model"]
821
+ T = int(pretrained_path.split("_")[-1].split(".")[0])
822
+ print(T)
823
+ vit = "visual.proj" in state_dict
824
+
825
+ if vit:
826
+ vision_width = state_dict["visual.conv1.weight"].shape[0]
827
+ vision_layers = len(
828
+ [
829
+ k
830
+ for k in state_dict.keys()
831
+ if k.startswith("visual.") and k.endswith(".attn.in_proj_weight")
832
+ ]
833
+ )
834
+ vision_patch_size = state_dict["visual.conv1.weight"].shape[-1]
835
+ grid_size = round(
836
+ (state_dict["visual.positional_embedding"].shape[0] - 1) ** 0.5
837
+ )
838
+ image_resolution = vision_patch_size * grid_size
839
+ else:
840
+ counts: list = [
841
+ len(
842
+ set(
843
+ k.split(".")[2]
844
+ for k in state_dict
845
+ if k.startswith(f"visual.layer{b}")
846
+ )
847
+ )
848
+ for b in [1, 2, 3, 4]
849
+ ]
850
+ vision_layers = tuple(counts)
851
+
852
+ vision_width = state_dict["visual.layer1.0.conv1.weight"].shape[0]
853
+ output_width = round(
854
+ (state_dict["visual.attnpool.positional_embedding"].shape[0] - 1) ** 0.5
855
+ )
856
+ vision_patch_size = None
857
+ assert (
858
+ output_width ** 2 + 1
859
+ == state_dict["visual.attnpool.positional_embedding"].shape[0]
860
+ )
861
+ image_resolution = output_width * 32
862
+
863
+ embed_dim = state_dict["text_projection"].shape[1]
864
+ context_length = state_dict["positional_embedding"].shape[0]
865
+ vocab_size = state_dict["token_embedding.weight"].shape[0]
866
+ transformer_width = state_dict["ln_final.weight"].shape[0]
867
+ transformer_heads = transformer_width // 64
868
+ transformer_layers = len(
869
+ set(
870
+ k.split(".")[2]
871
+ for k in state_dict
872
+ if k.startswith(f"transformer.resblocks")
873
+ )
874
+ )
875
+
876
+ model = XCLIP(
877
+ embed_dim,
878
+ image_resolution,
879
+ vision_layers,
880
+ vision_width,
881
+ vision_patch_size,
882
+ context_length,
883
+ vocab_size,
884
+ transformer_width,
885
+ transformer_heads,
886
+ transformer_layers,
887
+ T=T,
888
+ droppath=droppath,
889
+ mit_layers=mit_layers,
890
+ prompts_alpha=prompts_alpha,
891
+ prompts_layers=prompts_layers,
892
+ use_checkpoint=use_checkpoint,
893
+ use_cache=use_cache,
894
+ )
895
+
896
+ for key in ["input_resolution", "context_length", "vocab_size"]:
897
+ if key in state_dict:
898
+ del state_dict[key]
899
+
900
+ msg = model.load_state_dict(state_dict, strict=False)
901
+
902
+ return model.eval()
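+
+
+ # Minimal usage sketch (illustrative; the checkpoint path is the function default
+ # and T is parsed from the file name, e.g. "k400_32_8.pth" -> T=8):
+ #   backbone = build_x_clip_model("./pretrained_weights/k400_32_8.pth")
+ #   feats = backbone(torch.randn(1, 3, 8, 224, 224))  # -> (1, embed_dim, 1, 1, 1)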
cover/version.py ADDED
@@ -0,0 +1,16 @@
1
+ __version__ = "1.0.0"
2
+
3
+
4
+ def parse_version_info(version_str):
5
+ version_info = []
6
+ for x in version_str.split("."):
7
+ if x.isdigit():
8
+ version_info.append(int(x))
9
+ elif x.find("rc") != -1:
10
+ patch_version = x.split("rc")
11
+ version_info.append(int(patch_version[0]))
12
+ version_info.append(f"rc{patch_version[1]}")
13
+ return tuple(version_info)
14
+
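+ # Examples (illustrative): parse_version_info("1.0.0") -> (1, 0, 0) and
+ # parse_version_info("1.0.0rc1") -> (1, 0, 0, "rc1").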
15
+
16
+ version_info = parse_version_info(__version__)
demo/video_1.mp4 ADDED
Binary file (353 kB)
 
demo/video_2.mp4 ADDED
Binary file (218 kB)
 
evaluate_a_set_of_videos.py ADDED
@@ -0,0 +1,119 @@
1
+ import torch
2
+
3
+ import argparse
4
+ import os
5
+ import pickle as pkl
6
+
7
+ import decord
8
+ import numpy as np
9
+ import yaml
10
+ from tqdm import tqdm
11
+
12
+ from cover.datasets import (
13
+ UnifiedFrameSampler,
14
+ ViewDecompositionDataset,
15
+ spatial_temporal_view_decomposition,
16
+ )
17
+ from cover.models import COVER
18
+
19
+ mean, std = (
20
+ torch.FloatTensor([123.675, 116.28, 103.53]),
21
+ torch.FloatTensor([58.395, 57.12, 57.375]),
22
+ )
23
+
24
+ mean_clip, std_clip = (
25
+ torch.FloatTensor([122.77, 116.75, 104.09]),
26
+ torch.FloatTensor([68.50, 66.63, 70.32])
27
+ )
28
+
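+ # The first mean/std pair corresponds to ImageNet RGB statistics and the second
+ # pair to CLIP's preprocessing statistics, both expressed on a 0-255 pixel scale.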
29
+ def fuse_results(results: list):
30
+ x = (results[0] + results[1] + results[2])
31
+ return {
32
+ "semantic" : results[0],
33
+ "technical": results[1],
34
+ "aesthetic": results[2],
35
+ "overall" : x,
36
+ }
37
+
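+ # The overall score is the unweighted sum of the semantic, technical and
+ # aesthetic branch predictions.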
38
+ def parse_args():
39
+ parser = argparse.ArgumentParser()
40
+ parser.add_argument("-o", "--opt" , type=str, default="./cover.yml", help="the option file")
41
+ parser.add_argument('-d', "--device", type=str, default="cuda" , help='CUDA device id')
42
+ parser.add_argument("-i", "--input_video_dir", type=str, default="./demo", help="the input video dir")
43
+ parser.add_argument( "--output", type=str, default="./demo.csv" , help='output file to store predict mos value')
44
+ args = parser.parse_args()
45
+ return args
46
+
47
+
48
+ if __name__ == "__main__":
49
+
50
+ args = parse_args()
51
+
52
+ with open(args.opt, "r") as f:
53
+ opt = yaml.safe_load(f)
54
+
55
+ ### Load COVER
56
+ evaluator = COVER(**opt["model"]["args"]).to(args.device)
57
+ state_dict = torch.load(opt["test_load_path"], map_location=args.device)
58
+
59
+ # set strict=False here to avoid errors from missing
60
+ # prompt_learner weights in CLIP-IQA+ and the cross-gate module
61
+ evaluator.load_state_dict(state_dict['state_dict'], strict=False)
62
+
63
+
64
+ video_paths = []
65
+ all_results = {}
66
+
67
+ with open(args.output, "w") as w:
68
+ w.write(f"path, semantic score, technical score, aesthetic score, overall/final score\n")
69
+
70
+ dopt = opt["data"]["val-l1080p"]["args"]
71
+
72
+ dopt["anno_file"] = None
73
+ dopt["data_prefix"] = args.input_video_dir
74
+
75
+ dataset = ViewDecompositionDataset(dopt)
76
+
77
+ dataloader = torch.utils.data.DataLoader(
78
+ dataset, batch_size=1, num_workers=opt["num_workers"], pin_memory=True,
79
+ )
80
+
81
+ sample_types = ["semantic", "technical", "aesthetic"]
82
+
83
+ for i, data in enumerate(tqdm(dataloader, desc="Testing")):
84
+ if len(data.keys()) == 1:
85
+ ## failed data
86
+ continue
87
+
88
+ video = {}
89
+ for key in sample_types:
90
+ if key in data:
91
+ video[key] = data[key].to(args.device)
92
+ b, c, t, h, w = video[key].shape
93
+ video[key] = (
94
+ video[key]
95
+ .reshape(
96
+ b, c, data["num_clips"][key], t // data["num_clips"][key], h, w
97
+ )
98
+ .permute(0, 2, 1, 3, 4, 5)
99
+ .reshape(
100
+ b * data["num_clips"][key], c, t // data["num_clips"][key], h, w
101
+ )
102
+ )
103
+
104
+ with torch.no_grad():
105
+ results = evaluator(video, reduce_scores=False)
106
+ results = [np.mean(l.cpu().numpy()) for l in results]
107
+
108
+ rescaled_results = fuse_results(results)
109
+ # all_results[data["name"][0]] = rescaled_results
110
+
111
+ # with open(
112
+ # f"cover_predictions/val-custom_{args.input_video_dir.split('/')[-1]}.pkl", "wb"
113
+ # ) as wf:
114
+ # pkl.dump(all_results, wf)
115
+
116
+ with open(args.output, "a") as w:
117
+ w.write(
118
+ f'{data["name"][0].split("/")[-1]},{rescaled_results["semantic"]:4f},{rescaled_results["technical"]:4f},{rescaled_results["aesthetic"]:4f},{rescaled_results["overall"]:4f}\n'
119
+ )
evaluate_one_dataset.py ADDED
@@ -0,0 +1,190 @@
1
+ import argparse
2
+ import os
3
+ import csv
4
+
5
+ import torch
6
+
7
+ import pandas as pd
8
+ import numpy as np
9
+ import pickle as pkl
10
+ import decord
11
+ import yaml
12
+
13
+ from scipy import stats
14
+ from sklearn.metrics import mean_squared_error
15
+ from scipy.optimize import curve_fit
16
+ from cover.datasets import UnifiedFrameSampler, spatial_temporal_view_decomposition
17
+ from cover.models import COVER
18
+
19
+ # use case
20
+ # python evaluate_on_ytugc.py -o cover.yml -d cuda:3 --output result.csv -uh 0
21
+
22
+ def save_to_csv(video_name, pre_smos, pre_tmos, pre_amos, pre_overall, filename):
23
+ combined_data = list(zip(video_name, pre_smos, pre_tmos, pre_amos, pre_overall))
24
+
25
+ with open(filename, 'w', newline='') as csvfile:
26
+ writer = csv.writer(csvfile)
27
+ writer.writerow(['Video', 'semantic score', 'technical score', 'aesthetic score', 'overall/final score'])
28
+ writer.writerows(combined_data)
29
+
30
+ mean_cover, std_cover = (
31
+ torch.FloatTensor([123.675, 116.28, 103.53]),
32
+ torch.FloatTensor([58.395, 57.12, 57.375]),
33
+ )
34
+
35
+ mean_clip, std_clip = (
36
+ torch.FloatTensor([122.77, 116.75, 104.09]),
37
+ torch.FloatTensor([68.50, 66.63, 70.32])
38
+ )
39
+
40
+ def fuse_results(results: list):
41
+ x = (results[0] + results[1] + results[2])
42
+ return {
43
+ "semantic" : results[0],
44
+ "technical": results[1],
45
+ "aesthetic": results[2],
46
+ "overall" : x,
47
+ }
48
+
49
+
50
+ def gaussian_rescale(pr):
51
+ # The results should follow N(0,1)
52
+ pr = (pr - np.mean(pr)) / np.std(pr)
53
+ return pr
54
+
55
+
56
+ def uniform_rescale(pr):
57
+ # The result scores should follow U(0,1)
58
+ return np.arange(len(pr))[np.argsort(pr).argsort()] / len(pr)
59
+
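+ # Worked example (illustrative): uniform_rescale(np.array([0.3, 0.9, 0.5]))
+ # returns [0.0, 0.6667, 0.3333], i.e. each score is replaced by its rank / N.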
60
+
61
+ def parse_args():
62
+ parser = argparse.ArgumentParser()
63
+ parser.add_argument("-o", "--opt" , type=str, default="./cover.yml", help="the option file")
64
+ parser.add_argument('-d', "--device", type=str, default="cuda:0" , help='CUDA device id')
65
+ parser.add_argument("-t", "--target_set", type=str, default="val-ytugc", help="target_set")
66
+ parser.add_argument( "--output", type=str, default="ytugc.csv" , help='output file to store predict mos value')
67
+ args = parser.parse_args()
68
+ return args
69
+
70
+
71
+ def logistic_func(X, bayta1, bayta2, bayta3, bayta4):
72
+ # 4-parameter logistic function
73
+ logisticPart = 1 + np.exp(np.negative(np.divide(X - bayta3, np.abs(bayta4))))
74
+ yhat = bayta2 + np.divide(bayta1 - bayta2, logisticPart)
75
+ return yhat
76
+
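+ # In math form: yhat = beta2 + (beta1 - beta2) / (1 + exp(-(X - beta3) / |beta4|)),
+ # the standard 4-parameter logistic mapping fitted before computing PLCC/RMSE.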
77
+
78
+ if __name__ == '__main__':
79
+ args = parse_args()
80
+
81
+ with open(args.opt, "r") as f:
82
+ opt = yaml.safe_load(f)
83
+
84
+ ### Load COVER
85
+ evaluator = COVER(**opt["model"]["args"]).to(args.device)
86
+ state_dict = torch.load(opt["test_load_path"], map_location=args.device)
87
+
88
+ # set strict=False here to avoid errors from missing
89
+ # prompt_learner weights in CLIP-IQA+ and the cross-gate module
90
+ evaluator.load_state_dict(state_dict['state_dict'], strict=False)
91
+
92
+ dopt = opt["data"][args.target_set]["args"]
93
+ temporal_samplers = {}
94
+ for stype, sopt in dopt["sample_types"].items():
95
+ temporal_samplers[stype] = UnifiedFrameSampler(
96
+ sopt["clip_len"] // sopt["t_frag"],
97
+ sopt["t_frag"],
98
+ sopt["frame_interval"],
99
+ sopt["num_clips"],
100
+ )
101
+
102
+ if args.target_set == 'val-livevqc':
103
+ videos_dir = './datasets/LIVE_VQC/Video/'
104
+ datainfo = './datasets/LIVE_VQC/metainfo/LIVE_VQC_metadata.csv'
105
+ df = pd.read_csv(datainfo)
106
+ files = df['File'].tolist()
107
+ mos = df['MOS'].tolist()
108
+ elif args.target_set == 'val-kv1k':
109
+ videos_dir = './datasets/KoNViD/KoNViD_1k_videos/'
110
+ datainfo = './datasets/KoNViD/metainfo/KoNVid_metadata.csv'
111
+ df = pd.read_csv(datainfo)
112
+ files = df['Filename'].tolist()
113
+ files = [str(file) + '.mp4' for file in files]
114
+ mos = df['MOS'].tolist()
115
+ elif args.target_set == 'val-ytugc':
116
+ videos_dir = './datasets/YouTubeUGC/'
117
+ datainfo = './datasets/YouTubeUGC/../meta_info/Youtube-UGC_metadata.csv'
118
+ df = pd.read_csv(datainfo)
119
+ files = df['filename'].tolist()
120
+ mos = df['MOSFull'].tolist()
121
+ files = [str(file) + '_crf_10_ss_00_t_20.0.mp4' for file in files]
122
+ else:
123
+ print("unsupported video dataset for evaluation: " + args.target_set)
124
+ raise SystemExit(1)
125
+
126
+ print(len(files))
127
+
128
+ pure_name_list = []
129
+ pre_overall = np.zeros(len(mos))
130
+ pre_smos = np.zeros(len(mos))
131
+ pre_tmos = np.zeros(len(mos))
132
+ pre_amos = np.zeros(len(mos))
133
+ gt_mos = np.array(mos)
134
+ count = 0
135
+
136
+ for vi in range(len(mos)):
137
+ video = files[vi]
138
+ pure_name = os.path.splitext(video)[0]
139
+ video_path = os.path.join(videos_dir, video)
140
+
141
+ views, _ = spatial_temporal_view_decomposition(
142
+ video_path, dopt["sample_types"], temporal_samplers
143
+ )
144
+
145
+ for k, v in views.items():
146
+ num_clips = dopt["sample_types"][k].get("num_clips", 1)
147
+ if k == 'technical' or k == 'aesthetic':
148
+ views[k] = (
149
+ ((v.permute(1, 2, 3, 0) - mean_cover) / std_cover)
150
+ .permute(3, 0, 1, 2)
151
+ .reshape(v.shape[0], num_clips, -1, *v.shape[2:])
152
+ .transpose(0, 1)
153
+ .to(args.device)
154
+ )
155
+ elif k == 'semantic':
156
+ views[k] = (
157
+ ((v.permute(1, 2, 3, 0) - mean_clip) / std_clip)
158
+ .permute(3, 0, 1, 2)
159
+ .reshape(v.shape[0], num_clips, -1, *v.shape[2:])
160
+ .transpose(0, 1)
161
+ .to(args.device)
162
+ )
163
+
164
+ results = [r.mean().item() for r in evaluator(views)]
165
+
166
+
167
+ pre_overall[count] = fuse_results(results)['overall']
168
+ pre_smos[count] = results[0]
169
+ pre_tmos[count] = results[1]
170
+ pre_amos[count] = results[2]
171
+ pure_name_list.append(pure_name)
172
+ print("Process ", video, ", predicted quality score is ", pre_overall[count])
173
+ count += 1
174
+
175
+
176
+ SROCC = stats.spearmanr(pre_overall, gt_mos)[0]
177
+ KROCC = stats.kendalltau(pre_overall, gt_mos)[0]
178
+
179
+ # fit a 4-parameter logistic mapping between predictions and ground-truth MOS
180
+ beta_init = [np.max(gt_mos), np.min(gt_mos), np.mean(pre_overall), 0.5]
181
+ popt, _ = curve_fit(logistic_func, pre_overall, gt_mos, p0=beta_init, maxfev=int(1e8))
182
+ pre_overall_logistic = logistic_func(pre_overall, *popt)
183
+
184
+ PLCC = stats.pearsonr(gt_mos, pre_overall_logistic)[0]
185
+ RMSE = np.sqrt(mean_squared_error(gt_mos, pre_overall_logistic))
186
+
187
+ print("Test results: SROCC={:.4f}, KROCC={:.4f}, PLCC={:.4f}, RMSE={:.4f}"
188
+ .format(SROCC, KROCC, PLCC, RMSE))
189
+
190
+ save_to_csv(pure_name_list, pre_smos, pre_tmos, pre_amos, pre_overall, args.output)
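For reference, the evaluation above follows the usual VQA reporting recipe: SROCC/KROCC are computed on the raw predictions, while PLCC/RMSE are computed after fitting a 4-parameter logistic mapping from predictions to MOS. The following is a minimal, self-contained sketch of that recipe on synthetic data; the variable names and the synthetic numbers are illustrative only and are not part of this commit.

import numpy as np
from scipy import stats
from scipy.optimize import curve_fit
from sklearn.metrics import mean_squared_error

def logistic_4p(x, b1, b2, b3, b4):
    # 4-parameter logistic curve, same shape as logistic_func in the script above
    return b2 + (b1 - b2) / (1 + np.exp(-(x - b3) / np.abs(b4)))

# hypothetical data standing in for MOS labels and raw model outputs
rng = np.random.default_rng(0)
gt_mos = rng.uniform(20.0, 80.0, size=200)
pred = 0.05 * gt_mos + rng.normal(0.0, 0.3, size=200)

srocc = stats.spearmanr(pred, gt_mos)[0]
krocc = stats.kendalltau(pred, gt_mos)[0]

# fit the logistic mapping, then compute PLCC / RMSE on the mapped predictions
p0 = [np.max(gt_mos), np.min(gt_mos), np.mean(pred), 0.5]
popt, _ = curve_fit(logistic_4p, pred, gt_mos, p0=p0, maxfev=int(1e8))
mapped = logistic_4p(pred, *popt)
plcc = stats.pearsonr(gt_mos, mapped)[0]
rmse = np.sqrt(mean_squared_error(gt_mos, mapped))
print(f"SROCC={srocc:.4f}, KROCC={krocc:.4f}, PLCC={plcc:.4f}, RMSE={rmse:.4f}")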
evaluate_one_video.py ADDED
@@ -0,0 +1,105 @@
1
+ import torch
2
+
3
+ import argparse
4
+ import pickle as pkl
5
+
6
+ import decord
7
+ from decord import VideoReader
8
+ import numpy as np
9
+ import yaml
10
+
11
+ from cover.datasets import UnifiedFrameSampler, spatial_temporal_view_decomposition
12
+ from cover.models import COVER
13
+
14
+ mean, std = (
15
+ torch.FloatTensor([123.675, 116.28, 103.53]),
16
+ torch.FloatTensor([58.395, 57.12, 57.375]),
17
+ )
18
+
19
+ mean_clip, std_clip = (
20
+ torch.FloatTensor([122.77, 116.75, 104.09]),
21
+ torch.FloatTensor([68.50, 66.63, 70.32])
22
+ )
23
+
24
+ def fuse_results(results: list):
25
+ x = (results[0] + results[1] + results[2])
26
+ return {
27
+ "semantic" : results[0],
28
+ "technical": results[1],
29
+ "aesthetic": results[2],
30
+ "overall" : x,
31
+ }
32
+
33
+
34
+ def parse_args():
35
+ parser = argparse.ArgumentParser()
36
+ parser.add_argument("-o", "--opt" , type=str, default="./cover.yml", help="the option file")
37
+ parser.add_argument("--video_path", type=str, default="./demo/video_1.mp4", help="path to the input video to evaluate")
38
+ args = parser.parse_args()
39
+ return args
40
+
41
+ if __name__ == "__main__":
42
+
43
+ args = parse_args()
44
+
45
+ """
46
+ BASIC SETTINGS
47
+ """
48
+ torch.cuda.current_device()
49
+ torch.cuda.empty_cache()
50
+ torch.backends.cudnn.benchmark = True
51
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
52
+ with open(args.opt, "r") as f:
53
+ opt = yaml.safe_load(f)
54
+
55
+ dopt = opt["data"]["val-ytugc"]["args"]
56
+ temporal_samplers = {}
57
+ for stype, sopt in dopt["sample_types"].items():
58
+ temporal_samplers[stype] = UnifiedFrameSampler(
59
+ sopt["clip_len"] // sopt["t_frag"],
60
+ sopt["t_frag"],
61
+ sopt["frame_interval"],
62
+ sopt["num_clips"],
63
+ )
64
+
65
+ """
66
+ LOAD MODEL
67
+ """
68
+ evaluator = COVER(**opt["model"]["args"]).to(device)
69
+ state_dict = torch.load(opt["test_load_path"], map_location=device)
70
+
71
+ # set strict=False to avoid errors from missing prompt_learner
72
+ # weights in the CLIP-IQA+ and cross-gate modules
73
+ evaluator.load_state_dict(state_dict['state_dict'], strict=False)
74
+
75
+ """
76
+ TESTING
77
+ """
78
+ views, _ = spatial_temporal_view_decomposition(
79
+ args.video_path, dopt["sample_types"], temporal_samplers
80
+ )
81
+
82
+ for k, v in views.items():
83
+ num_clips = dopt["sample_types"][k].get("num_clips", 1)
84
+ if k == 'technical' or k == 'aesthetic':
85
+ views[k] = (
86
+ ((v.permute(1, 2, 3, 0) - mean) / std)
87
+ .permute(3, 0, 1, 2)
88
+ .reshape(v.shape[0], num_clips, -1, *v.shape[2:])
89
+ .transpose(0, 1)
90
+ .to(device)
91
+ )
92
+ elif k == 'semantic':
93
+ views[k] = (
94
+ ((v.permute(1, 2, 3, 0) - mean_clip) / std_clip)
95
+ .permute(3, 0, 1, 2)
96
+ .reshape(v.shape[0], num_clips, -1, *v.shape[2:])
97
+ .transpose(0, 1)
98
+ .to(device)
99
+ )
100
+
101
+ results = [r.mean().item() for r in evaluator(views)]
102
+ pred_score = fuse_results(results)
103
+ print("path, semantic score, technical score, aesthetic score, overall/final score")
104
+ print(f'{args.video_path.split("/")[-1]},{pred_score["semantic"]:4f},{pred_score["technical"]:4f},{pred_score["aesthetic"]:4f},{pred_score["overall"]:4f}')
105
+
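The least transparent step in both evaluation scripts above is the permute/reshape chain applied to each view before inference: a decoded view arrives as a (C, T, H, W) tensor, is normalized channel-wise with the view-specific mean/std, and is then split into num_clips clips batched along a leading clip dimension. A minimal sketch of that transform on a dummy tensor (shapes here are illustrative, not taken from the configs):

import torch

mean = torch.FloatTensor([123.675, 116.28, 103.53])
std = torch.FloatTensor([58.395, 57.12, 57.375])

v = torch.rand(3, 8, 224, 224) * 255.0   # dummy decoded view: (C, T, H, W)
num_clips = 2

out = (
    ((v.permute(1, 2, 3, 0) - mean) / std)            # (T, H, W, C): broadcast per-channel stats
    .permute(3, 0, 1, 2)                              # back to (C, T, H, W)
    .reshape(v.shape[0], num_clips, -1, *v.shape[2:]) # split T into num_clips chunks
    .transpose(0, 1)                                  # (num_clips, C, T // num_clips, H, W)
)
print(out.shape)   # torch.Size([2, 3, 4, 224, 224])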
examplar_data_labels/CVD2014/labels.txt ADDED
@@ -0,0 +1,234 @@
1
+ Test3/City/Test03_City_D12.avi, 14.948471, 29.099967117037156, 20.65
2
+ Test5/City/Test05_City_D05.avi, 15.0, 24.0, 42.92
3
+ Test3/Talking_head/Test03_Talking_Head_D10.avi, 16.4, 25.0, 53.33
4
+ Test5/Television/Test05_Television_D03.avi, 21.04, 25.0, 63.93
5
+ Test5/City/Test05_City_D04.avi, 15.014389, 23.977, 63.85
6
+ Test4/City/Test04_City_D01.avi, 15.015015, 29.97000002997, 61.85
7
+ Test3/Talking_head/Test03_Talking_Head_D08.avi, 16.308336, 24.649970420035498, 46.1
8
+ Test2/City/Test02_City_D05.avi, 15.755979, 29.06833092550659, 45.91
9
+ Test4/Talking_head/Test04_Talking_Head_D14.avi, 14.983296, 23.96001552609006, 52.76
10
+ Test3/Talking_head/Test03_Talking_Head_D09.avi, 16.36177, 29.520033770918634, 37.85
11
+ Test4/Newspaper/Test04_Newspaper_D08.avi, 14.98327, 14.950007176003444, 29.73
12
+ Test1/Traffic/Test01_Traffic_D01.avi, 23.166435, 30.00030000300003, 67.74
13
+ Test6/Talking_head/Test06_Talking_head_D12.avi, 15.011408999999999, 24.981, 57.39
14
+ Test3/City/Test03_City_D02.avi, 15.048382, 29.97000002997, 16.48
15
+ Test5/Television/Test05_Television_D11.avi, 20.488353, 24.941, 23.3
16
+ Test3/City/Test03_City_D13.avi, 14.976324, 29.58002283577763, 32.65
17
+ Test2/Talking_head/Test02_Talking_Head_D07.avi, 16.832148, 29.526832509042592, 36.58
18
+ Test1/Talking_head/Test01_Talking_Head_D06.avi, 16.638224, 27.52697643690817, 38.22
19
+ Test5/Television/Test05_Television_D06.avi, 20.615546, 17.123, 33.32
20
+ Test3/Newspaper/Test03_Newspaper_D05.avi, 15.015015, 29.97000002997, 40.1
21
+ Test5/Talking_head/Test05_Talking_Head_D05.avi, 15.0, 24.0, 69.78
22
+ Test5/City/Test05_City_D10.avi, 14.999302, 28.668, 65.29
23
+ Test6/Talking_head/Test06_Talking_head_D01.avi, 15.0, 30.8, 14.9
24
+ Test3/Talking_head/Test03_Talking_Head_D01.avi, 15.549999999999999, 20.0, 39.03
25
+ Test2/Traffic/Test02_Traffic_D03.avi, 25.726547, 14.96508645330444, 21.78
26
+ Test3/Talking_head/Test03_Talking_Head_D12.avi, 16.288678, 29.099967117037156, 20.86
27
+ Test1/Talking_head/Test01_Talking_Head_D03.avi, 17.260519, 14.715663941325705, 31.98
28
+ Test3/City/Test03_City_D11.avi, 15.204692999999999, 22.229978014551744, 15.47
29
+ Test1/Talking_head/Test01_Talking_Head_D09.avi, 16.12, 25.0, 64.59
30
+ Test6/City/Test06_City_D01.avi, 15.0, 30.8, 17.23
31
+ Test5/Talking_head/Test05_Talking_Head_D07.avi, 14.979391, 19.894, 73.71
32
+ Test3/Newspaper/Test03_Newspaper_D06.avi, 14.966444, 14.89999865900012, 31.2
33
+ Test1/Traffic/Test01_Traffic_D08.avi, 24.119999999999997, 25.0, 63.4
34
+ Test2/Traffic/Test02_Traffic_D07.avi, 25.966395, 29.461155466517397, 61.43
35
+ Test4/Newspaper/Test04_Newspaper_D07.avi, 15.023477999999999, 14.909995810291177, 32.42
36
+ Test3/Talking_head/Test03_Talking_Head_D11.avi, 16.115344, 23.5800116956858, 38.37
37
+ Test4/Talking_head/Test04_Talking_Head_D13.avi, 13.04638, 29.97000002997, 59.4
38
+ Test1/Traffic/Test01_Traffic_D07.avi, 22.068345, 29.453953162323682, 65.53
39
+ Test3/Talking_head/Test03_Talking_Head_D06.avi, 15.234900999999999, 14.89999865900012, 28.2
40
+ Test5/Talking_head/Test05_Talking_Head_D03.avi, 15.0, 25.0, 83.25
41
+ Test2/Talking_head/Test02_Talking_Head_D05.avi, 17.334484, 28.728862739239606, 46.3
42
+ Test6/Television/Test06_Television_D11.avi, 22.015213, 29.843, 23.8
43
+ Test4/Newspaper/Test04_Newspaper_D09.avi, 15.013294, 30.040043377822638, 87.46
44
+ Test4/City/Test04_City_D02.avi, 15.0, 25.0, 70.56
45
+ Test6/City/Test06_City_D07.avi, 15.009443999999998, 29.648, 58.78
46
+ Test1/Traffic/Test01_Traffic_D03.avi, 26.332442, 14.848603637313946, 27.76
47
+ Test6/Talking_head/Test06_Talking_head_D10.avi, 14.997193, 24.938, 23.31
48
+ Test5/City/Test05_City_D08.avi, 15.000497, 30.199, 63.76
49
+ Test4/Talking_head/Test04_Talking_Head_D01.avi, 14.839504, 23.99002015161693, 51.43
50
+ Test1/Talking_head/Test01_Talking_Head_D05.avi, 17.148035999999998, 14.753875843184005, 35.1
51
+ Test6/City/Test06_City_D12.avi, 15.009005, 24.985, 53.03
52
+ Test6/Talking_head/Test06_Talking_head_D04.avi, 15.0, 25.0, 76.79
53
+ Test6/Television/Test06_Television_D03.avi, 21.997249999999998, 24.003, 28.29
54
+ Test3/City/Test03_City_D04.avi, 15.006241, 21.124543973906963, 13.96
55
+ Test3/Newspaper/Test03_Newspaper_D07.avi, 15.003326999999999, 30.059999759520004, 49.85
56
+ Test1/City/Test01_City_D04.avi, 11.498643, 30.003540417769297, 30.23
57
+ Test5/City/Test05_City_D03.avi, 15.0, 25.0, 68.62
58
+ Test5/Television/Test05_Television_D12.avi, 20.735758999999998, 24.981, 35.12
59
+ Test3/Newspaper/Test03_Newspaper_D10.avi, 15.0, 25.0, 49.95
60
+ Test2/Talking_head/Test02_Talking_Head_D10.avi, 16.846667999999998, 29.79817694753435, 33.8
61
+ Test5/City/Test05_City_D09.avi, 15.0, 30.0, 79.61
62
+ Test3/City/Test03_City_D10.avi, 15.008216, 24.320012451846374, 50.12
63
+ Test5/Talking_head/Test05_Talking_Head_D16.avi, 15.016328999999999, 29.701, 72.57
64
+ Test2/Talking_head/Test02_Talking_Head_D06.avi, 16.845816, 23.151149223047433, 24.35
65
+ Test6/Talking_head/Test06_Talking_head_D09.avi, 15.033011, 12.173210172908277, 0.58
66
+ Test6/Television/Test06_Television_D02.avi, 21.990837, 24.01, 28.81
67
+ Test4/Talking_head/Test04_Talking_Head_D09.avi, 13.48391, 30.109961579689024, 75.11
68
+ Test6/City/Test06_City_D05.avi, 15.005583, 28.656, 61.12
69
+ Test2/City/Test02_City_D04.avi, 16.499721, 14.848735927110525, 9.25
70
+ Test5/City/Test05_City_D11.avi, 15.007503999999999, 29.985, 73.55
71
+ Test5/Television/Test05_Television_D15.avi, 20.688726, 29.678, 36.1
72
+ Test4/Talking_head/Test04_Talking_Head_D05.avi, 14.347681, 29.97000002997, 93.38
73
+ Test6/City/Test06_City_D08.avi, 15.0, 30.0, 72.68
74
+ Test4/Newspaper/Test04_Newspaper_D14.avi, 15.012504, 23.98001026344439, 41.92
75
+ Test1/City/Test01_City_D06.avi, 11.363332999999999, 28.600764784450337, 43.78
76
+ Test5/Talking_head/Test05_Talking_Head_D15.avi, 14.983148, 29.967, 63.92
77
+ Test6/City/Test06_City_D11.avi, 15.011896, 29.843, 62.03
78
+ Test1/City/Test01_City_D05.avi, 11.708831, 14.689766814641585, 25.08
79
+ Test2/Talking_head/Test02_Talking_Head_D09.avi, 16.919999999999998, 25.0, 64.84
80
+ Test4/Talking_head/Test04_Talking_Head_D02.avi, 15.52, 25.0, 83.6
81
+ Test4/Newspaper/Test04_Newspaper_D06.avi, 15.027104, 12.910005641672466, 5.08
82
+ Test6/City/Test06_City_D06.avi, 14.983493, 23.626, 55.78
83
+ Test6/Talking_head/Test06_Talking_head_D11.avi, 14.990084999999999, 29.753, 42.32
84
+ Test3/Talking_head/Test03_Talking_Head_D03.avi, 15.799983999999998, 30.00003000003, 67.38
85
+ Test5/City/Test05_City_D12.avi, 15.010207, 24.983, 64.24
86
+ Test4/Newspaper/Test04_Newspaper_D13.avi, 15.015015, 29.97000002997, 64.41
87
+ Test6/Television/Test06_Television_D05.avi, 22.01717, 17.123, 27.85
88
+ Test1/Talking_head/Test01_Talking_Head_D02.avi, 17.028, 15.151515151515152, 31.65
89
+ Test1/Talking_head/Test01_Talking_Head_D01.avi, 16.166505, 30.00030000300003, 69.37
90
+ Test3/Newspaper/Test03_Newspaper_D13.avi, 15.11155, 29.58002283577763, 38.65
91
+ Test5/City/Test05_City_D01.avi, 15.000487, 30.799, 26.07
92
+ Test2/Talking_head/Test02_Talking_Head_D01.avi, 16.966497, 30.00030000300003, 64.68
93
+ Test4/Newspaper/Test04_Newspaper_D11.avi, 14.985992, 14.280002284800366, 4.14
94
+ Test3/Newspaper/Test03_Newspaper_D08.avi, 14.945312, 24.690016838591482, 47.8
95
+ Test3/Newspaper/Test03_Newspaper_D09.avi, 15.013591, 29.43999246336193, 47.22
96
+ Test6/Television/Test06_Television_D08.avi, 22.000733, 29.999, 23.61
97
+ Test2/Traffic/Test02_Traffic_D05.avi, 26.822181999999998, 28.670300149372263, 30.89
98
+ Test4/City/Test04_City_D05.avi, 15.015015, 29.97000002997, 65.86
99
+ Test4/Talking_head/Test04_Talking_Head_D11.avi, 14.856345, 14.269996903410672, 4.5
100
+ Test2/Traffic/Test02_Traffic_D06.avi, 25.941309999999998, 24.169943708201103, 34.66
101
+ Test2/City/Test02_City_D06.avi, 14.589739, 24.949041582567606, 38.99
102
+ Test5/Talking_head/Test05_Talking_Head_D04.avi, 15.015015, 23.976, 80.15
103
+ Test5/Television/Test05_Television_D13.avi, 20.279189, 25.001, 27.91
104
+ Test6/Television/Test06_Television_D06.avi, 22.01384, 19.942, 33.68
105
+ Test5/Television/Test05_Television_D05.avi, 21.375, 24.0, 48.27
106
+ Test5/Television/Test05_Television_D01.avi, 20.975388, 30.798, 11.45
107
+ Test6/Talking_head/Test06_Talking_head_D03.avi, 14.997499999999999, 24.004, 53.44
108
+ Test3/City/Test03_City_D08.avi, 14.033662, 24.369975215735206, 30.01
109
+ Test1/City/Test01_City_D01.avi, 12.133212, 30.00030000300003, 67.29
110
+ Test4/City/Test04_City_D08.avi, 15.030068, 14.969992649733609, 26.17
111
+ Test1/Talking_head/Test01_Talking_Head_D08.avi, 16.08, 25.0, 53.69
112
+ Test2/Traffic/Test02_Traffic_D02.avi, 20.776552, 29.985726794046034, 25.39
113
+ Test2/Traffic/Test02_Traffic_D01.avi, 26.866398, 30.00030000300003, 69.62
114
+ Test6/Television/Test06_Television_D09.avi, 22.022837, 10.670741467141585, 7.23
115
+ Test5/Television/Test05_Television_D07.avi, 20.409357, 17.1, 43.05
116
+ Test4/City/Test04_City_D03.avi, 15.0, 25.0, 68.6
117
+ Test2/City/Test02_City_D08.avi, 15.837328, 29.73986539736921, 36.88
118
+ Test5/Talking_head/Test05_Talking_Head_D02.avi, 14.993753, 24.01, 65.4
119
+ Test2/City/Test02_City_D10.avi, 16.454064, 29.415225320625957, 0.21
120
+ Test4/Newspaper/Test04_Newspaper_D01.avi, 15.015015, 29.97000002997, 70.38
121
+ Test4/City/Test04_City_D10.avi, 15.018037999999999, 24.9699736067379, 38.64
122
+ Test2/Talking_head/Test02_Talking_Head_D02.avi, 16.564215, 30.00444065721727, 22.64
123
+ Test2/Traffic/Test02_Traffic_D08.avi, 25.939173, 29.87759150759339, 29.35
124
+ Test2/City/Test02_City_D01.avi, 15.333179999999999, 30.00030000300003, 53.35
125
+ Test6/Talking_head/Test06_Talking_head_D08.avi, 15.0, 30.0, 62.56
126
+ Test3/Newspaper/Test03_Newspaper_D11.avi, 15.014636999999999, 23.910002749650317, 46.51
127
+ Test6/Talking_head/Test06_Talking_head_D07.avi, 15.001681999999999, 14.865, 32.99
128
+ Test4/Newspaper/Test04_Newspaper_D10.avi, 14.983972999999999, 24.9600015974401, 52.99
129
+ Test6/City/Test06_City_D10.avi, 14.999630999999999, 27.134, 23.58
130
+ Test6/City/Test06_City_D13.avi, 15.015452, 27.505, 64.28
131
+ Test4/Talking_head/Test04_Talking_Head_D08.avi, 13.913037, 14.950007176003444, 38.31
132
+ Test6/City/Test06_City_D16.avi, 14.998655, 29.736, 58.33
133
+ Test3/Talking_head/Test03_Talking_Head_D02.avi, 15.081748, 29.97000002997, 36.32
134
+ Test6/Talking_head/Test06_Talking_head_D15.avi, 14.993459, 29.813, 73.35
135
+ Test5/City/Test05_City_D15.avi, 15.000167, 29.933, 71.29
136
+ Test2/City/Test02_City_D07.avi, 15.816123, 29.526832509042592, 62.54
137
+ Test1/City/Test01_City_D03.avi, 11.75584, 14.460897732531235, 17.96
138
+ Test1/Traffic/Test01_Traffic_D04.avi, 21.209263999999997, 29.98689572656749, 25.72
139
+ Test2/City/Test02_City_D03.avi, 16.039728, 14.96284725027756, 18.83
140
+ Test6/Talking_head/Test06_Talking_head_D02.avi, 14.994377, 24.009, 57.48
141
+ Test5/Talking_head/Test05_Talking_Head_D08.avi, 14.982325999999999, 24.896, 61.01
142
+ Test4/Talking_head/Test04_Talking_Head_D03.avi, 14.24, 25.0, 77.09
143
+ Test5/Talking_head/Test05_Talking_Head_D06.avi, 14.990884, 29.618, 71.0
144
+ Test5/Talking_head/Test05_Talking_Head_D11.avi, 14.994187, 24.943, 56.29
145
+ Test3/Newspaper/Test03_Newspaper_D04.avi, 15.017282, 23.572841258129685, 39.98
146
+ Test5/Television/Test05_Television_D02.avi, 21.157850999999997, 24.01, 42.29
147
+ Test5/Television/Test05_Television_D16.avi, 20.881045, 29.692, 45.88
148
+ Test6/Television/Test06_Television_D15.avi, 21.997242, 29.731, 33.46
149
+ Test5/Talking_head/Test05_Talking_Head_D14.avi, 14.985776, 24.957, 67.29
150
+ Test3/Newspaper/Test03_Newspaper_D03.avi, 14.999984999999999, 30.00003000003, 55.89
151
+ Test3/City/Test03_City_D05.avi, 15.015015, 29.97000002997, 22.47
152
+ Test5/Talking_head/Test05_Talking_Head_D12.avi, 15.009606, 24.984, 68.36
153
+ Test4/Newspaper/Test04_Newspaper_D02.avi, 15.0, 25.0, 82.05
154
+ Test5/City/Test05_City_D16.avi, 15.016328999999999, 29.701, 76.85
155
+ Test4/City/Test04_City_D09.avi, 15.011643, 30.109961579689024, 62.08
156
+ Test3/City/Test03_City_D07.avi, 14.912008, 30.109961579689024, 35.74
157
+ Test4/Talking_head/Test04_Talking_Head_D04.avi, 15.066652, 30.00003000003, 83.73
158
+ Test5/City/Test05_City_D07.avi, 15.002127999999999, 28.196, 66.67
159
+ Test4/City/Test04_City_D14.avi, 14.997898, 23.669983620371333, 42.63
160
+ Test5/Television/Test05_Television_D08.avi, 21.296433, 20.379, 27.84
161
+ Test6/Television/Test06_Television_D16.avi, 21.986746, 28.972, 20.54
162
+ Test2/Talking_head/Test02_Talking_Head_D08.avi, 16.572551, 29.8686674691382, 35.7
163
+ Test3/Talking_head/Test03_Talking_Head_D13.avi, 15.673661, 29.54000307216032, 32.72
164
+ Test4/Talking_head/Test04_Talking_Head_D07.avi, 14.515042999999999, 14.950007176003444, 31.15
165
+ Test3/Newspaper/Test03_Newspaper_D12.avi, 15.120292, 29.099967117037156, 41.32
166
+ Test4/Newspaper/Test04_Newspaper_D03.avi, 15.0, 25.0, 81.33
167
+ Test6/Talking_head/Test06_Talking_head_D06.avi, 14.980642999999999, 23.764, 61.99
168
+ Test6/City/Test06_City_D15.avi, 15.006364, 29.854, 71.95
169
+ Test4/Newspaper/Test04_Newspaper_D12.avi, 14.982835, 29.099967117037156, 37.95
170
+ Test4/City/Test04_City_D11.avi, 14.985992, 14.280002284800366, -1.11
171
+ Test5/Talking_head/Test05_Talking_Head_D09.avi, 15.0, 30.0, 68.07
172
+ Test4/City/Test04_City_D06.avi, 14.990694, 10.739996498761142, -6.5
173
+ Test6/City/Test06_City_D02.avi, 15.015015, 29.97, 72.11
174
+ Test4/City/Test04_City_D04.avi, 14.999984999999999, 30.00003000003, 69.0
175
+ Test6/Talking_head/Test06_Talking_head_D05.avi, 15.009051999999999, 17.123, 45.07
176
+ Test6/Television/Test06_Television_D12.avi, 22.014969999999998, 24.983, 22.48
177
+ Test3/City/Test03_City_D09.avi, 15.006895, 28.92003898421255, 30.86
178
+ Test4/Talking_head/Test04_Talking_Head_D10.avi, 13.879999999999999, 25.0, 61.18
179
+ Test6/Television/Test06_Television_D10.avi, 22.014595999999997, 24.938, 14.77
180
+ Test4/Newspaper/Test04_Newspaper_D04.avi, 14.999984999999999, 30.00003000003, 82.52
181
+ Test1/Traffic/Test01_Traffic_D02.avi, 22.044, 15.151515151515152, 29.57
182
+ Test4/City/Test04_City_D07.avi, 15.020033, 14.979994217722233, 16.12
183
+ Test3/Talking_head/Test03_Talking_Head_D07.avi, 16.058394, 30.140000301400004, 35.29
184
+ Test4/Talking_head/Test04_Talking_Head_D12.avi, 15.945034999999999, 29.099967117037156, 17.06
185
+ Test1/Talking_head/Test01_Talking_Head_D04.avi, 16.930918, 30.06334346468008, 30.87
186
+ Test6/Talking_head/Test06_Talking_head_D16.avi, 15.007688, 29.918, 33.02
187
+ Test1/Talking_head/Test01_Talking_Head_D07.avi, 16.832297, 29.52657096120799, 35.99
188
+ Test1/Traffic/Test01_Traffic_D05.avi, 24.218208, 14.86484879475806, 30.21
189
+ Test3/City/Test03_City_D06.avi, 15.100672999999999, 14.89999865900012, 12.85
190
+ Test6/City/Test06_City_D04.avi, 15.0, 25.0, 67.94
191
+ Test5/Talking_head/Test05_Talking_Head_D01.avi, 15.000487, 30.799, 46.8
192
+ Test1/Traffic/Test01_Traffic_D09.avi, 24.799999999999997, 25.0, 60.7
193
+ Test5/City/Test05_City_D02.avi, 15.015015, 29.97, 76.43
194
+ Test5/City/Test05_City_D14.avi, 15.010318, 30.046, 64.27
195
+ Test1/City/Test01_City_D02.avi, 11.418, 15.151515151515152, 24.86
196
+ Test2/Talking_head/Test02_Talking_Head_D03.avi, 17.592412, 14.949627231044994, 22.11
197
+ Test5/Television/Test05_Television_D10.avi, 21.294514, 24.936, 22.5
198
+ Test4/Newspaper/Test04_Newspaper_D05.avi, 15.015015, 29.97000002997, 79.36
199
+ Test4/City/Test04_City_D12.avi, 15.017199, 29.099967117037156, 35.17
200
+ Test6/Television/Test06_Television_D04.avi, 22.0, 25.0, 39.1
201
+ Test5/Television/Test05_Television_D14.avi, 20.645419999999998, 24.945, 18.45
202
+ Test5/Television/Test05_Television_D04.avi, 21.020144, 23.977, 55.0
203
+ Test6/Television/Test06_Television_D07.avi, 21.985433, 14.828, 22.59
204
+ Test3/Newspaper/Test03_Newspaper_D02.avi, 15.015015, 29.97000002997, 36.79
205
+ Test6/Talking_head/Test06_Talking_head_D13.avi, 14.994187, 24.943, 47.76
206
+ Test1/City/Test01_City_D08.avi, 11.799999999999999, 25.0, 63.68
207
+ Test2/City/Test02_City_D02.avi, 15.923357, 29.95599464386816, 9.99
208
+ Test6/Television/Test06_Television_D01.avi, 22.013702, 30.799, 10.93
209
+ Test2/Traffic/Test02_Traffic_D04.avi, 24.58421, 14.846928170561512, 24.69
210
+ Test5/Talking_head/Test05_Talking_Head_D10.avi, 14.997193, 24.938, 60.05
211
+ Test4/City/Test04_City_D13.avi, 15.015015, 29.97000002997, 52.28
212
+ Test3/Newspaper/Test03_Newspaper_D01.avi, 14.999849999999999, 30.00030000300003, 50.49
213
+ Test1/Traffic/Test01_Traffic_D06.avi, 21.899675, 28.173934602663, 43.36
214
+ Test6/Talking_head/Test06_Talking_head_D14.avi, 14.985049, 29.763, 58.85
215
+ Test2/City/Test02_City_D09.avi, 16.08, 25.0, 56.28
216
+ Test6/City/Test06_City_D14.avi, 15.009381, 29.848, 72.46
217
+ Test1/City/Test01_City_D09.avi, 12.32, 25.0, 64.95
218
+ Test4/Talking_head/Test04_Talking_Head_D06.avi, 14.606734999999999, 9.790004405501982, 1.97
219
+ Test6/City/Test06_City_D09.avi, 15.018661999999999, 14.914776964425274, -0.85
220
+ Test6/City/Test06_City_D03.avi, 15.013511999999999, 29.973, 71.76
221
+ Test2/Talking_head/Test02_Talking_Head_D04.avi, 17.359872, 14.746652509880256, 16.69
222
+ Test3/City/Test03_City_D03.avi, 14.966652, 30.00003000003, 57.62
223
+ Test6/Television/Test06_Television_D13.avi, 22.011066, 24.942, 23.7
224
+ Test3/City/Test03_City_D01.avi, 14.788022, 31.579612202362156, 34.75
225
+ Test3/Talking_head/Test03_Talking_Head_D05.avi, 16.116115999999998, 29.97000002997, 36.98
226
+ Test2/Traffic/Test02_Traffic_D10.avi, 28.059312, 29.5801978323631, 27.35
227
+ Test5/City/Test05_City_D13.avi, 14.9988, 25.002, 62.65
228
+ Test5/City/Test05_City_D06.avi, 14.983801, 29.632, 75.32
229
+ Test1/City/Test01_City_D07.avi, 12.572566, 28.474695961433873, 51.67
230
+ Test6/Television/Test06_Television_D14.avi, 21.998995, 29.865, 31.73
231
+ Test5/Talking_head/Test05_Talking_Head_D13.avi, 14.9988, 25.002, 61.84
232
+ Test3/Talking_head/Test03_Talking_Head_D04.avi, 15.893785999999999, 14.09355867994092, 27.86
233
+ Test5/Television/Test05_Television_D09.avi, 20.666667, 30.0, 33.59
234
+ Test2/Traffic/Test02_Traffic_D09.avi, 27.04, 25.0, 64.3
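Each line in these examplar label files is a comma-separated record: a relative video path, two per-video numeric fields (they look like duration and frame rate, though that is only inferred from the values), and the subjective score in the last column. A minimal parsing sketch, assuming only that layout; the read_labels helper is illustrative and not part of the repo (loading is presumably handled in cover/datasets):

def read_labels(path):
    """Parse a labels.txt into (video_path, score) pairs, keeping the last column as the score."""
    samples = []
    with open(path, "r") as f:
        for line in f:
            parts = [p.strip() for p in line.strip().split(",")]
            if len(parts) < 2:
                continue
            samples.append((parts[0], float(parts[-1])))
    return samples

samples = read_labels("examplar_data_labels/CVD2014/labels.txt")
print(len(samples), samples[0])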
examplar_data_labels/DIVIDE_MaxWell/train_labels.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/DIVIDE_MaxWell/val_labels.txt ADDED
@@ -0,0 +1,909 @@
1
+ 0000.mp4, 1.114285714, 1.057142857, 1.028571429
2
+ 0012.mp4, 1.538461538, 1.423076923, 1.384615385
3
+ 0014.mp4, 1.423076923, 1.653846154, 1.423076923
4
+ 0015.mp4, 1.576923077, 1.5, 1.423076923
5
+ 0016.mp4, 1.448275862, 1.793103448, 1.448275862
6
+ 0020.mp4, 1.423076923, 1.576923077, 1.461538462
7
+ 0022.mp4, 1.551724138, 1.724137931, 1.482758621
8
+ 0028.mp4, 1.538461538, 1.615384615, 1.5
9
+ 0030.mp4, 1.357142857, 1.785714286, 1.5
10
+ 0036.mp4, 1.428571429, 1.928571429, 1.535714286
11
+ 0039.mp4, 1.642857143, 1.678571429, 1.571428571
12
+ 0040.mp4, 1.657142857, 1.914285714, 1.571428571
13
+ 0042.mp4, 1.5, 1.821428571, 1.571428571
14
+ 0043.mp4, 1.769230769, 1.538461538, 1.576923077
15
+ 0047.mp4, 1.428571429, 1.857142857, 1.6
16
+ 0048.mp4, 1.6, 1.971428571, 1.6
17
+ 0049.mp4, 1.538461538, 1.615384615, 1.615384615
18
+ 0052.mp4, 1.514285714, 2.2, 1.628571429
19
+ 0054.mp4, 1.607142857, 1.892857143, 1.642857143
20
+ 0061.mp4, 1.586206897, 1.896551724, 1.689655172
21
+ 0071.mp4, 1.742857143, 1.942857143, 1.742857143
22
+ 0073.mp4, 1.714285714, 1.964285714, 1.75
23
+ 0078.mp4, 1.615384615, 2.153846154, 1.769230769
24
+ 0081.mp4, 1.485714286, 2.314285714, 1.771428571
25
+ 0082.mp4, 1.607142857, 2.178571429, 1.785714286
26
+ 0097.mp4, 2.192307692, 1.692307692, 1.807692308
27
+ 0098.mp4, 1.692307692, 2.076923077, 1.807692308
28
+ 0100.mp4, 1.714285714, 2.142857143, 1.821428571
29
+ 0101.mp4, 1.823529412, 2.088235294, 1.823529412
30
+ 0103.mp4, 1.5, 2.384615385, 1.846153846
31
+ 0108.mp4, 1.821428571, 2.25, 1.857142857
32
+ 0112.mp4, 1.75862069, 2.275862069, 1.862068966
33
+ 0119.mp4, 1.692307692, 1.884615385, 1.884615385
34
+ 0122.mp4, 1.730769231, 2.307692308, 1.884615385
35
+ 0131.mp4, 1.655172414, 2.068965517, 1.896551724
36
+ 0138.mp4, 1.828571429, 2.314285714, 1.914285714
37
+ 0144.mp4, 1.642857143, 2.535714286, 1.928571429
38
+ 0145.mp4, 1.75862069, 2.034482759, 1.931034483
39
+ 0158.mp4, 1.769230769, 2.423076923, 1.961538462
40
+ 0162.mp4, 2.0, 2.230769231, 1.961538462
41
+ 0166.mp4, 1.964285714, 2.75, 1.964285714
42
+ 0170.mp4, 1.941176471, 2.235294118, 1.970588235
43
+ 0176.mp4, 2.071428571, 2.142857143, 2.0
44
+ 0180.mp4, 1.857142857, 2.342857143, 2.0
45
+ 0182.mp4, 1.896551724, 2.482758621, 2.0
46
+ 0184.mp4, 1.571428571, 2.571428571, 2.0
47
+ 0192.mp4, 1.678571429, 2.821428571, 2.0
48
+ 0196.mp4, 1.911764706, 2.235294118, 2.0
49
+ 0198.mp4, 1.794117647, 2.352941176, 2.0
50
+ 0202.mp4, 2.034482759, 2.413793103, 2.034482759
51
+ 0212.mp4, 1.923076923, 2.115384615, 2.038461538
52
+ 0220.mp4, 2.085714286, 2.571428571, 2.057142857
53
+ 0221.mp4, 1.971428571, 2.571428571, 2.057142857
54
+ 0225.mp4, 2.117647059, 2.235294118, 2.058823529
55
+ 0227.mp4, 1.970588235, 2.411764706, 2.058823529
56
+ 0228.mp4, 1.882352941, 2.529411765, 2.058823529
57
+ 0232.mp4, 1.75, 2.571428571, 2.071428571
58
+ 0236.mp4, 2.0, 2.214285714, 2.071428571
59
+ 0243.mp4, 2.057142857, 2.257142857, 2.085714286
60
+ 0247.mp4, 1.942857143, 2.6, 2.085714286
61
+ 0250.mp4, 1.794117647, 2.676470588, 2.088235294
62
+ 0251.mp4, 2.0, 2.235294118, 2.088235294
63
+ 0252.mp4, 2.058823529, 2.588235294, 2.088235294
64
+ 0256.mp4, 2.103448276, 2.344827586, 2.103448276
65
+ 0261.mp4, 1.821428571, 2.607142857, 2.107142857
66
+ 0264.mp4, 2.0, 2.321428571, 2.107142857
67
+ 0266.mp4, 1.914285714, 2.457142857, 2.114285714
68
+ 0268.mp4, 1.942857143, 2.714285714, 2.114285714
69
+ 0288.mp4, 1.928571429, 2.392857143, 2.142857143
70
+ 0294.mp4, 2.029411765, 2.588235294, 2.147058824
71
+ 0296.mp4, 1.970588235, 2.382352941, 2.147058824
72
+ 0297.mp4, 2.235294118, 2.264705882, 2.147058824
73
+ 0300.mp4, 2.088235294, 2.411764706, 2.147058824
74
+ 0310.mp4, 2.068965517, 2.620689655, 2.172413793
75
+ 0319.mp4, 1.928571429, 2.785714286, 2.178571429
76
+ 0321.mp4, 1.892857143, 3.0, 2.178571429
77
+ 0331.mp4, 2.107142857, 2.464285714, 2.178571429
78
+ 0337.mp4, 2.230769231, 2.615384615, 2.192307692
79
+ 0352.mp4, 2.088235294, 2.441176471, 2.205882353
80
+ 0356.mp4, 2.068965517, 2.620689655, 2.206896552
81
+ 0362.mp4, 2.24137931, 2.655172414, 2.206896552
82
+ 0364.mp4, 2.142857143, 2.571428571, 2.214285714
83
+ 0371.mp4, 2.028571429, 2.657142857, 2.228571429
84
+ 0372.mp4, 2.0, 2.828571429, 2.228571429
85
+ 0379.mp4, 2.423076923, 2.153846154, 2.230769231
86
+ 0384.mp4, 2.176470588, 2.588235294, 2.235294118
87
+ 0388.mp4, 2.275862069, 2.344827586, 2.24137931
88
+ 0394.mp4, 2.24137931, 2.379310345, 2.24137931
89
+ 0402.mp4, 2.142857143, 2.5, 2.25
90
+ 0407.mp4, 2.071428571, 2.857142857, 2.25
91
+ 0412.mp4, 2.142857143, 2.771428571, 2.257142857
92
+ 0413.mp4, 2.142857143, 2.857142857, 2.257142857
93
+ 0415.mp4, 2.205882353, 2.470588235, 2.264705882
94
+ 0419.mp4, 2.153846154, 2.730769231, 2.269230769
95
+ 0425.mp4, 2.153846154, 2.461538462, 2.269230769
96
+ 0426.mp4, 2.153846154, 2.615384615, 2.269230769
97
+ 0430.mp4, 2.153846154, 2.807692308, 2.269230769
98
+ 0431.mp4, 2.0, 2.807692308, 2.269230769
99
+ 0435.mp4, 2.423076923, 2.192307692, 2.269230769
100
+ 0438.mp4, 2.153846154, 2.692307692, 2.269230769
101
+ 0441.mp4, 1.896551724, 2.827586207, 2.275862069
102
+ 0445.mp4, 2.068965517, 2.965517241, 2.275862069
103
+ 0449.mp4, 2.214285714, 2.678571429, 2.285714286
104
+ 0457.mp4, 2.142857143, 2.542857143, 2.285714286
105
+ 0459.mp4, 2.107142857, 2.571428571, 2.285714286
106
+ 0463.mp4, 2.214285714, 2.785714286, 2.285714286
107
+ 0472.mp4, 2.346153846, 2.461538462, 2.307692308
108
+ 0476.mp4, 2.115384615, 2.692307692, 2.307692308
109
+ 0478.mp4, 2.076923077, 2.884615385, 2.307692308
110
+ 0484.mp4, 2.615384615, 2.538461538, 2.307692308
111
+ 0487.mp4, 2.038461538, 2.730769231, 2.307692308
112
+ 0507.mp4, 2.214285714, 2.642857143, 2.321428571
113
+ 0514.mp4, 2.205882353, 2.411764706, 2.323529412
114
+ 0526.mp4, 2.103448276, 2.75862069, 2.344827586
115
+ 0533.mp4, 2.346153846, 2.576923077, 2.346153846
116
+ 0540.mp4, 2.461538462, 2.5, 2.346153846
117
+ 0544.mp4, 1.923076923, 2.730769231, 2.346153846
118
+ 0545.mp4, 2.153846154, 2.730769231, 2.346153846
119
+ 0551.mp4, 2.307692308, 2.884615385, 2.346153846
120
+ 0553.mp4, 2.153846154, 2.923076923, 2.346153846
121
+ 0554.mp4, 2.269230769, 2.461538462, 2.346153846
122
+ 0557.mp4, 2.264705882, 2.588235294, 2.352941176
123
+ 0558.mp4, 2.357142857, 2.392857143, 2.357142857
124
+ 0571.mp4, 2.107142857, 2.857142857, 2.357142857
125
+ 0578.mp4, 2.142857143, 2.685714286, 2.371428571
126
+ 0583.mp4, 2.344827586, 2.827586207, 2.379310345
127
+ 0585.mp4, 2.206896552, 2.931034483, 2.379310345
128
+ 0589.mp4, 1.931034483, 2.75862069, 2.379310345
129
+ 0593.mp4, 2.264705882, 2.852941176, 2.382352941
130
+ 0595.mp4, 2.382352941, 2.558823529, 2.382352941
131
+ 0601.mp4, 2.692307692, 2.269230769, 2.384615385
132
+ 0602.mp4, 2.192307692, 2.538461538, 2.384615385
133
+ 0606.mp4, 1.923076923, 2.5, 2.384615385
134
+ 0619.mp4, 2.0, 2.785714286, 2.392857143
135
+ 0627.mp4, 2.142857143, 3.0, 2.392857143
136
+ 0629.mp4, 2.228571429, 2.885714286, 2.4
137
+ 0635.mp4, 2.352941176, 2.558823529, 2.411764706
138
+ 0639.mp4, 2.323529412, 2.647058824, 2.411764706
139
+ 0642.mp4, 2.352941176, 2.794117647, 2.411764706
140
+ 0649.mp4, 2.24137931, 2.655172414, 2.413793103
141
+ 0657.mp4, 2.115384615, 2.538461538, 2.423076923
142
+ 0666.mp4, 2.153846154, 2.538461538, 2.423076923
143
+ 0669.mp4, 2.269230769, 2.692307692, 2.423076923
144
+ 0672.mp4, 2.307692308, 2.730769231, 2.423076923
145
+ 0683.mp4, 2.807692308, 2.230769231, 2.423076923
146
+ 0684.mp4, 2.538461538, 2.615384615, 2.423076923
147
+ 0686.mp4, 2.321428571, 2.642857143, 2.428571429
148
+ 0689.mp4, 2.357142857, 2.821428571, 2.428571429
149
+ 0692.mp4, 2.357142857, 2.821428571, 2.428571429
150
+ 0695.mp4, 2.357142857, 2.428571429, 2.428571429
151
+ 0699.mp4, 2.25, 2.678571429, 2.428571429
152
+ 0700.mp4, 2.114285714, 2.742857143, 2.428571429
153
+ 0704.mp4, 2.171428571, 2.828571429, 2.428571429
154
+ 0715.mp4, 2.24137931, 2.75862069, 2.448275862
155
+ 0716.mp4, 2.655172414, 2.275862069, 2.448275862
156
+ 0722.mp4, 2.034482759, 2.896551724, 2.448275862
157
+ 0723.mp4, 2.413793103, 2.931034483, 2.448275862
158
+ 0726.mp4, 2.371428571, 2.714285714, 2.457142857
159
+ 0729.mp4, 1.914285714, 3.171428571, 2.457142857
160
+ 0730.mp4, 2.346153846, 2.5, 2.461538462
161
+ 0731.mp4, 2.423076923, 2.538461538, 2.461538462
162
+ 0732.mp4, 2.307692308, 2.538461538, 2.461538462
163
+ 0741.mp4, 2.423076923, 2.807692308, 2.461538462
164
+ 0742.mp4, 2.192307692, 2.884615385, 2.461538462
165
+ 0750.mp4, 2.5, 2.576923077, 2.461538462
166
+ 0753.mp4, 2.730769231, 2.346153846, 2.461538462
167
+ 0754.mp4, 2.307692308, 2.576923077, 2.461538462
168
+ 0757.mp4, 2.461538462, 2.769230769, 2.461538462
169
+ 0760.mp4, 2.230769231, 2.923076923, 2.461538462
170
+ 0774.mp4, 2.25, 2.642857143, 2.464285714
171
+ 0775.mp4, 2.321428571, 2.857142857, 2.464285714
172
+ 0790.mp4, 2.2, 2.8, 2.485714286
173
+ 0791.mp4, 2.257142857, 2.857142857, 2.485714286
174
+ 0793.mp4, 2.371428571, 2.885714286, 2.485714286
175
+ 0798.mp4, 2.5, 2.653846154, 2.5
176
+ 0799.mp4, 2.5, 2.653846154, 2.5
177
+ 0800.mp4, 2.192307692, 2.653846154, 2.5
178
+ 0805.mp4, 2.384615385, 2.730769231, 2.5
179
+ 0807.mp4, 2.214285714, 2.75, 2.5
180
+ 0809.mp4, 2.269230769, 2.769230769, 2.5
181
+ 0813.mp4, 2.269230769, 2.884615385, 2.5
182
+ 0821.mp4, 2.192307692, 3.115384615, 2.5
183
+ 0824.mp4, 2.653846154, 2.461538462, 2.5
184
+ 0826.mp4, 2.346153846, 3.0, 2.5
185
+ 0828.mp4, 2.384615385, 2.576923077, 2.5
186
+ 0834.mp4, 2.178571429, 2.892857143, 2.5
187
+ 0838.mp4, 2.178571429, 3.071428571, 2.5
188
+ 0841.mp4, 2.441176471, 2.882352941, 2.5
189
+ 0843.mp4, 2.558823529, 2.735294118, 2.5
190
+ 0854.mp4, 2.657142857, 2.485714286, 2.514285714
191
+ 0857.mp4, 2.285714286, 2.714285714, 2.514285714
192
+ 0859.mp4, 2.628571429, 2.885714286, 2.514285714
193
+ 0861.mp4, 2.228571429, 3.0, 2.514285714
194
+ 0869.mp4, 2.172413793, 2.827586207, 2.517241379
195
+ 0872.mp4, 2.294117647, 2.735294118, 2.529411765
196
+ 0887.mp4, 2.535714286, 2.607142857, 2.535714286
197
+ 0891.mp4, 2.357142857, 2.785714286, 2.535714286
198
+ 0893.mp4, 2.392857143, 2.821428571, 2.535714286
199
+ 0894.mp4, 2.214285714, 2.821428571, 2.535714286
200
+ 0900.mp4, 2.615384615, 2.692307692, 2.538461538
201
+ 0905.mp4, 2.846153846, 2.423076923, 2.538461538
202
+ 0908.mp4, 2.461538462, 2.615384615, 2.538461538
203
+ 0911.mp4, 2.384615385, 2.730769231, 2.538461538
204
+ 0915.mp4, 2.461538462, 2.846153846, 2.538461538
205
+ 0917.mp4, 2.230769231, 2.846153846, 2.538461538
206
+ 0929.mp4, 2.457142857, 2.828571429, 2.542857143
207
+ 0930.mp4, 2.314285714, 2.914285714, 2.542857143
208
+ 0939.mp4, 2.275862069, 2.965517241, 2.551724138
209
+ 0949.mp4, 2.529411765, 2.558823529, 2.558823529
210
+ 0950.mp4, 2.588235294, 2.647058824, 2.558823529
211
+ 0952.mp4, 2.382352941, 2.617647059, 2.558823529
212
+ 0955.mp4, 2.647058824, 2.529411765, 2.558823529
213
+ 0959.mp4, 2.607142857, 2.607142857, 2.571428571
214
+ 0962.mp4, 2.571428571, 2.785714286, 2.571428571
215
+ 0963.mp4, 2.371428571, 2.8, 2.571428571
216
+ 0968.mp4, 2.285714286, 2.857142857, 2.571428571
217
+ 0970.mp4, 2.485714286, 2.885714286, 2.571428571
218
+ 0983.mp4, 2.285714286, 2.785714286, 2.571428571
219
+ 0995.mp4, 2.384615385, 2.807692308, 2.576923077
220
+ 1006.mp4, 2.346153846, 2.961538462, 2.576923077
221
+ 1010.mp4, 2.653846154, 2.384615385, 2.576923077
222
+ 1011.mp4, 3.115384615, 2.384615385, 2.576923077
223
+ 1017.mp4, 2.75862069, 2.517241379, 2.586206897
224
+ 1019.mp4, 2.517241379, 2.793103448, 2.586206897
225
+ 1021.mp4, 2.379310345, 2.827586207, 2.586206897
226
+ 1027.mp4, 2.379310345, 3.0, 2.586206897
227
+ 1039.mp4, 2.5, 2.852941176, 2.588235294
228
+ 1041.mp4, 2.457142857, 2.685714286, 2.6
229
+ 1043.mp4, 2.257142857, 2.8, 2.6
230
+ 1047.mp4, 2.142857143, 3.028571429, 2.6
231
+ 1057.mp4, 2.464285714, 2.535714286, 2.607142857
232
+ 1060.mp4, 2.357142857, 2.892857143, 2.607142857
233
+ 1064.mp4, 2.615384615, 2.807692308, 2.615384615
234
+ 1068.mp4, 2.461538462, 3.076923077, 2.615384615
235
+ 1069.mp4, 2.538461538, 2.615384615, 2.615384615
236
+ 1070.mp4, 2.615384615, 2.730769231, 2.615384615
237
+ 1075.mp4, 2.461538462, 2.846153846, 2.615384615
238
+ 1079.mp4, 2.384615385, 2.961538462, 2.615384615
239
+ 1091.mp4, 2.384615385, 3.153846154, 2.615384615
240
+ 1098.mp4, 2.769230769, 2.615384615, 2.615384615
241
+ 1102.mp4, 2.5, 2.794117647, 2.617647059
242
+ 1111.mp4, 2.441176471, 2.852941176, 2.617647059
243
+ 1122.mp4, 2.586206897, 2.965517241, 2.620689655
244
+ 1128.mp4, 2.514285714, 2.885714286, 2.628571429
245
+ 1134.mp4, 2.428571429, 2.892857143, 2.642857143
246
+ 1149.mp4, 2.392857143, 2.964285714, 2.642857143
247
+ 1156.mp4, 2.5, 2.970588235, 2.647058824
248
+ 1161.mp4, 2.615384615, 2.769230769, 2.653846154
249
+ 1168.mp4, 2.346153846, 2.884615385, 2.653846154
250
+ 1174.mp4, 2.384615385, 3.0, 2.653846154
251
+ 1181.mp4, 2.384615385, 3.269230769, 2.653846154
252
+ 1187.mp4, 2.346153846, 3.076923077, 2.653846154
253
+ 1190.mp4, 3.153846154, 2.230769231, 2.653846154
254
+ 1198.mp4, 2.538461538, 3.038461538, 2.653846154
255
+ 1200.mp4, 2.384615385, 3.076923077, 2.653846154
256
+ 1204.mp4, 2.413793103, 3.0, 2.655172414
257
+ 1214.mp4, 2.551724138, 2.862068966, 2.655172414
258
+ 1215.mp4, 2.714285714, 2.828571429, 2.657142857
259
+ 1229.mp4, 2.571428571, 3.071428571, 2.678571429
260
+ 1231.mp4, 2.571428571, 2.964285714, 2.678571429
261
+ 1232.mp4, 2.392857143, 2.857142857, 2.678571429
262
+ 1236.mp4, 2.5, 2.857142857, 2.678571429
263
+ 1242.mp4, 2.571428571, 3.214285714, 2.678571429
264
+ 1243.mp4, 2.607142857, 2.714285714, 2.678571429
265
+ 1247.mp4, 2.571428571, 2.928571429, 2.678571429
266
+ 1248.mp4, 2.535714286, 2.964285714, 2.678571429
267
+ 1249.mp4, 2.428571429, 3.0, 2.678571429
268
+ 1256.mp4, 2.448275862, 2.931034483, 2.689655172
269
+ 1264.mp4, 2.448275862, 3.206896552, 2.689655172
270
+ 1278.mp4, 2.538461538, 2.807692308, 2.692307692
271
+ 1281.mp4, 2.538461538, 2.846153846, 2.692307692
272
+ 1283.mp4, 2.538461538, 3.0, 2.692307692
273
+ 1286.mp4, 2.423076923, 3.076923077, 2.692307692
274
+ 1290.mp4, 2.5, 3.192307692, 2.692307692
275
+ 1293.mp4, 2.538461538, 2.923076923, 2.692307692
276
+ 1300.mp4, 2.923076923, 2.615384615, 2.692307692
277
+ 1301.mp4, 2.153846154, 3.269230769, 2.692307692
278
+ 1311.mp4, 2.558823529, 3.058823529, 2.705882353
279
+ 1312.mp4, 2.764705882, 2.882352941, 2.705882353
280
+ 1329.mp4, 2.75, 3.035714286, 2.714285714
281
+ 1334.mp4, 2.464285714, 3.0, 2.714285714
282
+ 1335.mp4, 2.607142857, 2.892857143, 2.714285714
283
+ 1336.mp4, 2.321428571, 3.178571429, 2.714285714
284
+ 1339.mp4, 2.5, 3.035714286, 2.714285714
285
+ 1346.mp4, 2.392857143, 2.892857143, 2.714285714
286
+ 1349.mp4, 2.392857143, 3.142857143, 2.714285714
287
+ 1357.mp4, 2.827586207, 2.827586207, 2.724137931
288
+ 1369.mp4, 2.576923077, 2.807692308, 2.730769231
289
+ 1371.mp4, 2.615384615, 2.846153846, 2.730769231
290
+ 1384.mp4, 2.538461538, 3.0, 2.730769231
291
+ 1389.mp4, 2.5, 3.076923077, 2.730769231
292
+ 1403.mp4, 2.115384615, 2.961538462, 2.730769231
293
+ 1404.mp4, 2.423076923, 3.0, 2.730769231
294
+ 1407.mp4, 3.038461538, 2.5, 2.730769231
295
+ 1409.mp4, 2.615384615, 2.846153846, 2.730769231
296
+ 1410.mp4, 2.807692308, 2.923076923, 2.730769231
297
+ 1416.mp4, 2.461538462, 3.115384615, 2.730769231
298
+ 1419.mp4, 2.615384615, 3.307692308, 2.730769231
299
+ 1420.mp4, 2.735294118, 3.058823529, 2.735294118
300
+ 1421.mp4, 2.588235294, 2.941176471, 2.735294118
301
+ 1430.mp4, 2.4, 2.885714286, 2.742857143
302
+ 1435.mp4, 2.75, 2.571428571, 2.75
303
+ 1446.mp4, 2.464285714, 3.178571429, 2.75
304
+ 1447.mp4, 2.5, 3.178571429, 2.75
305
+ 1452.mp4, 2.571428571, 3.035714286, 2.75
306
+ 1454.mp4, 2.321428571, 3.071428571, 2.75
307
+ 1456.mp4, 2.321428571, 3.107142857, 2.75
308
+ 1460.mp4, 2.724137931, 3.0, 2.75862069
309
+ 1461.mp4, 2.724137931, 3.034482759, 2.75862069
310
+ 1465.mp4, 2.517241379, 3.0, 2.75862069
311
+ 1466.mp4, 2.586206897, 3.034482759, 2.75862069
312
+ 1471.mp4, 2.655172414, 2.862068966, 2.75862069
313
+ 1473.mp4, 2.75862069, 3.068965517, 2.75862069
314
+ 1487.mp4, 2.615384615, 3.192307692, 2.769230769
315
+ 1491.mp4, 2.730769231, 2.846153846, 2.769230769
316
+ 1495.mp4, 2.5, 2.923076923, 2.769230769
317
+ 1501.mp4, 2.730769231, 3.0, 2.769230769
318
+ 1502.mp4, 2.5, 3.076923077, 2.769230769
319
+ 1512.mp4, 2.5, 3.5, 2.769230769
320
+ 1524.mp4, 2.514285714, 3.371428571, 2.771428571
321
+ 1530.mp4, 2.607142857, 2.928571429, 2.785714286
322
+ 1532.mp4, 2.642857143, 3.0, 2.785714286
323
+ 1538.mp4, 2.785714286, 2.892857143, 2.785714286
324
+ 1541.mp4, 2.642857143, 2.964285714, 2.785714286
325
+ 1546.mp4, 2.357142857, 3.142857143, 2.785714286
326
+ 1570.mp4, 2.551724138, 3.172413793, 2.793103448
327
+ 1572.mp4, 2.75862069, 3.206896552, 2.793103448
328
+ 1593.mp4, 2.653846154, 2.769230769, 2.807692308
329
+ 1594.mp4, 2.615384615, 2.846153846, 2.807692308
330
+ 1596.mp4, 2.769230769, 2.846153846, 2.807692308
331
+ 1598.mp4, 2.807692308, 2.884615385, 2.807692308
332
+ 1600.mp4, 2.807692308, 2.923076923, 2.807692308
333
+ 1602.mp4, 2.807692308, 3.0, 2.807692308
334
+ 1604.mp4, 2.576923077, 3.0, 2.807692308
335
+ 1606.mp4, 2.615384615, 3.038461538, 2.807692308
336
+ 1607.mp4, 2.846153846, 3.038461538, 2.807692308
337
+ 1611.mp4, 2.730769231, 3.038461538, 2.807692308
338
+ 1615.mp4, 2.615384615, 3.076923077, 2.807692308
339
+ 1620.mp4, 2.576923077, 3.153846154, 2.807692308
340
+ 1622.mp4, 2.576923077, 3.153846154, 2.807692308
341
+ 1625.mp4, 2.846153846, 2.730769231, 2.807692308
342
+ 1632.mp4, 3.038461538, 2.423076923, 2.807692308
343
+ 1635.mp4, 2.423076923, 3.192307692, 2.807692308
344
+ 1637.mp4, 2.730769231, 2.961538462, 2.807692308
345
+ 1639.mp4, 2.884615385, 2.692307692, 2.807692308
346
+ 1643.mp4, 3.0, 2.923076923, 2.807692308
347
+ 1649.mp4, 2.846153846, 3.038461538, 2.807692308
348
+ 1653.mp4, 2.692307692, 3.153846154, 2.807692308
349
+ 1654.mp4, 2.730769231, 3.230769231, 2.807692308
350
+ 1661.mp4, 2.607142857, 2.964285714, 2.821428571
351
+ 1662.mp4, 2.678571429, 3.0, 2.821428571
352
+ 1665.mp4, 2.464285714, 3.178571429, 2.821428571
353
+ 1668.mp4, 2.714285714, 3.0, 2.821428571
354
+ 1678.mp4, 2.678571429, 2.678571429, 2.821428571
355
+ 1683.mp4, 2.647058824, 3.058823529, 2.823529412
356
+ 1684.mp4, 2.588235294, 3.029411765, 2.823529412
357
+ 1685.mp4, 2.558823529, 3.147058824, 2.823529412
358
+ 1688.mp4, 2.794117647, 2.911764706, 2.823529412
359
+ 1695.mp4, 2.655172414, 3.137931034, 2.827586207
360
+ 1705.mp4, 2.827586207, 3.068965517, 2.827586207
361
+ 1707.mp4, 2.542857143, 2.857142857, 2.828571429
362
+ 1722.mp4, 2.653846154, 3.269230769, 2.846153846
363
+ 1727.mp4, 2.769230769, 2.884615385, 2.846153846
364
+ 1729.mp4, 2.846153846, 2.923076923, 2.846153846
365
+ 1730.mp4, 2.846153846, 2.923076923, 2.846153846
366
+ 1731.mp4, 2.730769231, 2.923076923, 2.846153846
367
+ 1739.mp4, 2.923076923, 3.038461538, 2.846153846
368
+ 1752.mp4, 2.769230769, 3.423076923, 2.846153846
369
+ 1759.mp4, 2.730769231, 3.115384615, 2.846153846
370
+ 1766.mp4, 2.807692308, 2.846153846, 2.846153846
371
+ 1767.mp4, 2.653846154, 3.307692308, 2.846153846
372
+ 1773.mp4, 2.764705882, 3.058823529, 2.852941176
373
+ 1778.mp4, 2.794117647, 2.970588235, 2.852941176
374
+ 1795.mp4, 2.685714286, 3.2, 2.857142857
375
+ 1797.mp4, 2.642857143, 3.25, 2.857142857
376
+ 1807.mp4, 2.857142857, 2.714285714, 2.857142857
377
+ 1808.mp4, 2.678571429, 3.0, 2.857142857
378
+ 1820.mp4, 2.75862069, 3.172413793, 2.862068966
379
+ 1824.mp4, 2.724137931, 2.931034483, 2.862068966
380
+ 1848.mp4, 2.576923077, 2.923076923, 2.884615385
381
+ 1852.mp4, 2.730769231, 3.0, 2.884615385
382
+ 1853.mp4, 2.846153846, 3.0, 2.884615385
383
+ 1855.mp4, 2.269230769, 3.076923077, 2.884615385
384
+ 1863.mp4, 2.730769231, 3.153846154, 2.884615385
385
+ 1868.mp4, 2.730769231, 3.076923077, 2.884615385
386
+ 1887.mp4, 2.846153846, 3.192307692, 2.884615385
387
+ 1893.mp4, 2.6, 3.457142857, 2.885714286
388
+ 1896.mp4, 2.464285714, 3.214285714, 2.892857143
389
+ 1897.mp4, 2.607142857, 3.035714286, 2.892857143
390
+ 1909.mp4, 2.678571429, 2.857142857, 2.892857143
391
+ 1911.mp4, 2.892857143, 2.928571429, 2.892857143
392
+ 1913.mp4, 2.678571429, 3.25, 2.892857143
393
+ 1915.mp4, 2.551724138, 2.896551724, 2.896551724
394
+ 1928.mp4, 2.586206897, 3.137931034, 2.896551724
395
+ 1929.mp4, 2.862068966, 3.172413793, 2.896551724
396
+ 1941.mp4, 2.857142857, 2.971428571, 2.914285714
397
+ 1951.mp4, 2.692307692, 3.115384615, 2.923076923
398
+ 1952.mp4, 2.615384615, 3.153846154, 2.923076923
399
+ 1958.mp4, 2.769230769, 3.269230769, 2.923076923
400
+ 1963.mp4, 2.884615385, 2.807692308, 2.923076923
401
+ 1969.mp4, 2.576923077, 3.0, 2.923076923
402
+ 1972.mp4, 2.846153846, 3.076923077, 2.923076923
403
+ 1974.mp4, 2.653846154, 3.115384615, 2.923076923
404
+ 1981.mp4, 2.653846154, 3.153846154, 2.923076923
405
+ 1986.mp4, 2.884615385, 3.192307692, 2.923076923
406
+ 1987.mp4, 2.538461538, 3.192307692, 2.923076923
407
+ 1992.mp4, 2.769230769, 2.846153846, 2.923076923
408
+ 1996.mp4, 2.692307692, 3.269230769, 2.923076923
409
+ 2000.mp4, 2.807692308, 3.192307692, 2.923076923
410
+ 2013.mp4, 2.678571429, 3.071428571, 2.928571429
411
+ 2026.mp4, 3.0, 3.0, 2.931034483
412
+ 2033.mp4, 2.655172414, 3.275862069, 2.931034483
413
+ 2043.mp4, 2.735294118, 3.147058824, 2.941176471
414
+ 2060.mp4, 2.714285714, 3.2, 2.942857143
415
+ 2061.mp4, 2.714285714, 3.371428571, 2.942857143
416
+ 2062.mp4, 3.153846154, 2.923076923, 2.961538462
417
+ 2068.mp4, 2.730769231, 3.076923077, 2.961538462
418
+ 2071.mp4, 2.923076923, 3.076923077, 2.961538462
419
+ 2073.mp4, 2.961538462, 3.153846154, 2.961538462
420
+ 2074.mp4, 2.884615385, 3.153846154, 2.961538462
421
+ 2080.mp4, 2.653846154, 3.269230769, 2.961538462
422
+ 2092.mp4, 2.769230769, 3.307692308, 2.961538462
423
+ 2093.mp4, 2.576923077, 3.269230769, 2.961538462
424
+ 2094.mp4, 2.769230769, 3.230769231, 2.961538462
425
+ 2096.mp4, 2.615384615, 3.076923077, 2.961538462
426
+ 2097.mp4, 2.653846154, 2.884615385, 2.961538462
427
+ 2104.mp4, 2.769230769, 3.115384615, 2.961538462
428
+ 2106.mp4, 2.884615385, 3.153846154, 2.961538462
429
+ 2112.mp4, 2.607142857, 3.0, 2.964285714
430
+ 2132.mp4, 2.607142857, 3.142857143, 2.964285714
431
+ 2134.mp4, 3.035714286, 2.75, 2.964285714
432
+ 2138.mp4, 2.714285714, 3.214285714, 2.964285714
433
+ 2149.mp4, 2.75862069, 3.137931034, 2.965517241
434
+ 2164.mp4, 2.75862069, 3.275862069, 2.965517241
435
+ 2178.mp4, 2.705882353, 2.941176471, 2.970588235
436
+ 2179.mp4, 2.882352941, 2.941176471, 2.970588235
437
+ 2188.mp4, 2.628571429, 3.171428571, 2.971428571
438
+ 2190.mp4, 2.8, 3.257142857, 2.971428571
439
+ 2191.mp4, 2.571428571, 3.257142857, 2.971428571
440
+ 2199.mp4, 2.771428571, 3.571428571, 2.971428571
441
+ 2203.mp4, 2.961538462, 3.0, 3.0
442
+ 2205.mp4, 2.923076923, 3.038461538, 3.0
443
+ 2208.mp4, 2.892857143, 3.071428571, 3.0
444
+ 2213.mp4, 3.115384615, 3.076923077, 3.0
445
+ 2215.mp4, 2.821428571, 3.107142857, 3.0
446
+ 2232.mp4, 2.807692308, 3.192307692, 3.0
447
+ 2239.mp4, 2.75, 3.214285714, 3.0
448
+ 2240.mp4, 2.653846154, 3.230769231, 3.0
449
+ 2255.mp4, 2.692307692, 3.269230769, 3.0
450
+ 2264.mp4, 2.846153846, 3.346153846, 3.0
451
+ 2265.mp4, 2.730769231, 3.346153846, 3.0
452
+ 2268.mp4, 2.807692308, 3.384615385, 3.0
453
+ 2284.mp4, 2.653846154, 3.192307692, 3.0
454
+ 2291.mp4, 2.615384615, 3.346153846, 3.0
455
+ 2303.mp4, 3.0, 3.107142857, 3.0
456
+ 2306.mp4, 2.714285714, 3.357142857, 3.0
457
+ 2315.mp4, 2.827586207, 3.172413793, 3.0
458
+ 2317.mp4, 2.551724138, 3.275862069, 3.0
459
+ 2320.mp4, 2.75862069, 3.137931034, 3.0
460
+ 2326.mp4, 2.970588235, 2.911764706, 3.0
461
+ 2327.mp4, 2.558823529, 3.205882353, 3.0
462
+ 2328.mp4, 3.0, 3.117647059, 3.0
463
+ 2330.mp4, 3.029411765, 3.147058824, 3.0
464
+ 2332.mp4, 2.911764706, 3.117647059, 3.0
465
+ 2333.mp4, 3.0, 3.088235294, 3.0
466
+ 2335.mp4, 2.882352941, 3.176470588, 3.0
467
+ 2340.mp4, 2.794117647, 3.117647059, 3.0
468
+ 2343.mp4, 2.8, 3.285714286, 3.028571429
469
+ 2347.mp4, 2.764705882, 3.352941176, 3.029411765
470
+ 2359.mp4, 3.0, 2.911764706, 3.029411765
471
+ 2371.mp4, 2.793103448, 3.24137931, 3.034482759
472
+ 2376.mp4, 2.793103448, 3.034482759, 3.034482759
473
+ 2379.mp4, 2.793103448, 3.137931034, 3.034482759
474
+ 2384.mp4, 2.821428571, 3.714285714, 3.035714286
475
+ 2385.mp4, 2.821428571, 3.357142857, 3.035714286
476
+ 2389.mp4, 2.928571429, 3.357142857, 3.035714286
477
+ 2390.mp4, 3.142857143, 3.214285714, 3.035714286
478
+ 2391.mp4, 2.714285714, 3.464285714, 3.035714286
479
+ 2395.mp4, 3.142857143, 3.035714286, 3.035714286
480
+ 2396.mp4, 2.892857143, 3.071428571, 3.035714286
481
+ 2399.mp4, 2.714285714, 3.178571429, 3.035714286
482
+ 2408.mp4, 2.884615385, 3.192307692, 3.038461538
483
+ 2420.mp4, 2.884615385, 3.038461538, 3.038461538
484
+ 2428.mp4, 2.730769231, 3.192307692, 3.038461538
485
+ 2439.mp4, 2.884615385, 3.384615385, 3.038461538
486
+ 2440.mp4, 3.0, 3.384615385, 3.038461538
487
+ 2456.mp4, 2.653846154, 3.346153846, 3.038461538
488
+ 2462.mp4, 3.115384615, 3.230769231, 3.038461538
489
+ 2463.mp4, 2.961538462, 3.115384615, 3.038461538
490
+ 2466.mp4, 2.628571429, 3.171428571, 3.057142857
491
+ 2471.mp4, 2.628571429, 3.4, 3.057142857
492
+ 2473.mp4, 2.714285714, 3.714285714, 3.057142857
493
+ 2482.mp4, 2.794117647, 3.264705882, 3.058823529
494
+ 2484.mp4, 2.735294118, 3.264705882, 3.058823529
495
+ 2503.mp4, 2.965517241, 3.310344828, 3.068965517
496
+ 2505.mp4, 3.137931034, 3.068965517, 3.068965517
497
+ 2508.mp4, 2.586206897, 3.24137931, 3.068965517
498
+ 2510.mp4, 3.034482759, 3.310344828, 3.068965517
499
+ 2512.mp4, 2.928571429, 3.107142857, 3.071428571
500
+ 2514.mp4, 2.75, 3.535714286, 3.071428571
501
+ 2516.mp4, 2.75, 3.285714286, 3.071428571
502
+ 2524.mp4, 2.785714286, 3.428571429, 3.071428571
503
+ 2527.mp4, 2.892857143, 3.285714286, 3.071428571
504
+ 2532.mp4, 3.115384615, 3.115384615, 3.076923077
505
+ 2535.mp4, 2.884615385, 3.153846154, 3.076923077
506
+ 2545.mp4, 3.0, 3.346153846, 3.076923077
507
+ 2554.mp4, 2.961538462, 3.153846154, 3.076923077
508
+ 2564.mp4, 3.038461538, 3.192307692, 3.076923077
509
+ 2568.mp4, 3.076923077, 3.230769231, 3.076923077
510
+ 2576.mp4, 3.057142857, 2.971428571, 3.085714286
511
+ 2579.mp4, 3.0, 3.257142857, 3.085714286
512
+ 2587.mp4, 2.714285714, 3.542857143, 3.085714286
513
+ 2589.mp4, 2.970588235, 3.323529412, 3.088235294
514
+ 2596.mp4, 3.147058824, 3.147058824, 3.088235294
515
+ 2597.mp4, 3.029411765, 3.264705882, 3.088235294
516
+ 2598.mp4, 3.0, 3.117647059, 3.088235294
517
+ 2599.mp4, 2.794117647, 3.205882353, 3.088235294
518
+ 2604.mp4, 3.103448276, 3.137931034, 3.103448276
519
+ 2610.mp4, 2.620689655, 3.379310345, 3.103448276
520
+ 2621.mp4, 3.310344828, 3.137931034, 3.103448276
521
+ 2623.mp4, 3.034482759, 3.034482759, 3.103448276
522
+ 2624.mp4, 2.965517241, 3.103448276, 3.103448276
523
+ 2627.mp4, 3.068965517, 2.931034483, 3.103448276
524
+ 2629.mp4, 2.931034483, 3.482758621, 3.103448276
525
+ 2642.mp4, 2.821428571, 3.357142857, 3.107142857
526
+ 2650.mp4, 2.964285714, 3.25, 3.107142857
527
+ 2657.mp4, 2.828571429, 3.085714286, 3.114285714
528
+ 2660.mp4, 3.142857143, 3.171428571, 3.114285714
529
+ 2663.mp4, 2.742857143, 3.371428571, 3.114285714
530
+ 2665.mp4, 2.771428571, 3.428571429, 3.114285714
531
+ 2673.mp4, 3.038461538, 3.269230769, 3.115384615
532
+ 2679.mp4, 2.961538462, 3.384615385, 3.115384615
533
+ 2686.mp4, 3.076923077, 3.076923077, 3.115384615
534
+ 2690.mp4, 2.884615385, 3.153846154, 3.115384615
535
+ 2699.mp4, 3.076923077, 3.230769231, 3.115384615
536
+ 2716.mp4, 2.923076923, 3.192307692, 3.115384615
537
+ 2720.mp4, 3.653846154, 2.653846154, 3.115384615
538
+ 2723.mp4, 2.923076923, 3.192307692, 3.115384615
539
+ 2727.mp4, 3.076923077, 2.730769231, 3.115384615
540
+ 2730.mp4, 2.923076923, 3.307692308, 3.115384615
541
+ 2737.mp4, 3.323529412, 3.176470588, 3.117647059
542
+ 2750.mp4, 2.896551724, 3.172413793, 3.137931034
543
+ 2751.mp4, 3.068965517, 3.137931034, 3.137931034
544
+ 2754.mp4, 3.0, 3.24137931, 3.137931034
545
+ 2756.mp4, 2.827586207, 3.137931034, 3.137931034
546
+ 2772.mp4, 3.035714286, 3.285714286, 3.142857143
547
+ 2783.mp4, 3.0, 3.25, 3.142857143
548
+ 2795.mp4, 3.0, 3.535714286, 3.142857143
549
+ 2797.mp4, 2.714285714, 3.628571429, 3.142857143
550
+ 2805.mp4, 3.117647059, 3.294117647, 3.147058824
551
+ 2816.mp4, 2.923076923, 3.346153846, 3.153846154
552
+ 2824.mp4, 2.923076923, 3.5, 3.153846154
553
+ 2834.mp4, 2.846153846, 3.076923077, 3.153846154
554
+ 2836.mp4, 3.0, 3.576923077, 3.153846154
555
+ 2838.mp4, 3.192307692, 2.961538462, 3.153846154
556
+ 2845.mp4, 3.307692308, 3.153846154, 3.153846154
557
+ 2854.mp4, 2.846153846, 3.423076923, 3.153846154
558
+ 2870.mp4, 2.971428571, 3.571428571, 3.171428571
559
+ 2875.mp4, 3.275862069, 3.068965517, 3.172413793
560
+ 2877.mp4, 3.034482759, 3.448275862, 3.172413793
561
+ 2887.mp4, 3.034482759, 3.344827586, 3.172413793
562
+ 2890.mp4, 3.029411765, 3.294117647, 3.176470588
563
+ 2891.mp4, 3.058823529, 3.205882353, 3.176470588
564
+ 2894.mp4, 3.088235294, 3.235294118, 3.176470588
565
+ 2901.mp4, 3.178571429, 3.142857143, 3.178571429
566
+ 2903.mp4, 2.821428571, 3.357142857, 3.178571429
567
+ 2905.mp4, 3.035714286, 3.25, 3.178571429
568
+ 2909.mp4, 2.964285714, 3.0, 3.178571429
569
+ 2911.mp4, 2.964285714, 3.464285714, 3.178571429
570
+ 2927.mp4, 3.115384615, 3.461538462, 3.192307692
571
+ 2933.mp4, 3.038461538, 3.576923077, 3.192307692
572
+ 2937.mp4, 3.038461538, 3.192307692, 3.192307692
573
+ 2939.mp4, 3.384615385, 3.230769231, 3.192307692
574
+ 2941.mp4, 2.923076923, 3.269230769, 3.192307692
575
+ 2944.mp4, 2.923076923, 3.346153846, 3.192307692
576
+ 2957.mp4, 3.076923077, 3.538461538, 3.192307692
577
+ 2970.mp4, 2.885714286, 3.314285714, 3.2
578
+ 2971.mp4, 2.914285714, 3.4, 3.2
579
+ 2977.mp4, 3.088235294, 3.441176471, 3.205882353
580
+ 2996.mp4, 2.896551724, 3.344827586, 3.206896552
581
+ 2997.mp4, 3.0, 3.448275862, 3.206896552
582
+ 2998.mp4, 2.75862069, 3.24137931, 3.206896552
583
+ 3003.mp4, 3.0, 3.357142857, 3.214285714
584
+ 3012.mp4, 3.25, 3.321428571, 3.214285714
585
+ 3017.mp4, 3.035714286, 3.357142857, 3.214285714
586
+ 3019.mp4, 3.035714286, 3.107142857, 3.214285714
587
+ 3021.mp4, 2.714285714, 3.314285714, 3.228571429
588
+ 3023.mp4, 3.028571429, 3.457142857, 3.228571429
589
+ 3029.mp4, 3.115384615, 3.115384615, 3.230769231
590
+ 3035.mp4, 3.192307692, 3.192307692, 3.230769231
591
+ 3036.mp4, 2.923076923, 3.230769231, 3.230769231
592
+ 3047.mp4, 3.230769231, 3.346153846, 3.230769231
593
+ 3049.mp4, 3.076923077, 3.346153846, 3.230769231
594
+ 3053.mp4, 3.153846154, 3.384615385, 3.230769231
595
+ 3065.mp4, 3.192307692, 3.538461538, 3.230769231
596
+ 3069.mp4, 3.076923077, 3.269230769, 3.230769231
597
+ 3074.mp4, 2.961538462, 3.576923077, 3.230769231
598
+ 3096.mp4, 3.0, 3.615384615, 3.230769231
599
+ 3101.mp4, 3.264705882, 3.470588235, 3.235294118
600
+ 3102.mp4, 3.294117647, 3.235294118, 3.235294118
601
+ 3103.mp4, 3.235294118, 3.264705882, 3.235294118
602
+ 3118.mp4, 3.206896552, 3.172413793, 3.24137931
603
+ 3123.mp4, 3.137931034, 3.344827586, 3.24137931
604
+ 3134.mp4, 3.107142857, 3.607142857, 3.25
605
+ 3138.mp4, 3.107142857, 3.428571429, 3.25
606
+ 3154.mp4, 3.117647059, 3.264705882, 3.264705882
607
+ 3157.mp4, 3.0, 3.147058824, 3.264705882
608
+ 3165.mp4, 3.230769231, 3.346153846, 3.269230769
609
+ 3170.mp4, 2.923076923, 3.423076923, 3.269230769
610
+ 3175.mp4, 2.961538462, 3.692307692, 3.269230769
611
+ 3182.mp4, 3.269230769, 3.269230769, 3.269230769
612
+ 3183.mp4, 3.192307692, 3.269230769, 3.269230769
613
+ 3192.mp4, 2.961538462, 3.423076923, 3.269230769
614
+ 3195.mp4, 3.192307692, 3.461538462, 3.269230769
615
+ 3196.mp4, 3.230769231, 3.5, 3.269230769
616
+ 3197.mp4, 3.038461538, 3.5, 3.269230769
617
+ 3200.mp4, 3.038461538, 3.653846154, 3.269230769
618
+ 3203.mp4, 3.038461538, 3.653846154, 3.269230769
619
+ 3204.mp4, 3.307692308, 3.115384615, 3.269230769
620
+ 3212.mp4, 3.192307692, 3.230769231, 3.269230769
621
+ 3213.mp4, 3.115384615, 3.307692308, 3.269230769
622
+ 3216.mp4, 3.034482759, 3.551724138, 3.275862069
623
+ 3221.mp4, 3.137931034, 3.344827586, 3.275862069
624
+ 3223.mp4, 3.137931034, 3.344827586, 3.275862069
625
+ 3228.mp4, 3.034482759, 3.517241379, 3.275862069
626
+ 3252.mp4, 3.057142857, 3.514285714, 3.285714286
627
+ 3253.mp4, 3.0, 3.542857143, 3.285714286
628
+ 3254.mp4, 2.942857143, 3.571428571, 3.285714286
629
+ 3257.mp4, 2.571428571, 3.742857143, 3.285714286
630
+ 3260.mp4, 3.176470588, 3.352941176, 3.294117647
631
+ 3262.mp4, 3.205882353, 3.411764706, 3.294117647
632
+ 3263.mp4, 3.205882353, 3.294117647, 3.294117647
633
+ 3270.mp4, 3.076923077, 3.307692308, 3.307692308
634
+ 3281.mp4, 3.115384615, 3.461538462, 3.307692308
635
+ 3287.mp4, 3.230769231, 3.538461538, 3.307692308
636
+ 3290.mp4, 2.961538462, 3.538461538, 3.307692308
637
+ 3291.mp4, 3.038461538, 3.576923077, 3.307692308
638
+ 3295.mp4, 3.5, 3.076923077, 3.307692308
639
+ 3301.mp4, 3.115384615, 3.576923077, 3.307692308
640
+ 3305.mp4, 3.307692308, 3.576923077, 3.307692308
641
+ 3313.mp4, 3.192307692, 3.5, 3.307692308
642
+ 3315.mp4, 3.230769231, 3.576923077, 3.307692308
643
+ 3317.mp4, 3.153846154, 3.692307692, 3.307692308
644
+ 3320.mp4, 3.103448276, 3.275862069, 3.310344828
645
+ 3327.mp4, 3.206896552, 3.24137931, 3.310344828
646
+ 3328.mp4, 3.068965517, 3.586206897, 3.310344828
647
+ 3336.mp4, 3.257142857, 3.342857143, 3.314285714
648
+ 3339.mp4, 3.171428571, 3.657142857, 3.314285714
649
+ 3340.mp4, 3.357142857, 3.321428571, 3.321428571
650
+ 3341.mp4, 3.214285714, 3.535714286, 3.321428571
651
+ 3343.mp4, 3.178571429, 3.392857143, 3.321428571
652
+ 3347.mp4, 3.178571429, 3.464285714, 3.321428571
653
+ 3348.mp4, 3.25, 3.357142857, 3.321428571
654
+ 3350.mp4, 3.178571429, 3.5, 3.321428571
655
+ 3351.mp4, 3.178571429, 3.428571429, 3.321428571
656
+ 3359.mp4, 3.382352941, 3.205882353, 3.323529412
657
+ 3362.mp4, 3.323529412, 3.147058824, 3.323529412
658
+ 3365.mp4, 3.085714286, 3.514285714, 3.342857143
659
+ 3383.mp4, 3.24137931, 3.413793103, 3.344827586
660
+ 3389.mp4, 3.379310345, 3.517241379, 3.344827586
661
+ 3392.mp4, 3.461538462, 3.307692308, 3.346153846
662
+ 3393.mp4, 3.230769231, 3.384615385, 3.346153846
663
+ 3396.mp4, 3.0, 3.461538462, 3.346153846
664
+ 3399.mp4, 3.423076923, 3.5, 3.346153846
665
+ 3404.mp4, 2.961538462, 3.576923077, 3.346153846
666
+ 3406.mp4, 3.307692308, 3.307692308, 3.346153846
667
+ 3409.mp4, 3.384615385, 3.346153846, 3.346153846
668
+ 3422.mp4, 3.192307692, 3.653846154, 3.346153846
669
+ 3427.mp4, 3.576923077, 3.0, 3.346153846
670
+ 3441.mp4, 3.384615385, 3.307692308, 3.346153846
671
+ 3447.mp4, 2.923076923, 3.576923077, 3.346153846
672
+ 3448.mp4, 3.264705882, 3.352941176, 3.352941176
673
+ 3455.mp4, 3.382352941, 3.411764706, 3.352941176
674
+ 3468.mp4, 3.071428571, 3.571428571, 3.357142857
675
+ 3473.mp4, 3.214285714, 3.464285714, 3.357142857
676
+ 3476.mp4, 3.142857143, 3.607142857, 3.357142857
677
+ 3481.mp4, 3.514285714, 3.514285714, 3.371428571
678
+ 3490.mp4, 3.310344828, 3.517241379, 3.379310345
679
+ 3493.mp4, 3.24137931, 3.379310345, 3.379310345
680
+ 3497.mp4, 3.275862069, 3.413793103, 3.379310345
681
+ 3498.mp4, 3.068965517, 3.310344828, 3.379310345
682
+ 3514.mp4, 3.352941176, 3.382352941, 3.382352941
683
+ 3519.mp4, 3.230769231, 3.307692308, 3.384615385
684
+ 3522.mp4, 3.230769231, 3.384615385, 3.384615385
685
+ 3530.mp4, 3.153846154, 3.5, 3.384615385
686
+ 3537.mp4, 3.192307692, 3.615384615, 3.384615385
687
+ 3542.mp4, 3.269230769, 3.346153846, 3.384615385
688
+ 3555.mp4, 3.230769231, 3.461538462, 3.384615385
689
+ 3556.mp4, 3.076923077, 3.5, 3.384615385
690
+ 3558.mp4, 3.269230769, 3.538461538, 3.384615385
691
+ 3570.mp4, 3.457142857, 3.171428571, 3.4
692
+ 3579.mp4, 3.411764706, 3.411764706, 3.411764706
693
+ 3580.mp4, 3.352941176, 3.5, 3.411764706
694
+ 3581.mp4, 3.470588235, 3.323529412, 3.411764706
695
+ 3582.mp4, 3.352941176, 3.617647059, 3.411764706
696
+ 3583.mp4, 3.323529412, 3.529411765, 3.411764706
697
+ 3593.mp4, 3.310344828, 3.586206897, 3.413793103
698
+ 3594.mp4, 3.482758621, 3.517241379, 3.413793103
699
+ 3595.mp4, 3.206896552, 3.75862069, 3.413793103
700
+ 3608.mp4, 3.068965517, 3.551724138, 3.413793103
701
+ 3610.mp4, 3.310344828, 3.586206897, 3.413793103
702
+ 3614.mp4, 3.192307692, 3.5, 3.423076923
703
+ 3620.mp4, 3.269230769, 3.615384615, 3.423076923
704
+ 3625.mp4, 3.307692308, 3.307692308, 3.423076923
705
+ 3636.mp4, 3.230769231, 3.5, 3.423076923
706
+ 3638.mp4, 3.307692308, 3.5, 3.423076923
707
+ 3642.mp4, 3.269230769, 3.576923077, 3.423076923
708
+ 3650.mp4, 3.230769231, 3.730769231, 3.423076923
709
+ 3655.mp4, 3.307692308, 3.653846154, 3.423076923
710
+ 3656.mp4, 3.346153846, 3.346153846, 3.423076923
711
+ 3658.mp4, 3.307692308, 3.538461538, 3.423076923
712
+ 3664.mp4, 3.5, 3.285714286, 3.428571429
713
+ 3676.mp4, 3.171428571, 3.714285714, 3.428571429
714
+ 3680.mp4, 3.382352941, 3.647058824, 3.441176471
715
+ 3686.mp4, 3.235294118, 3.470588235, 3.441176471
716
+ 3695.mp4, 3.310344828, 3.586206897, 3.448275862
717
+ 3696.mp4, 3.275862069, 3.586206897, 3.448275862
718
+ 3701.mp4, 3.551724138, 3.448275862, 3.448275862
719
+ 3704.mp4, 3.344827586, 3.448275862, 3.448275862
720
+ 3708.mp4, 2.942857143, 3.4, 3.457142857
721
+ 3710.mp4, 3.423076923, 3.423076923, 3.461538462
722
+ 3712.mp4, 3.269230769, 3.461538462, 3.461538462
723
+ 3717.mp4, 3.384615385, 3.576923077, 3.461538462
724
+ 3718.mp4, 3.346153846, 3.576923077, 3.461538462
725
+ 3720.mp4, 3.423076923, 3.615384615, 3.461538462
726
+ 3721.mp4, 3.153846154, 3.615384615, 3.461538462
727
+ 3722.mp4, 3.307692308, 3.615384615, 3.461538462
728
+ 3728.mp4, 3.384615385, 3.5, 3.461538462
729
+ 3729.mp4, 3.5, 3.346153846, 3.461538462
730
+ 3732.mp4, 3.076923077, 3.538461538, 3.461538462
731
+ 3738.mp4, 3.076923077, 3.884615385, 3.461538462
732
+ 3740.mp4, 3.346153846, 3.384615385, 3.461538462
733
+ 3742.mp4, 3.384615385, 3.461538462, 3.461538462
734
+ 3759.mp4, 3.5, 3.5, 3.464285714
735
+ 3761.mp4, 3.285714286, 3.678571429, 3.464285714
736
+ 3762.mp4, 3.323529412, 3.529411765, 3.470588235
737
+ 3764.mp4, 3.441176471, 3.735294118, 3.470588235
738
+ 3765.mp4, 3.382352941, 3.441176471, 3.470588235
739
+ 3767.mp4, 3.5, 3.441176471, 3.470588235
740
+ 3771.mp4, 3.482758621, 3.551724138, 3.482758621
741
+ 3772.mp4, 3.413793103, 3.586206897, 3.482758621
742
+ 3781.mp4, 3.413793103, 3.413793103, 3.482758621
743
+ 3783.mp4, 3.413793103, 3.344827586, 3.482758621
744
+ 3785.mp4, 3.448275862, 3.275862069, 3.482758621
745
+ 3790.mp4, 3.230769231, 3.307692308, 3.5
746
+ 3792.mp4, 3.384615385, 3.384615385, 3.5
747
+ 3795.mp4, 3.615384615, 3.423076923, 3.5
748
+ 3797.mp4, 3.423076923, 3.423076923, 3.5
749
+ 3799.mp4, 3.230769231, 3.461538462, 3.5
750
+ 3814.mp4, 3.192307692, 3.615384615, 3.5
751
+ 3815.mp4, 3.384615385, 3.615384615, 3.5
752
+ 3818.mp4, 3.5, 3.615384615, 3.5
753
+ 3821.mp4, 3.107142857, 3.678571429, 3.5
754
+ 3826.mp4, 3.346153846, 3.807692308, 3.5
755
+ 3829.mp4, 3.730769231, 3.423076923, 3.5
756
+ 3836.mp4, 3.307692308, 3.307692308, 3.5
757
+ 3838.mp4, 3.321428571, 3.571428571, 3.5
758
+ 3839.mp4, 3.25, 3.821428571, 3.5
759
+ 3845.mp4, 3.529411765, 3.382352941, 3.5
760
+ 3860.mp4, 3.413793103, 3.482758621, 3.517241379
761
+ 3866.mp4, 3.482758621, 3.586206897, 3.517241379
762
+ 3871.mp4, 3.588235294, 3.558823529, 3.529411765
763
+ 3878.mp4, 3.464285714, 3.392857143, 3.535714286
764
+ 3880.mp4, 3.307692308, 3.538461538, 3.538461538
765
+ 3883.mp4, 3.538461538, 3.538461538, 3.538461538
766
+ 3884.mp4, 3.384615385, 3.576923077, 3.538461538
767
+ 3885.mp4, 3.384615385, 3.576923077, 3.538461538
768
+ 3886.mp4, 3.307692308, 3.615384615, 3.538461538
769
+ 3888.mp4, 3.384615385, 3.653846154, 3.538461538
770
+ 3895.mp4, 3.576923077, 3.461538462, 3.538461538
771
+ 3904.mp4, 3.461538462, 3.653846154, 3.538461538
772
+ 3908.mp4, 3.346153846, 3.730769231, 3.538461538
773
+ 3909.mp4, 3.461538462, 3.769230769, 3.538461538
774
+ 3918.mp4, 3.342857143, 3.542857143, 3.542857143
775
+ 3925.mp4, 3.482758621, 3.620689655, 3.551724138
776
+ 3928.mp4, 3.344827586, 3.827586207, 3.551724138
777
+ 3930.mp4, 3.413793103, 3.551724138, 3.551724138
778
+ 3948.mp4, 3.285714286, 3.857142857, 3.571428571
779
+ 3951.mp4, 3.428571429, 3.607142857, 3.571428571
780
+ 3952.mp4, 3.428571429, 3.678571429, 3.571428571
781
+ 3954.mp4, 3.5, 3.538461538, 3.576923077
782
+ 3957.mp4, 3.461538462, 3.615384615, 3.576923077
783
+ 3959.mp4, 3.5, 3.615384615, 3.576923077
784
+ 3963.mp4, 3.730769231, 3.692307692, 3.576923077
785
+ 3967.mp4, 3.538461538, 3.884615385, 3.576923077
786
+ 3971.mp4, 3.5, 3.730769231, 3.576923077
787
+ 3977.mp4, 3.192307692, 3.423076923, 3.576923077
788
+ 3991.mp4, 3.551724138, 3.551724138, 3.586206897
789
+ 3992.mp4, 3.379310345, 3.827586207, 3.586206897
790
+ 3995.mp4, 3.586206897, 3.517241379, 3.586206897
791
+ 4009.mp4, 3.285714286, 3.571428571, 3.6
792
+ 4018.mp4, 3.571428571, 3.642857143, 3.607142857
793
+ 4020.mp4, 3.576923077, 3.576923077, 3.615384615
794
+ 4022.mp4, 3.346153846, 3.615384615, 3.615384615
795
+ 4026.mp4, 3.615384615, 3.730769231, 3.615384615
796
+ 4031.mp4, 3.384615385, 3.5, 3.615384615
797
+ 4035.mp4, 3.346153846, 3.653846154, 3.615384615
798
+ 4037.mp4, 3.461538462, 3.653846154, 3.615384615
799
+ 4054.mp4, 3.275862069, 3.655172414, 3.620689655
800
+ 4057.mp4, 3.517241379, 3.620689655, 3.620689655
801
+ 4063.mp4, 3.551724138, 3.620689655, 3.620689655
802
+ 4064.mp4, 3.413793103, 3.448275862, 3.620689655
803
+ 4069.mp4, 3.571428571, 3.742857143, 3.628571429
804
+ 4072.mp4, 3.678571429, 3.642857143, 3.642857143
805
+ 4073.mp4, 3.714285714, 3.75, 3.642857143
806
+ 4078.mp4, 3.588235294, 3.588235294, 3.647058824
807
+ 4084.mp4, 3.576923077, 3.653846154, 3.653846154
808
+ 4087.mp4, 3.576923077, 3.692307692, 3.653846154
809
+ 4091.mp4, 3.615384615, 3.769230769, 3.653846154
810
+ 4098.mp4, 3.5, 3.615384615, 3.653846154
811
+ 4103.mp4, 3.576923077, 3.615384615, 3.653846154
812
+ 4105.mp4, 3.923076923, 3.692307692, 3.653846154
813
+ 4115.mp4, 3.551724138, 3.896551724, 3.655172414
814
+ 4120.mp4, 3.448275862, 3.793103448, 3.655172414
815
+ 4123.mp4, 3.344827586, 3.827586207, 3.655172414
816
+ 4125.mp4, 3.724137931, 3.586206897, 3.655172414
817
+ 4131.mp4, 3.535714286, 3.892857143, 3.678571429
818
+ 4132.mp4, 3.714285714, 3.964285714, 3.678571429
819
+ 4133.mp4, 3.464285714, 3.75, 3.678571429
820
+ 4142.mp4, 3.551724138, 3.655172414, 3.689655172
821
+ 4148.mp4, 3.517241379, 3.724137931, 3.689655172
822
+ 4149.mp4, 3.586206897, 3.862068966, 3.689655172
823
+ 4151.mp4, 3.653846154, 3.615384615, 3.692307692
824
+ 4152.mp4, 3.307692308, 3.692307692, 3.692307692
825
+ 4154.mp4, 3.538461538, 3.807692308, 3.692307692
826
+ 4156.mp4, 3.5, 3.846153846, 3.692307692
827
+ 4183.mp4, 3.371428571, 3.771428571, 3.714285714
828
+ 4187.mp4, 3.457142857, 3.914285714, 3.714285714
829
+ 4218.mp4, 3.692307692, 3.884615385, 3.730769231
830
+ 4222.mp4, 3.4, 3.857142857, 3.742857143
831
+ 4224.mp4, 3.678571429, 3.607142857, 3.75
832
+ 4228.mp4, 3.642857143, 3.75, 3.75
833
+ 4230.mp4, 3.655172414, 3.75862069, 3.75862069
834
+ 4231.mp4, 3.75862069, 3.793103448, 3.75862069
835
+ 4234.mp4, 3.586206897, 3.965517241, 3.75862069
836
+ 4237.mp4, 3.676470588, 3.794117647, 3.764705882
837
+ 4243.mp4, 3.576923077, 3.846153846, 3.769230769
838
+ 4244.mp4, 3.538461538, 3.846153846, 3.769230769
839
+ 4245.mp4, 3.576923077, 3.884615385, 3.769230769
840
+ 4254.mp4, 3.692307692, 3.846153846, 3.769230769
841
+ 4265.mp4, 3.807692308, 3.846153846, 3.769230769
842
+ 4269.mp4, 3.457142857, 4.0, 3.771428571
843
+ 4274.mp4, 3.642857143, 3.964285714, 3.785714286
844
+ 4281.mp4, 3.793103448, 4.0, 3.793103448
845
+ 4284.mp4, 3.793103448, 3.862068966, 3.793103448
846
+ 4288.mp4, 3.911764706, 3.764705882, 3.794117647
847
+ 4290.mp4, 3.485714286, 3.942857143, 3.8
848
+ 4294.mp4, 3.730769231, 3.769230769, 3.807692308
849
+ 4303.mp4, 3.730769231, 3.769230769, 3.807692308
850
+ 4311.mp4, 3.620689655, 3.827586207, 3.827586207
851
+ 4312.mp4, 3.896551724, 3.620689655, 3.827586207
852
+ 4314.mp4, 3.862068966, 3.724137931, 3.827586207
853
+ 4315.mp4, 3.655172414, 3.896551724, 3.827586207
854
+ 4316.mp4, 3.714285714, 3.571428571, 3.828571429
855
+ 4318.mp4, 3.885714286, 4.085714286, 3.828571429
856
+ 4333.mp4, 3.642857143, 3.892857143, 3.857142857
857
+ 4338.mp4, 3.642857143, 3.928571429, 3.857142857
858
+ 4343.mp4, 3.793103448, 3.724137931, 3.862068966
859
+ 4353.mp4, 3.730769231, 3.884615385, 3.884615385
860
+ 4356.mp4, 3.769230769, 4.0, 3.884615385
861
+ 4361.mp4, 3.896551724, 3.896551724, 3.896551724
862
+ 4362.mp4, 3.823529412, 3.852941176, 3.911764706
863
+ 4364.mp4, 3.882352941, 3.882352941, 3.911764706
864
+ 4370.mp4, 4.0, 3.846153846, 3.923076923
865
+ 4377.mp4, 3.961538462, 4.0, 3.923076923
866
+ 4380.mp4, 3.961538462, 3.961538462, 3.923076923
867
+ 4382.mp4, 3.75, 3.892857143, 3.928571429
868
+ 4386.mp4, 3.896551724, 3.931034483, 3.931034483
869
+ 4388.mp4, 3.793103448, 3.862068966, 3.931034483
870
+ 4389.mp4, 3.896551724, 3.827586207, 3.931034483
871
+ 4391.mp4, 3.735294118, 3.970588235, 3.941176471
872
+ 4393.mp4, 3.923076923, 4.0, 3.961538462
873
+ 4412.mp4, 3.961538462, 3.961538462, 4.0
874
+ 4413.mp4, 4.0, 3.961538462, 4.0
875
+ 4414.mp4, 3.714285714, 3.964285714, 4.0
876
+ 4418.mp4, 3.923076923, 4.038461538, 4.0
877
+ 4420.mp4, 3.892857143, 4.071428571, 4.0
878
+ 4421.mp4, 3.923076923, 4.115384615, 4.0
879
+ 4423.mp4, 3.961538462, 4.0, 4.0
880
+ 4426.mp4, 3.724137931, 3.896551724, 4.0
881
+ 4427.mp4, 4.068965517, 3.965517241, 4.0
882
+ 4433.mp4, 4.0, 4.034482759, 4.034482759
883
+ 4437.mp4, 4.035714286, 4.071428571, 4.035714286
884
+ 4438.mp4, 4.038461538, 4.115384615, 4.038461538
885
+ 4439.mp4, 3.961538462, 4.0, 4.038461538
886
+ 4448.mp4, 4.107142857, 4.142857143, 4.071428571
887
+ 4454.mp4, 4.115384615, 4.115384615, 4.076923077
888
+ 4455.mp4, 4.076923077, 4.307692308, 4.076923077
889
+ 4461.mp4, 4.034482759, 3.896551724, 4.103448276
890
+ 4462.mp4, 3.862068966, 4.137931034, 4.103448276
891
+ 4463.mp4, 3.785714286, 4.142857143, 4.107142857
892
+ 4469.mp4, 4.076923077, 4.038461538, 4.115384615
893
+ 4470.mp4, 3.884615385, 4.076923077, 4.115384615
894
+ 4474.mp4, 4.076923077, 4.230769231, 4.115384615
895
+ 4477.mp4, 4.103448276, 4.206896552, 4.137931034
896
+ 4482.mp4, 4.0, 3.961538462, 4.153846154
897
+ 4484.mp4, 4.307692308, 4.153846154, 4.153846154
898
+ 4497.mp4, 4.172413793, 4.172413793, 4.206896552
899
+ 4504.mp4, 4.071428571, 4.428571429, 4.25
900
+ 4517.mp4, 4.068965517, 4.379310345, 4.310344828
901
+ 4521.mp4, 4.230769231, 4.307692308, 4.346153846
902
+ 4523.mp4, 4.269230769, 4.461538462, 4.346153846
903
+ 4524.mp4, 4.142857143, 4.5, 4.357142857
904
+ 4526.mp4, 4.24137931, 4.517241379, 4.379310345
905
+ 4527.mp4, 4.307692308, 4.384615385, 4.384615385
906
+ 4535.mp4, 4.314285714, 4.4, 4.428571429
907
+ 4536.mp4, 4.171428571, 4.4, 4.457142857
908
+ 4538.mp4, 4.428571429, 4.428571429, 4.464285714
909
+ 4541.mp4, 4.4, 4.514285714, 4.685714286
examplar_data_labels/KoNViD/labels.txt ADDED
@@ -0,0 +1,1200 @@
1
+ KoNViD_1k_videos/4542323058.mp4, 8.008, 29.97002997002997, 3.22
2
+ KoNViD_1k_videos/9753414792.mp4, 8.008008, 29.97, 3.84
3
+ KoNViD_1k_videos/6935410837.mp4, 8.0, 25.0, 3.24
4
+ KoNViD_1k_videos/8171831850.mp4, 8.008, 29.97002997002997, 4.444029851
5
+ KoNViD_1k_videos/11465976586.mp4, 8.008008, 29.97, 3.94
6
+ KoNViD_1k_videos/4323977167.mp4, 8.008, 29.97002997002997, 3.26
7
+ KoNViD_1k_videos/7005478889.mp4, 8.008, 23.976023976023978, 2.86
8
+ KoNViD_1k_videos/6346026937.mp4, 8.008, 23.976023976023978, 2.14
9
+ KoNViD_1k_videos/10672253555.mp4, 8.008008, 29.97, 1.614785992
10
+ KoNViD_1k_videos/6812062788.mp4, 8.008, 29.97002997002997, 2.52
11
+ KoNViD_1k_videos/3930579113.mp4, 8.008, 29.97002997002997, 2.86
12
+ KoNViD_1k_videos/6981897920.mp4, 8.008, 29.97002997002997, 3.3
13
+ KoNViD_1k_videos/8957212602.mp4, 8.008008, 29.97, 3.56
14
+ KoNViD_1k_videos/4771991539.mp4, 8.0, 24.0, 3.0
15
+ KoNViD_1k_videos/8420230247.mp4, 8.008, 29.97002997002997, 3.96
16
+ KoNViD_1k_videos/4161940753.mp4, 8.0, 24.0, 3.66
17
+ KoNViD_1k_videos/8408744905.mp4, 8.008, 29.97002997002997, 3.5
18
+ KoNViD_1k_videos/13291575674.mp4, 8.008, 29.97002997002997, 2.66
19
+ KoNViD_1k_videos/6538025379.mp4, 8.008, 29.97002997002997, 3.8
20
+ KoNViD_1k_videos/7344072960.mp4, 8.0, 24.0, 2.769874477
21
+ KoNViD_1k_videos/9969637164.mp4, 8.008008, 29.97, 3.42
22
+ KoNViD_1k_videos/8688311915.mp4, 8.008, 29.97002997002997, 2.98
23
+ KoNViD_1k_videos/6929069239.mp4, 8.008, 29.97002997002997, 2.58
24
+ KoNViD_1k_videos/9248029519.mp4, 8.008008, 29.97, 2.7
25
+ KoNViD_1k_videos/3617102785.mp4, 8.008, 29.97002997002997, 3.08
26
+ KoNViD_1k_videos/9189459433.mp4, 8.0, 25.0, 2.22
27
+ KoNViD_1k_videos/3359662128.mp4, 8.008, 23.976023976023978, 3.08
28
+ KoNViD_1k_videos/6408325533.mp4, 8.008, 29.97002997002997, 1.26
29
+ KoNViD_1k_videos/5310216885.mp4, 8.008, 29.97002997002997, 2.24
30
+ KoNViD_1k_videos/9455400456.mp4, 8.0, 24.0, 2.22
31
+ KoNViD_1k_videos/7820483368.mp4, 8.008, 29.97002997002997, 2.76
32
+ KoNViD_1k_videos/4713503488.mp4, 8.008, 23.976023976023978, 3.88
33
+ KoNViD_1k_videos/6024650797.mp4, 8.008, 29.97002997002997, 2.54
34
+ KoNViD_1k_videos/8548046708.mp4, 8.008, 23.976023976023978, 3.36
35
+ KoNViD_1k_videos/8078127133.mp4, 8.008, 29.97002997002997, 2.618867925
36
+ KoNViD_1k_videos/5490599661.mp4, 8.008, 29.97002997002997, 2.88
37
+ KoNViD_1k_videos/6749780445.mp4, 8.008, 29.97002997002997, 2.02
38
+ KoNViD_1k_videos/5433352304.mp4, 8.008, 29.97002997002997, 2.22
39
+ KoNViD_1k_videos/12262452854.mp4, 8.0, 24.0, 4.24
40
+ KoNViD_1k_videos/4299787720.mp4, 8.008, 29.97002997002997, 2.849802372
41
+ KoNViD_1k_videos/4453069241.mp4, 8.008, 29.97002997002997, 2.92
42
+ KoNViD_1k_videos/4915461487.mp4, 8.0, 24.0, 2.24
43
+ KoNViD_1k_videos/8565847545.mp4, 8.008, 29.97002997002997, 3.08
44
+ KoNViD_1k_videos/7012065923.mp4, 8.008, 29.97002997002997, 3.48
45
+ KoNViD_1k_videos/4970919913.mp4, 8.0, 24.0, 2.32
46
+ KoNViD_1k_videos/5424989067.mp4, 8.008, 29.97002997002997, 2.4
47
+ KoNViD_1k_videos/3318634083.mp4, 8.008, 29.97002997002997, 2.3
48
+ KoNViD_1k_videos/8317937019.mp4, 8.008, 29.97002997002997, 2.48
49
+ KoNViD_1k_videos/8667534666.mp4, 8.008, 29.97002997002997, 4.02
50
+ KoNViD_1k_videos/6060761361.mp4, 8.008, 29.97002997002997, 3.06
51
+ KoNViD_1k_videos/4161747134.mp4, 8.008, 29.97002997002997, 3.48
52
+ KoNViD_1k_videos/8937776831.mp4, 8.008008, 29.97, 3.9
53
+ KoNViD_1k_videos/11237101334.mp4, 8.0, 24.0, 2.26
54
+ KoNViD_1k_videos/8182593817.mp4, 8.008, 29.97002997002997, 2.8
55
+ KoNViD_1k_videos/11403577283.mp4, 8.008, 23.976023976023978, 3.06
56
+ KoNViD_1k_videos/4482595368.mp4, 8.008, 29.97002997002997, 3.86
57
+ KoNViD_1k_videos/5564650233.mp4, 8.008, 29.97002997002997, 3.484251969
58
+ KoNViD_1k_videos/4626131441.mp4, 8.008, 23.976023976023978, 2.68
59
+ KoNViD_1k_videos/6100104774.mp4, 8.008, 29.97002997002997, 3.16
60
+ KoNViD_1k_videos/8088901520.mp4, 8.008, 29.97002997002997, 3.68
61
+ KoNViD_1k_videos/4557456762.mp4, 8.008, 29.97002997002997, 3.24
62
+ KoNViD_1k_videos/5622498540.mp4, 8.008, 29.97002997002997, 3.0
63
+ KoNViD_1k_videos/9086621277.mp4, 8.0, 24.0, 3.28
64
+ KoNViD_1k_videos/8097209231.mp4, 8.008, 29.97002997002997, 3.4
65
+ KoNViD_1k_videos/5263204150.mp4, 8.008, 29.97002997002997, 3.64
66
+ KoNViD_1k_videos/3407992311.mp4, 8.008, 29.97002997002997, 4.08
67
+ KoNViD_1k_videos/4472493392.mp4, 8.008, 23.976023976023978, 2.38
68
+ KoNViD_1k_videos/6350328925.mp4, 8.008, 23.976023976023978, 3.4
69
+ KoNViD_1k_videos/5512609909.mp4, 8.008, 29.97002997002997, 3.3
70
+ KoNViD_1k_videos/6047579133.mp4, 8.008, 29.97002997002997, 3.12
71
+ KoNViD_1k_videos/8592011515.mp4, 8.0, 24.0, 2.0
72
+ KoNViD_1k_videos/7834396986.mp4, 8.008, 29.97002997002997, 3.3
73
+ KoNViD_1k_videos/8554748075.mp4, 8.008, 23.976023976023978, 2.089068826
74
+ KoNViD_1k_videos/6073473868.mp4, 8.0, 24.0, 2.62
75
+ KoNViD_1k_videos/3887621073.mp4, 8.008, 29.97002997002997, 1.58
76
+ KoNViD_1k_videos/6874423527.mp4, 8.0, 24.0, 2.82
77
+ KoNViD_1k_videos/7123482247.mp4, 8.008, 23.976023976023978, 3.258064516
78
+ KoNViD_1k_videos/6008700446.mp4, 8.008, 29.97002997002997, 2.0
79
+ KoNViD_1k_videos/12816380173.mp4, 8.008, 23.976023976023978, 3.28
80
+ KoNViD_1k_videos/3504083604.mp4, 8.008, 29.97002997002997, 2.38
81
+ KoNViD_1k_videos/5962679589.mp4, 8.008, 29.97002997002997, 1.86
82
+ KoNViD_1k_videos/8236900278.mp4, 8.0, 24.0, 3.36
83
+ KoNViD_1k_videos/8909265380.mp4, 8.008008, 29.97, 2.78
84
+ KoNViD_1k_videos/3015973424.mp4, 8.008, 23.976023976023978, 3.1
85
+ KoNViD_1k_videos/11646665363.mp4, 8.008, 29.97002997002997, 3.7
86
+ KoNViD_1k_videos/8151400613.mp4, 8.008, 29.97002997002997, 2.98
87
+ KoNViD_1k_videos/6830500351.mp4, 8.008, 29.97002997002997, 2.94
88
+ KoNViD_1k_videos/6680559525.mp4, 8.008, 29.97002997002997, 3.3
89
+ KoNViD_1k_videos/5858864133.mp4, 8.008, 29.97002997002997, 2.48
90
+ KoNViD_1k_videos/7175971942.mp4, 8.008, 29.97002997002997, 2.32
91
+ KoNViD_1k_videos/3664548641.mp4, 8.0, 24.0, 3.121568627
92
+ KoNViD_1k_videos/10208239954.mp4, 8.008008, 29.97, 3.12
93
+ KoNViD_1k_videos/8773297527.mp4, 8.008008, 29.97, 3.72
94
+ KoNViD_1k_videos/8915047035.mp4, 8.008008, 29.97, 3.68
95
+ KoNViD_1k_videos/10602272313.mp4, 8.008008, 29.97, 3.1
96
+ KoNViD_1k_videos/5621374631.mp4, 8.008, 29.97002997002997, 2.52
97
+ KoNViD_1k_videos/9732912473.mp4, 8.008008, 29.97, 3.42
98
+ KoNViD_1k_videos/4991520511.mp4, 8.008, 29.97002997002997, 1.98
99
+ KoNViD_1k_videos/6811275923.mp4, 8.008, 23.976023976023978, 3.36
100
+ KoNViD_1k_videos/5169144673.mp4, 8.008, 29.97002997002997, 2.98
101
+ KoNViD_1k_videos/5790468384.mp4, 8.0, 24.0, 2.36
102
+ KoNViD_1k_videos/6571028091.mp4, 8.008, 29.97002997002997, 3.16
103
+ KoNViD_1k_videos/5920289173.mp4, 8.008, 29.97002997002997, 3.44
104
+ KoNViD_1k_videos/5032172516.mp4, 8.008, 29.97002997002997, 3.48
105
+ KoNViD_1k_videos/3715429501.mp4, 8.008, 29.97002997002997, 2.9
106
+ KoNViD_1k_videos/4970624827.mp4, 8.0, 25.0, 1.66
107
+ KoNViD_1k_videos/7521145978.mp4, 8.008, 29.97002997002997, 3.44
108
+ KoNViD_1k_videos/5825661459.mp4, 8.008, 29.97002997002997, 3.32
109
+ KoNViD_1k_videos/8479364057.mp4, 8.008, 23.976023976023978, 2.74
110
+ KoNViD_1k_videos/8148981356.mp4, 8.008, 29.97002997002997, 3.16
111
+ KoNViD_1k_videos/12567402335.mp4, 8.008008, 29.97, 1.8
112
+ KoNViD_1k_videos/8589300397.mp4, 8.008, 29.97002997002997, 3.6
113
+ KoNViD_1k_videos/10500520233.mp4, 8.008008, 29.97, 2.86
114
+ KoNViD_1k_videos/3934294391.mp4, 8.008, 29.97002997002997, 2.26
115
+ KoNViD_1k_videos/8387302863.mp4, 8.008, 29.97002997002997, 3.3
116
+ KoNViD_1k_videos/7402519316.mp4, 8.008, 29.97002997002997, 2.98
117
+ KoNViD_1k_videos/7651248048.mp4, 8.0, 24.0, 2.08
118
+ KoNViD_1k_videos/4005506137.mp4, 8.008, 23.976023976023978, 2.24
119
+ KoNViD_1k_videos/3459410764.mp4, 8.008, 29.97002997002997, 3.692946058
120
+ KoNViD_1k_videos/8039356416.mp4, 8.008, 29.97002997002997, 2.46
121
+ KoNViD_1k_videos/5698463243.mp4, 8.0, 25.0, 2.96
122
+ KoNViD_1k_videos/6865667129.mp4, 8.008, 29.97002997002997, 3.3
123
+ KoNViD_1k_videos/4956991827.mp4, 8.008, 29.97002997002997, 2.98
124
+ KoNViD_1k_videos/4879585643.mp4, 8.008, 29.97002997002997, 1.84
125
+ KoNViD_1k_videos/8267321210.mp4, 8.008, 29.97002997002997, 3.42
126
+ KoNViD_1k_videos/8240363350.mp4, 8.008, 29.97002997002997, 3.22
127
+ KoNViD_1k_videos/5590704113.mp4, 8.008, 29.97002997002997, 3.12
128
+ KoNViD_1k_videos/5045897151.mp4, 8.008, 29.97002997002997, 2.24
129
+ KoNViD_1k_videos/6178882861.mp4, 8.008, 29.97002997002997, 2.76
130
+ KoNViD_1k_videos/7068941889.mp4, 8.008, 29.97002997002997, 2.8
131
+ KoNViD_1k_videos/3459287580.mp4, 8.008, 23.976023976023978, 3.5
132
+ KoNViD_1k_videos/4593126957.mp4, 8.008, 29.97002997002997, 3.1
133
+ KoNViD_1k_videos/8562619627.mp4, 8.0, 25.0, 4.26
134
+ KoNViD_1k_videos/13036222674.mp4, 8.008008, 29.97, 3.56
135
+ KoNViD_1k_videos/11540616946.mp4, 8.008008, 29.97, 4.08
136
+ KoNViD_1k_videos/4979493912.mp4, 8.008, 29.97002997002997, 3.14
137
+ KoNViD_1k_videos/12859019173.mp4, 8.008, 29.97002997002997, 3.72
138
+ KoNViD_1k_videos/5222599467.mp4, 8.008, 29.97002997002997, 2.6
139
+ KoNViD_1k_videos/6809728071.mp4, 8.008, 29.97002997002997, 3.52
140
+ KoNViD_1k_videos/5075205526.mp4, 8.0, 24.0, 3.18
141
+ KoNViD_1k_videos/6783810490.mp4, 8.008, 29.97002997002997, 1.88
142
+ KoNViD_1k_videos/8230249635.mp4, 8.008, 29.97002997002997, 2.66
143
+ KoNViD_1k_videos/8092884510.mp4, 8.0, 24.0, 2.42
144
+ KoNViD_1k_videos/8625682550.mp4, 8.0, 24.0, 1.84
145
+ KoNViD_1k_videos/8717647202.mp4, 8.0, 25.0, 3.64
146
+ KoNViD_1k_videos/3337642103.mp4, 8.008, 29.97002997002997, 3.308641975
147
+ KoNViD_1k_videos/8931854960.mp4, 8.008008, 29.97, 3.7
148
+ KoNViD_1k_videos/9459232387.mp4, 8.008008, 29.97, 3.04
149
+ KoNViD_1k_videos/4866009712.mp4, 8.008, 29.97002997002997, 2.42
150
+ KoNViD_1k_videos/8679156964.mp4, 8.0, 25.0, 3.8
151
+ KoNViD_1k_videos/8372557828.mp4, 8.008, 29.97002997002997, 2.08
152
+ KoNViD_1k_videos/8591330227.mp4, 8.0, 25.0, 2.36
153
+ KoNViD_1k_videos/6173852301.mp4, 8.008, 29.97002997002997, 2.2
154
+ KoNViD_1k_videos/5620430600.mp4, 8.008, 29.97002997002997, 2.5
155
+ KoNViD_1k_videos/12412571563.mp4, 8.008008, 29.97, 3.5
156
+ KoNViD_1k_videos/8689455432.mp4, 8.008, 29.97002997002997, 2.68
157
+ KoNViD_1k_videos/9427595515.mp4, 8.008008, 29.97, 2.06
158
+ KoNViD_1k_videos/5379891328.mp4, 8.008, 29.97002997002997, 3.5
159
+ KoNViD_1k_videos/5596033251.mp4, 8.008, 23.976023976023978, 3.66
160
+ KoNViD_1k_videos/9387089233.mp4, 8.008008, 29.97, 2.68
161
+ KoNViD_1k_videos/7547384258.mp4, 8.008, 29.97002997002997, 2.44
162
+ KoNViD_1k_videos/7708735292.mp4, 8.0, 24.0, 2.46
163
+ KoNViD_1k_videos/3533765114.mp4, 8.0, 24.0, 2.28
164
+ KoNViD_1k_videos/5280283886.mp4, 8.008, 29.97002997002997, 3.86
165
+ KoNViD_1k_videos/10655599784.mp4, 8.008008, 29.97, 3.26
166
+ KoNViD_1k_videos/3321308714.mp4, 8.008, 23.976023976023978, 3.04
167
+ KoNViD_1k_videos/6953979425.mp4, 8.008, 29.97002997002997, 3.02
168
+ KoNViD_1k_videos/4095112467.mp4, 8.008, 29.97002997002997, 2.4
169
+ KoNViD_1k_videos/9521557660.mp4, 8.0, 24.0, 3.6
170
+ KoNViD_1k_videos/5111411754.mp4, 8.008, 29.97002997002997, 2.68
171
+ KoNViD_1k_videos/5072411175.mp4, 8.0, 24.0, 2.5
172
+ KoNViD_1k_videos/3589925544.mp4, 8.0, 24.0, 3.16
173
+ KoNViD_1k_videos/7123683275.mp4, 8.0, 24.0, 2.06
174
+ KoNViD_1k_videos/7074513729.mp4, 8.008, 29.97002997002997, 1.56
175
+ KoNViD_1k_videos/12465909365.mp4, 8.008008, 29.97, 3.88
176
+ KoNViD_1k_videos/3240926995.mp4, 8.008, 29.97002997002997, 3.1484375
177
+ KoNViD_1k_videos/12952440214.mp4, 8.008, 29.97002997002997, 2.5
178
+ KoNViD_1k_videos/7236880666.mp4, 8.008, 29.97002997002997, 3.5
179
+ KoNViD_1k_videos/9144368303.mp4, 8.008008, 23.976, 2.9
180
+ KoNViD_1k_videos/10864372176.mp4, 8.008, 29.97002997002997, 3.14
181
+ KoNViD_1k_videos/7171297739.mp4, 8.008, 23.976023976023978, 2.06
182
+ KoNViD_1k_videos/3561516939.mp4, 8.008, 29.97002997002997, 2.44
183
+ KoNViD_1k_videos/4820762619.mp4, 8.008, 29.97002997002997, 2.16
184
+ KoNViD_1k_videos/6868110990.mp4, 8.008, 29.97002997002997, 3.18
185
+ KoNViD_1k_videos/7173586391.mp4, 8.008, 23.976023976023978, 2.74
186
+ KoNViD_1k_videos/4391946048.mp4, 7.5742329999999995, 29.97002997002997, 3.3
187
+ KoNViD_1k_videos/7274180028.mp4, 8.008, 23.976023976023978, 3.76
188
+ KoNViD_1k_videos/6062892973.mp4, 8.008, 29.97002997002997, 2.86
189
+ KoNViD_1k_videos/5149271412.mp4, 8.008, 29.97002997002997, 2.82
190
+ KoNViD_1k_videos/7117529219.mp4, 8.008, 29.97002997002997, 2.86
191
+ KoNViD_1k_videos/6937597827.mp4, 8.008, 29.97002997002997, 3.28
192
+ KoNViD_1k_videos/12355531285.mp4, 8.008008, 29.97, 4.22
193
+ KoNViD_1k_videos/5266562897.mp4, 8.008, 29.97002997002997, 2.56
194
+ KoNViD_1k_videos/5921571324.mp4, 8.008, 29.97002997002997, 3.24
195
+ KoNViD_1k_videos/8478531549.mp4, 8.008, 29.97002997002997, 2.3
196
+ KoNViD_1k_videos/6422437813.mp4, 8.008, 29.97002997002997, 2.46
197
+ KoNViD_1k_videos/5215773911.mp4, 8.008, 29.97002997002997, 3.54
198
+ KoNViD_1k_videos/13276560083.mp4, 8.008, 29.97002997002997, 3.2
199
+ KoNViD_1k_videos/6806699713.mp4, 8.008, 29.97002997002997, 3.76
200
+ KoNViD_1k_videos/8257892715.mp4, 8.008, 29.97002997002997, 1.98
201
+ KoNViD_1k_videos/7879889838.mp4, 8.008, 29.97002997002997, 2.76
202
+ KoNViD_1k_videos/5218188823.mp4, 8.0, 24.0, 3.0
203
+ KoNViD_1k_videos/5044148124.mp4, 8.008, 29.97002997002997, 2.18
204
+ KoNViD_1k_videos/5917982116.mp4, 8.008, 29.97002997002997, 3.84
205
+ KoNViD_1k_videos/5143368697.mp4, 8.008, 29.97002997002997, 3.1
206
+ KoNViD_1k_videos/7390590300.mp4, 8.008, 23.976023976023978, 2.28
207
+ KoNViD_1k_videos/6866970306.mp4, 8.008, 29.97002997002997, 3.32
208
+ KoNViD_1k_videos/5026138416.mp4, 8.008, 29.97002997002997, 3.22
209
+ KoNViD_1k_videos/5268797415.mp4, 8.0, 24.0, 2.44
210
+ KoNViD_1k_videos/12965735905.mp4, 8.008008, 29.97, 3.18
211
+ KoNViD_1k_videos/4800261002.mp4, 8.008, 29.97002997002997, 3.98
212
+ KoNViD_1k_videos/9513714022.mp4, 8.008008, 29.97, 3.8
213
+ KoNViD_1k_videos/7495255120.mp4, 8.008, 23.976023976023978, 2.42
214
+ KoNViD_1k_videos/5223525504.mp4, 8.008, 29.97002997002997, 3.08
215
+ KoNViD_1k_videos/10127250986.mp4, 8.008008, 29.97, 2.3
216
+ KoNViD_1k_videos/6971905222.mp4, 8.008, 29.97002997002997, 2.6
217
+ KoNViD_1k_videos/6250048696.mp4, 8.008, 29.97002997002997, 3.08
218
+ KoNViD_1k_videos/12044525214.mp4, 8.008008, 29.97, 3.0
219
+ KoNViD_1k_videos/6181812275.mp4, 8.008, 29.97002997002997, 3.06
220
+ KoNViD_1k_videos/10716218856.mp4, 8.008008, 23.976, 2.06
221
+ KoNViD_1k_videos/6724048499.mp4, 8.008, 29.97002997002997, 2.56
222
+ KoNViD_1k_videos/12379836133.mp4, 8.0, 25.0, 4.24
223
+ KoNViD_1k_videos/4877128768.mp4, 8.008, 29.97002997002997, 3.42
224
+ KoNViD_1k_videos/10609570083.mp4, 8.008008, 29.97, 3.1
225
+ KoNViD_1k_videos/5319047612.mp4, 8.008, 29.97002997002997, 1.346863469
226
+ KoNViD_1k_videos/9000209228.mp4, 8.0, 25.0, 2.1
227
+ KoNViD_1k_videos/12923454185.mp4, 8.008, 29.97002997002997, 2.88
228
+ KoNViD_1k_videos/6868246166.mp4, 8.0, 24.0, 2.68
229
+ KoNViD_1k_videos/4397381332.mp4, 8.008, 29.97002997002997, 3.56
230
+ KoNViD_1k_videos/7381539666.mp4, 8.008, 29.97002997002997, 2.86
231
+ KoNViD_1k_videos/4971091773.mp4, 8.0, 24.0, 2.9
232
+ KoNViD_1k_videos/4655662252.mp4, 8.008, 29.97002997002997, 3.94
233
+ KoNViD_1k_videos/5947649803.mp4, 8.008, 29.97002997002997, 3.42
234
+ KoNViD_1k_videos/4375496636.mp4, 8.008, 29.97002997002997, 3.6
235
+ KoNViD_1k_videos/4980230630.mp4, 8.008, 29.97002997002997, 3.72
236
+ KoNViD_1k_videos/8140107150.mp4, 8.0, 24.0, 2.4
237
+ KoNViD_1k_videos/8017059368.mp4, 8.008, 29.97002997002997, 1.907172996
238
+ KoNViD_1k_videos/5118641893.mp4, 8.0, 24.0, 2.76
239
+ KoNViD_1k_videos/7313643370.mp4, 8.008, 29.97002997002997, 3.68
240
+ KoNViD_1k_videos/5542380796.mp4, 7.708333, 24.0, 1.72
241
+ KoNViD_1k_videos/4565232312.mp4, 8.008, 29.97002997002997, 2.64
242
+ KoNViD_1k_videos/4573266245.mp4, 8.008, 29.97002997002997, 2.58
243
+ KoNViD_1k_videos/4959664675.mp4, 8.0, 24.0, 2.06
244
+ KoNViD_1k_videos/6370967061.mp4, 8.0, 24.0, 2.2
245
+ KoNViD_1k_videos/7954124226.mp4, 8.008, 29.97002997002997, 3.8
246
+ KoNViD_1k_videos/6096480589.mp4, 8.008, 29.97002997002997, 2.4
247
+ KoNViD_1k_videos/8069024908.mp4, 8.008, 29.97002997002997, 2.12
248
+ KoNViD_1k_videos/6971190423.mp4, 8.008, 29.97002997002997, 3.82
249
+ KoNViD_1k_videos/4477789338.mp4, 8.0, 24.0, 2.72
250
+ KoNViD_1k_videos/9258419063.mp4, 8.0, 25.0, 3.74
251
+ KoNViD_1k_videos/8657921837.mp4, 8.0, 24.0, 4.3
252
+ KoNViD_1k_videos/8477990214.mp4, 8.0, 24.0, 2.1
253
+ KoNViD_1k_videos/7978973307.mp4, 8.008, 29.97002997002997, 1.78
254
+ KoNViD_1k_videos/13079468475.mp4, 8.008008, 29.97, 3.68
255
+ KoNViD_1k_videos/5694745216.mp4, 8.0, 24.0, 2.66
256
+ KoNViD_1k_videos/8495275628.mp4, 8.008, 29.97002997002997, 3.48
257
+ KoNViD_1k_videos/9765162404.mp4, 8.008008, 29.97, 2.08
258
+ KoNViD_1k_videos/4642629203.mp4, 8.008, 23.976023976023978, 3.52
259
+ KoNViD_1k_videos/10270536464.mp4, 8.008008, 29.97, 3.42
260
+ KoNViD_1k_videos/4914377539.mp4, 8.008, 29.97002997002997, 2.2
261
+ KoNViD_1k_videos/5493602442.mp4, 8.0, 24.0, 2.5
262
+ KoNViD_1k_videos/5737374844.mp4, 8.0, 24.0, 3.0
263
+ KoNViD_1k_videos/8072548302.mp4, 8.008, 29.97002997002997, 3.02
264
+ KoNViD_1k_videos/3357588876.mp4, 8.0, 25.0, 2.96
265
+ KoNViD_1k_videos/6932519097.mp4, 8.008, 29.97002997002997, 3.12
266
+ KoNViD_1k_videos/4615948555.mp4, 8.008, 23.976023976023978, 3.02
267
+ KoNViD_1k_videos/7146539585.mp4, 8.008, 29.97002997002997, 3.4
268
+ KoNViD_1k_videos/5322533709.mp4, 8.008, 29.97002997002997, 2.76
269
+ KoNViD_1k_videos/5823644068.mp4, 8.0, 24.0, 2.9
270
+ KoNViD_1k_videos/12758594775.mp4, 8.008, 29.97002997002997, 2.94
271
+ KoNViD_1k_videos/7246583832.mp4, 8.0, 24.0, 2.86
272
+ KoNViD_1k_videos/5995204968.mp4, 8.008, 29.97002997002997, 3.28
273
+ KoNViD_1k_videos/6313200205.mp4, 8.008, 29.97002997002997, 3.78
274
+ KoNViD_1k_videos/4966446054.mp4, 8.0, 24.0, 2.26
275
+ KoNViD_1k_videos/9740517228.mp4, 8.008008, 23.976, 3.8
276
+ KoNViD_1k_videos/3863745355.mp4, 8.008, 23.976023976023978, 4.02
277
+ KoNViD_1k_videos/5902098420.mp4, 8.008, 29.97002997002997, 3.4
278
+ KoNViD_1k_videos/8483400755.mp4, 8.008, 29.97002997002997, 1.46
279
+ KoNViD_1k_videos/5579073175.mp4, 8.0, 25.0, 3.32
280
+ KoNViD_1k_videos/5756899543.mp4, 8.008, 29.97002997002997, 3.7
281
+ KoNViD_1k_videos/5831253770.mp4, 8.008, 29.97002997002997, 3.84
282
+ KoNViD_1k_videos/6492110563.mp4, 8.008, 29.97002997002997, 3.92
283
+ KoNViD_1k_videos/9709640314.mp4, 8.008008, 29.97, 4.02
284
+ KoNViD_1k_videos/5263261013.mp4, 8.008, 29.97002997002997, 2.74
285
+ KoNViD_1k_videos/6812860936.mp4, 8.008, 29.97002997002997, 2.88
286
+ KoNViD_1k_videos/8223463984.mp4, 8.008, 29.97002997002997, 2.94
287
+ KoNViD_1k_videos/4779773181.mp4, 8.0, 25.0, 4.14
288
+ KoNViD_1k_videos/12816958564.mp4, 8.0, 25.0, 2.84
289
+ KoNViD_1k_videos/5583994811.mp4, 8.008, 29.97002997002997, 3.529166667
290
+ KoNViD_1k_videos/4866613201.mp4, 8.008, 29.97002997002997, 2.2
291
+ KoNViD_1k_videos/4729578673.mp4, 8.008, 29.97002997002997, 3.28
292
+ KoNViD_1k_videos/4260295122.mp4, 8.008, 29.97002997002997, 3.32
293
+ KoNViD_1k_videos/4620516469.mp4, 8.008, 23.976023976023978, 3.52
294
+ KoNViD_1k_videos/7700672030.mp4, 8.008, 29.97002997002997, 3.3
295
+ KoNViD_1k_videos/8281263977.mp4, 8.008, 29.97002997002997, 3.14
296
+ KoNViD_1k_videos/9525693928.mp4, 8.008008, 29.97, 3.86
297
+ KoNViD_1k_videos/5858312093.mp4, 8.008, 29.97002997002997, 2.4
298
+ KoNViD_1k_videos/4136131472.mp4, 8.0, 24.0, 3.2
299
+ KoNViD_1k_videos/10521676086.mp4, 8.008008, 29.97, 2.88
300
+ KoNViD_1k_videos/5031142078.mp4, 8.0, 24.0, 2.2
301
+ KoNViD_1k_videos/12626612584.mp4, 8.008008, 29.97, 3.1
302
+ KoNViD_1k_videos/8069868772.mp4, 8.008, 29.97002997002997, 3.32
303
+ KoNViD_1k_videos/5816132223.mp4, 8.008, 29.97002997002997, 3.58
304
+ KoNViD_1k_videos/5052341075.mp4, 8.008, 23.976023976023978, 3.48
305
+ KoNViD_1k_videos/4029116553.mp4, 8.008, 23.976023976023978, 3.82
306
+ KoNViD_1k_videos/6894958520.mp4, 8.008, 29.97002997002997, 3.24
307
+ KoNViD_1k_videos/3471567822.mp4, 8.008, 29.97002997002997, 1.52
308
+ KoNViD_1k_videos/11920895013.mp4, 8.008, 29.97002997002997, 3.42
309
+ KoNViD_1k_videos/8491617830.mp4, 8.008, 23.976023976023978, 3.5
310
+ KoNViD_1k_videos/6948249567.mp4, 8.008, 29.97002997002997, 3.1
311
+ KoNViD_1k_videos/7832506570.mp4, 8.008, 29.97002997002997, 3.92
312
+ KoNViD_1k_videos/10541657773.mp4, 8.0, 24.0, 3.74
313
+ KoNViD_1k_videos/3860380907.mp4, 8.008, 29.97002997002997, 3.6
314
+ KoNViD_1k_videos/8286421427.mp4, 8.0, 25.0, 4.14
315
+ KoNViD_1k_videos/10694139655.mp4, 8.008008, 23.976, 1.82
316
+ KoNViD_1k_videos/8480588816.mp4, 8.008, 29.97002997002997, 3.74
317
+ KoNViD_1k_videos/9089204842.mp4, 8.008008, 23.976, 3.02
318
+ KoNViD_1k_videos/5412442677.mp4, 8.008, 29.97002997002997, 2.3
319
+ KoNViD_1k_videos/4465669054.mp4, 8.0, 24.0, 2.92
320
+ KoNViD_1k_videos/6059786344.mp4, 8.008, 29.97002997002997, 2.66
321
+ KoNViD_1k_videos/13286356624.mp4, 8.008, 29.97002997002997, 3.6
322
+ KoNViD_1k_videos/4842303128.mp4, 8.008, 29.97002997002997, 3.22
323
+ KoNViD_1k_videos/8455309721.mp4, 8.008, 29.97002997002997, 3.42
324
+ KoNViD_1k_videos/12020933633.mp4, 8.0, 25.0, 3.36
325
+ KoNViD_1k_videos/6149591212.mp4, 8.008, 29.97002997002997, 2.14
326
+ KoNViD_1k_videos/3710887216.mp4, 8.008, 29.97002997002997, 2.92
327
+ KoNViD_1k_videos/7385692814.mp4, 8.008, 29.97002997002997, 3.92
328
+ KoNViD_1k_videos/7341359052.mp4, 8.008, 29.97002997002997, 3.16
329
+ KoNViD_1k_videos/6913535960.mp4, 8.0, 25.0, 4.106299213
330
+ KoNViD_1k_videos/3722785424.mp4, 8.008, 29.97002997002997, 3.571428571
331
+ KoNViD_1k_videos/7029270875.mp4, 8.008, 23.976023976023978, 3.94
332
+ KoNViD_1k_videos/5115193359.mp4, 8.008, 29.97002997002997, 3.08
333
+ KoNViD_1k_videos/5115335471.mp4, 8.008, 29.97002997002997, 2.16
334
+ KoNViD_1k_videos/6324422954.mp4, 8.008, 29.97002997002997, 2.76
335
+ KoNViD_1k_videos/5029525594.mp4, 8.008, 29.97002997002997, 3.6
336
+ KoNViD_1k_videos/9060241382.mp4, 8.0, 25.0, 4.04
337
+ KoNViD_1k_videos/5266893553.mp4, 8.008, 29.97002997002997, 1.74
338
+ KoNViD_1k_videos/6864942042.mp4, 8.0, 24.0, 2.72
339
+ KoNViD_1k_videos/4867899444.mp4, 8.008, 29.97002997002997, 3.08
340
+ KoNViD_1k_videos/8005708131.mp4, 8.008, 29.97002997002997, 3.06
341
+ KoNViD_1k_videos/7556474634.mp4, 8.008, 29.97002997002997, 2.94
342
+ KoNViD_1k_videos/9480678073.mp4, 8.008008, 29.97, 4.04
343
+ KoNViD_1k_videos/8728910927.mp4, 8.0, 24.0, 2.78
344
+ KoNViD_1k_videos/4803433208.mp4, 8.008, 29.97002997002997, 3.823529412
345
+ KoNViD_1k_videos/10013374164.mp4, 8.008008, 29.97, 3.74
346
+ KoNViD_1k_videos/9095814754.mp4, 8.008008, 23.976, 3.8
347
+ KoNViD_1k_videos/5769119862.mp4, 8.008, 29.97002997002997, 3.36
348
+ KoNViD_1k_videos/9536307250.mp4, 8.008008, 29.97, 3.16
349
+ KoNViD_1k_videos/4067736860.mp4, 8.008, 29.97002997002997, 3.16
350
+ KoNViD_1k_videos/5901642116.mp4, 8.008, 29.97002997002997, 2.74
351
+ KoNViD_1k_videos/5418971855.mp4, 8.008, 29.97002997002997, 2.68
352
+ KoNViD_1k_videos/6946901845.mp4, 8.008, 29.97002997002997, 3.46
353
+ KoNViD_1k_videos/5475555707.mp4, 8.008, 29.97002997002997, 1.74
354
+ KoNViD_1k_videos/6275674780.mp4, 8.0, 24.0, 1.82
355
+ KoNViD_1k_videos/8631895364.mp4, 8.008, 29.97002997002997, 3.66
356
+ KoNViD_1k_videos/11864752706.mp4, 8.008, 29.97002997002997, 3.9
357
+ KoNViD_1k_videos/6208528782.mp4, 8.008, 29.97002997002997, 2.66
358
+ KoNViD_1k_videos/6280406572.mp4, 8.0, 24.0, 2.62
359
+ KoNViD_1k_videos/6469679863.mp4, 8.008, 29.97002997002997, 2.4
360
+ KoNViD_1k_videos/5379210063.mp4, 8.008, 29.97002997002997, 2.84
361
+ KoNViD_1k_videos/3469529939.mp4, 8.008, 29.97002997002997, 2.8
362
+ KoNViD_1k_videos/7897815844.mp4, 8.008, 29.97002997002997, 2.96
363
+ KoNViD_1k_videos/6900620920.mp4, 8.0, 24.0, 4.04
364
+ KoNViD_1k_videos/6120978816.mp4, 8.0, 24.0, 2.04
365
+ KoNViD_1k_videos/6340576433.mp4, 8.008, 29.97002997002997, 3.46
366
+ KoNViD_1k_videos/5369095478.mp4, 8.008, 29.97002997002997, 2.6
367
+ KoNViD_1k_videos/5246827285.mp4, 8.0, 24.0, 3.16
368
+ KoNViD_1k_videos/9814654176.mp4, 8.008008, 29.97, 1.94
369
+ KoNViD_1k_videos/7585780552.mp4, 8.008, 29.97002997002997, 3.52
370
+ KoNViD_1k_videos/10925288616.mp4, 8.0, 24.0, 1.46
371
+ KoNViD_1k_videos/10249998364.mp4, 7.799466, 23.976, 3.4
372
+ KoNViD_1k_videos/6300501644.mp4, 8.0, 24.0, 3.4
373
+ KoNViD_1k_videos/5688399126.mp4, 8.008, 29.97002997002997, 3.78
374
+ KoNViD_1k_videos/4472730048.mp4, 8.008, 29.97002997002997, 1.622406639
375
+ KoNViD_1k_videos/12461764004.mp4, 8.008, 23.976023976023978, 2.16
376
+ KoNViD_1k_videos/9812197286.mp4, 8.008008, 29.97, 2.88
377
+ KoNViD_1k_videos/9633321480.mp4, 8.0, 24.0, 4.08
378
+ KoNViD_1k_videos/5633788395.mp4, 8.008, 29.97002997002997, 3.04
379
+ KoNViD_1k_videos/5611452609.mp4, 8.008, 29.97002997002997, 3.38
380
+ KoNViD_1k_videos/8355599976.mp4, 8.008, 23.976023976023978, 3.62
381
+ KoNViD_1k_videos/8991258489.mp4, 8.0, 24.0, 3.86
382
+ KoNViD_1k_videos/9633125766.mp4, 8.008008, 29.97, 3.42
383
+ KoNViD_1k_videos/6242642418.mp4, 8.008, 29.97002997002997, 3.398373984
384
+ KoNViD_1k_videos/8304230962.mp4, 8.0, 24.0, 2.32
385
+ KoNViD_1k_videos/5250183455.mp4, 8.008, 29.97002997002997, 3.1
386
+ KoNViD_1k_videos/8731237129.mp4, 8.0, 24.0, 2.48
387
+ KoNViD_1k_videos/10016324794.mp4, 8.008008, 23.976, 2.96
388
+ KoNViD_1k_videos/6028497716.mp4, 8.0, 24.0, 1.48
389
+ KoNViD_1k_videos/4996677282.mp4, 8.008, 29.97002997002997, 2.56
390
+ KoNViD_1k_videos/4265470174.mp4, 8.008, 29.97002997002997, 1.56
391
+ KoNViD_1k_videos/10411421606.mp4, 8.008008, 29.97, 2.52
392
+ KoNViD_1k_videos/9195826668.mp4, 8.008008, 29.97, 3.16
393
+ KoNViD_1k_videos/10314276135.mp4, 8.008008, 29.97, 3.22
394
+ KoNViD_1k_videos/5974403907.mp4, 8.008, 23.976023976023978, 3.1
395
+ KoNViD_1k_videos/4837816091.mp4, 8.008, 23.976023976023978, 3.54
396
+ KoNViD_1k_videos/11913590004.mp4, 8.0, 24.0, 2.96
397
+ KoNViD_1k_videos/5489207006.mp4, 8.008, 29.97002997002997, 4.04
398
+ KoNViD_1k_videos/8741017589.mp4, 8.008008, 29.97, 3.676470588
399
+ KoNViD_1k_videos/4211514567.mp4, 8.0, 24.0, 2.74
400
+ KoNViD_1k_videos/3516162818.mp4, 8.008, 29.97002997002997, 3.6
401
+ KoNViD_1k_videos/5206708965.mp4, 8.0, 24.0, 3.28
402
+ KoNViD_1k_videos/10115791113.mp4, 8.008008, 29.97, 3.68
403
+ KoNViD_1k_videos/4283455837.mp4, 8.008, 29.97002997002997, 3.68
404
+ KoNViD_1k_videos/6184163826.mp4, 8.008, 29.97002997002997, 1.52
405
+ KoNViD_1k_videos/9564154386.mp4, 8.008008, 29.97, 2.1
406
+ KoNViD_1k_videos/8415984918.mp4, 8.008, 23.976023976023978, 2.38
407
+ KoNViD_1k_videos/7616950966.mp4, 8.008, 29.97002997002997, 2.66
408
+ KoNViD_1k_videos/11367153336.mp4, 8.008, 29.97002997002997, 2.7
409
+ KoNViD_1k_videos/7396603710.mp4, 8.008, 29.97002997002997, 3.64
410
+ KoNViD_1k_videos/8955950667.mp4, 8.008008, 29.97, 3.66
411
+ KoNViD_1k_videos/7066660513.mp4, 8.008, 29.97002997002997, 3.8
412
+ KoNViD_1k_videos/5307981340.mp4, 8.008, 29.97002997002997, 3.52
413
+ KoNViD_1k_videos/8182527128.mp4, 8.008, 29.97002997002997, 3.96
414
+ KoNViD_1k_videos/5329960963.mp4, 8.0, 24.0, 3.04
415
+ KoNViD_1k_videos/5564209652.mp4, 8.008, 29.97002997002997, 2.46
416
+ KoNViD_1k_videos/4088281204.mp4, 8.008, 29.97002997002997, 3.43776824
417
+ KoNViD_1k_videos/3744752110.mp4, 8.0, 24.0, 2.48
418
+ KoNViD_1k_videos/5988574543.mp4, 8.008, 29.97002997002997, 2.9
419
+ KoNViD_1k_videos/8587927758.mp4, 8.0, 24.0, 2.14
420
+ KoNViD_1k_videos/6021943274.mp4, 8.008, 23.976023976023978, 3.14
421
+ KoNViD_1k_videos/4328729913.mp4, 8.008, 29.97002997002997, 3.42
422
+ KoNViD_1k_videos/8118947664.mp4, 8.008, 29.97002997002997, 3.76
423
+ KoNViD_1k_videos/7517202292.mp4, 8.008, 29.97002997002997, 2.38
424
+ KoNViD_1k_videos/5266882983.mp4, 8.008, 29.97002997002997, 2.3
425
+ KoNViD_1k_videos/5318369391.mp4, 8.008, 29.97002997002997, 3.3
426
+ KoNViD_1k_videos/8729448612.mp4, 8.008008, 29.97, 2.7
427
+ KoNViD_1k_videos/4495254692.mp4, 7.966291999999999, 23.976023976023978, 3.9
428
+ KoNViD_1k_videos/7192436890.mp4, 8.008, 29.97002997002997, 2.78
429
+ KoNViD_1k_videos/6027407703.mp4, 8.008, 29.97002997002997, 3.68
430
+ KoNViD_1k_videos/6294328198.mp4, 8.008, 29.97002997002997, 2.52
431
+ KoNViD_1k_videos/5956265529.mp4, 8.008, 29.97002997002997, 2.04
432
+ KoNViD_1k_videos/4783174196.mp4, 8.008, 29.97002997002997, 2.7
433
+ KoNViD_1k_videos/8403451337.mp4, 8.008, 29.97002997002997, 2.78
434
+ KoNViD_1k_videos/6621826551.mp4, 8.008, 29.97002997002997, 3.42
435
+ KoNViD_1k_videos/6238991743.mp4, 8.008, 29.97002997002997, 1.68
436
+ KoNViD_1k_videos/9515797648.mp4, 8.008008, 29.97, 3.78
437
+ KoNViD_1k_videos/8152764264.mp4, 8.008, 29.97002997002997, 3.14
438
+ KoNViD_1k_videos/5455582932.mp4, 8.008, 29.97002997002997, 3.352272727
439
+ KoNViD_1k_videos/5079146602.mp4, 8.0, 24.0, 3.5
440
+ KoNViD_1k_videos/3975461732.mp4, 8.008, 29.97002997002997, 3.72
441
+ KoNViD_1k_videos/4870064087.mp4, 8.008, 29.97002997002997, 2.96
442
+ KoNViD_1k_videos/10838546955.mp4, 8.008008, 29.97, 3.0
443
+ KoNViD_1k_videos/4204485800.mp4, 8.008, 29.97002997002997, 1.72972973
444
+ KoNViD_1k_videos/3431305644.mp4, 8.0, 24.0, 3.06
445
+ KoNViD_1k_videos/9830655414.mp4, 8.008008, 29.97, 3.56
446
+ KoNViD_1k_videos/10019485755.mp4, 8.008008, 29.97, 3.7
447
+ KoNViD_1k_videos/5279319629.mp4, 8.008, 29.97002997002997, 3.88
448
+ KoNViD_1k_videos/5363205423.mp4, 8.0, 24.0, 3.36
449
+ KoNViD_1k_videos/6497857295.mp4, 8.008, 29.97002997002997, 2.38
450
+ KoNViD_1k_videos/12893008605.mp4, 8.008008, 29.97, 3.55465587
451
+ KoNViD_1k_videos/6924551254.mp4, 8.008, 29.97002997002997, 2.72
452
+ KoNViD_1k_videos/4281880002.mp4, 8.0, 24.0, 3.2
453
+ KoNViD_1k_videos/5295005655.mp4, 8.0, 24.0, 2.72
454
+ KoNViD_1k_videos/5506184490.mp4, 8.008, 29.97002997002997, 2.04
455
+ KoNViD_1k_videos/10340094005.mp4, 8.008008, 29.97, 3.32
456
+ KoNViD_1k_videos/12788630734.mp4, 8.008, 29.97002997002997, 3.26
457
+ KoNViD_1k_videos/4279268661.mp4, 8.008, 29.97002997002997, 3.86
458
+ KoNViD_1k_videos/6279406877.mp4, 8.008, 29.97002997002997, 3.5
459
+ KoNViD_1k_videos/8253260683.mp4, 8.0, 24.0, 3.08
460
+ KoNViD_1k_videos/10625504535.mp4, 8.008008, 29.97, 2.1
461
+ KoNViD_1k_videos/5754220740.mp4, 8.0, 24.0, 2.74
462
+ KoNViD_1k_videos/5360605734.mp4, 8.008, 29.97002997002997, 3.84
463
+ KoNViD_1k_videos/10931366544.mp4, 8.008, 23.976023976023978, 3.64
464
+ KoNViD_1k_videos/7520968610.mp4, 8.008, 29.97002997002997, 3.1
465
+ KoNViD_1k_videos/8459254855.mp4, 8.008, 29.97002997002997, 2.74
466
+ KoNViD_1k_videos/9369433766.mp4, 7.5492159999999995, 23.976, 1.52
467
+ KoNViD_1k_videos/6838572151.mp4, 8.0, 24.0, 2.6
468
+ KoNViD_1k_videos/5419510404.mp4, 8.0, 25.0, 2.26
469
+ KoNViD_1k_videos/8685321568.mp4, 7.5416669999999995, 24.0, 2.18
470
+ KoNViD_1k_videos/8281880253.mp4, 8.0, 24.0, 2.4
471
+ KoNViD_1k_videos/7358630018.mp4, 8.008, 23.976023976023978, 3.6
472
+ KoNViD_1k_videos/7622034108.mp4, 8.008, 29.97002997002997, 3.44
473
+ KoNViD_1k_videos/8643275693.mp4, 8.008, 29.97002997002997, 3.22
474
+ KoNViD_1k_videos/3263720175.mp4, 8.008, 23.976023976023978, 3.04
475
+ KoNViD_1k_videos/6223028206.mp4, 8.008, 29.97002997002997, 1.68
476
+ KoNViD_1k_videos/8536919744.mp4, 8.0, 24.0, 1.22
477
+ KoNViD_1k_videos/6291796040.mp4, 8.008, 29.97002997002997, 3.3
478
+ KoNViD_1k_videos/12032110574.mp4, 8.0, 25.0, 2.64
479
+ KoNViD_1k_videos/7481217984.mp4, 8.008, 29.97002997002997, 3.2
480
+ KoNViD_1k_videos/8052505414.mp4, 8.008, 29.97002997002997, 3.92
481
+ KoNViD_1k_videos/9908050493.mp4, 7.540874, 29.97, 2.96
482
+ KoNViD_1k_videos/5409101281.mp4, 8.008, 29.97002997002997, 2.54
483
+ KoNViD_1k_videos/7812985864.mp4, 8.008, 29.97002997002997, 2.76
484
+ KoNViD_1k_videos/11086166076.mp4, 8.008008, 29.97, 3.28
485
+ KoNViD_1k_videos/4823416929.mp4, 8.008, 29.97002997002997, 1.98
486
+ KoNViD_1k_videos/6408210773.mp4, 8.008, 23.976023976023978, 2.92
487
+ KoNViD_1k_videos/5678628993.mp4, 8.008, 29.97002997002997, 3.567226891
488
+ KoNViD_1k_videos/5777975441.mp4, 8.008, 29.97002997002997, 2.3
489
+ KoNViD_1k_videos/5392736832.mp4, 8.0, 24.0, 2.42
490
+ KoNViD_1k_videos/12279842993.mp4, 8.008008, 29.97, 3.72
491
+ KoNViD_1k_videos/3629928686.mp4, 8.008, 29.97002997002997, 3.48
492
+ KoNViD_1k_videos/8717841410.mp4, 8.008, 29.97002997002997, 3.22
493
+ KoNViD_1k_videos/5660712402.mp4, 8.008, 29.97002997002997, 3.82
494
+ KoNViD_1k_videos/3825035703.mp4, 8.008, 29.97002997002997, 3.44
495
+ KoNViD_1k_videos/8965205906.mp4, 8.008008, 29.97, 3.6
496
+ KoNViD_1k_videos/8156543755.mp4, 8.008, 29.97002997002997, 3.14
497
+ KoNViD_1k_videos/7541374876.mp4, 8.008, 29.97002997002997, 3.38
498
+ KoNViD_1k_videos/7938991482.mp4, 8.008, 29.97002997002997, 2.52
499
+ KoNViD_1k_videos/4905336825.mp4, 8.008, 29.97002997002997, 1.828125
500
+ KoNViD_1k_videos/8921577578.mp4, 8.008008, 29.97, 3.48
501
+ KoNViD_1k_videos/6518466029.mp4, 8.0, 24.0, 3.02
502
+ KoNViD_1k_videos/6677269327.mp4, 8.008, 29.97002997002997, 3.72
503
+ KoNViD_1k_videos/7453163014.mp4, 8.0, 24.0, 3.02
504
+ KoNViD_1k_videos/5705527370.mp4, 8.008, 29.97002997002997, 3.74
505
+ KoNViD_1k_videos/9464591256.mp4, 8.008008, 23.976, 3.98
506
+ KoNViD_1k_videos/11013388203.mp4, 8.008, 23.976023976023978, 3.76
507
+ KoNViD_1k_videos/6021794037.mp4, 8.008, 29.97002997002997, 3.02
508
+ KoNViD_1k_videos/4933934170.mp4, 8.008, 29.97002997002997, 1.96
509
+ KoNViD_1k_videos/4761429801.mp4, 8.0, 24.0, 3.08
510
+ KoNViD_1k_videos/8424428827.mp4, 8.008, 29.97002997002997, 3.38
511
+ KoNViD_1k_videos/10008004183.mp4, 8.0, 25.0, 4.06
512
+ KoNViD_1k_videos/11383768246.mp4, 8.008, 29.97002997002997, 3.92
513
+ KoNViD_1k_videos/9083519636.mp4, 8.008008, 29.97, 2.86
514
+ KoNViD_1k_videos/4932309592.mp4, 8.008, 29.97002997002997, 2.94
515
+ KoNViD_1k_videos/5882410880.mp4, 8.008, 29.97002997002997, 3.52
516
+ KoNViD_1k_videos/6296962313.mp4, 8.008, 29.97002997002997, 2.64
517
+ KoNViD_1k_videos/7992241767.mp4, 8.008, 29.97002997002997, 3.64
518
+ KoNViD_1k_videos/6760172457.mp4, 8.0, 24.0, 3.24
519
+ KoNViD_1k_videos/5459565205.mp4, 7.9079, 29.97002997002997, 3.52
520
+ KoNViD_1k_videos/5260933475.mp4, 8.0, 24.0, 3.08
521
+ KoNViD_1k_videos/5147152978.mp4, 8.008, 29.97002997002997, 2.597609562
522
+ KoNViD_1k_videos/8751856572.mp4, 8.008008, 29.97, 3.9
523
+ KoNViD_1k_videos/7977126554.mp4, 8.008, 29.97002997002997, 3.96
524
+ KoNViD_1k_videos/12633847193.mp4, 8.008008, 29.97, 2.92
525
+ KoNViD_1k_videos/12704590184.mp4, 8.008, 23.976023976023978, 3.9
526
+ KoNViD_1k_videos/8195185860.mp4, 8.008, 29.97002997002997, 3.64
527
+ KoNViD_1k_videos/4526948676.mp4, 8.008, 29.97002997002997, 3.34
528
+ KoNViD_1k_videos/8953002659.mp4, 8.008008, 29.97, 3.9
529
+ KoNViD_1k_videos/9478631712.mp4, 8.008008, 29.97, 3.706349206
530
+ KoNViD_1k_videos/9382662040.mp4, 8.0, 24.0, 2.76
531
+ KoNViD_1k_videos/3763235915.mp4, 8.0, 24.0, 2.343873518
532
+ KoNViD_1k_videos/9617144245.mp4, 7.741073999999999, 29.97, 3.82
533
+ KoNViD_1k_videos/9917916796.mp4, 8.008008, 29.97, 4.04
534
+ KoNViD_1k_videos/10595581344.mp4, 8.008008, 29.97, 4.14
535
+ KoNViD_1k_videos/5479646341.mp4, 8.008, 29.97002997002997, 3.02
536
+ KoNViD_1k_videos/8331470149.mp4, 8.008, 23.976023976023978, 3.02
537
+ KoNViD_1k_videos/6320892865.mp4, 8.008, 23.976023976023978, 2.84
538
+ KoNViD_1k_videos/7287123594.mp4, 8.008, 29.97002997002997, 3.48
539
+ KoNViD_1k_videos/9659398278.mp4, 8.008008, 29.97, 3.3
540
+ KoNViD_1k_videos/5150451769.mp4, 8.0, 25.0, 1.84
541
+ KoNViD_1k_videos/8758952624.mp4, 8.008008, 29.97, 4.3
542
+ KoNViD_1k_videos/5653702857.mp4, 8.008, 29.97002997002997, 2.86
543
+ KoNViD_1k_videos/8536800170.mp4, 8.0, 24.0, 3.12
544
+ KoNViD_1k_videos/7591242800.mp4, 8.0, 25.0, 3.16
545
+ KoNViD_1k_videos/10102107193.mp4, 8.0, 25.0, 2.18
546
+ KoNViD_1k_videos/4177089041.mp4, 8.008, 29.97002997002997, 1.66
547
+ KoNViD_1k_videos/8336226077.mp4, 8.008, 29.97002997002997, 2.08
548
+ KoNViD_1k_videos/6333567508.mp4, 8.008, 29.97002997002997, 3.18
549
+ KoNViD_1k_videos/3591184129.mp4, 8.008, 29.97002997002997, 2.58
550
+ KoNViD_1k_videos/8195899669.mp4, 8.008, 29.97002997002997, 2.06
551
+ KoNViD_1k_videos/5101801222.mp4, 8.008, 29.97002997002997, 3.32
552
+ KoNViD_1k_videos/8039435442.mp4, 8.008, 29.97002997002997, 3.1
553
+ KoNViD_1k_videos/6629527951.mp4, 8.0, 24.0, 2.25498008
554
+ KoNViD_1k_videos/8610122536.mp4, 8.008, 29.97002997002997, 3.58
555
+ KoNViD_1k_videos/4773686738.mp4, 8.008, 29.97002997002997, 2.32
556
+ KoNViD_1k_videos/6661657005.mp4, 8.008, 29.97002997002997, 1.9
557
+ KoNViD_1k_videos/9726323401.mp4, 8.008008, 29.97, 3.272
558
+ KoNViD_1k_videos/6273360207.mp4, 8.008, 29.97002997002997, 3.2
559
+ KoNViD_1k_videos/7057951213.mp4, 8.008, 29.97002997002997, 3.54
560
+ KoNViD_1k_videos/6849313907.mp4, 8.008, 29.97002997002997, 3.48
561
+ KoNViD_1k_videos/8663100289.mp4, 8.0, 24.0, 2.034883721
562
+ KoNViD_1k_videos/8583146942.mp4, 8.008, 29.97002997002997, 2.54
563
+ KoNViD_1k_videos/9651230522.mp4, 8.008008, 29.97, 2.56
564
+ KoNViD_1k_videos/8918155991.mp4, 8.008008, 29.97, 3.46
565
+ KoNViD_1k_videos/8587203405.mp4, 8.008, 29.97002997002997, 3.54
566
+ KoNViD_1k_videos/5941263615.mp4, 8.0, 25.0, 3.54
567
+ KoNViD_1k_videos/4850105788.mp4, 8.008, 29.97002997002997, 3.06
568
+ KoNViD_1k_videos/12099271596.mp4, 8.0, 24.0, 2.4
569
+ KoNViD_1k_videos/6625655725.mp4, 8.008, 29.97002997002997, 3.62
570
+ KoNViD_1k_videos/5133219451.mp4, 7.708333, 24.0, 2.32
571
+ KoNViD_1k_videos/7734422826.mp4, 8.008, 29.97002997002997, 2.82
572
+ KoNViD_1k_videos/5134417428.mp4, 8.008, 29.97002997002997, 2.26
573
+ KoNViD_1k_videos/8487362151.mp4, 8.008, 29.97002997002997, 3.54
574
+ KoNViD_1k_videos/4956521693.mp4, 8.008, 29.97002997002997, 2.8
575
+ KoNViD_1k_videos/9725289895.mp4, 8.008008, 29.97, 3.58
576
+ KoNViD_1k_videos/6157684615.mp4, 8.008, 29.97002997002997, 3.36
577
+ KoNViD_1k_videos/8527434285.mp4, 8.008, 29.97002997002997, 2.02
578
+ KoNViD_1k_videos/3583315023.mp4, 8.0, 24.0, 2.84
579
+ KoNViD_1k_videos/4801224837.mp4, 8.0, 24.0, 3.46
580
+ KoNViD_1k_videos/5739175520.mp4, 8.008, 29.97002997002997, 3.68
581
+ KoNViD_1k_videos/4775312025.mp4, 8.008, 29.97002997002997, 2.08
582
+ KoNViD_1k_videos/12186470506.mp4, 8.008, 29.97002997002997, 2.9
583
+ KoNViD_1k_videos/8616015387.mp4, 8.008, 29.97002997002997, 3.54
584
+ KoNViD_1k_videos/6096558457.mp4, 8.008, 29.97002997002997, 2.6
585
+ KoNViD_1k_videos/8331282929.mp4, 8.008, 23.976023976023978, 3.76
586
+ KoNViD_1k_videos/4733600732.mp4, 8.008, 29.97002997002997, 3.48
587
+ KoNViD_1k_videos/6381011947.mp4, 8.008, 29.97002997002997, 3.26
588
+ KoNViD_1k_videos/5170915466.mp4, 8.008, 29.97002997002997, 2.88
589
+ KoNViD_1k_videos/7644822866.mp4, 8.008, 29.97002997002997, 3.78
590
+ KoNViD_1k_videos/8604343265.mp4, 8.008, 23.976023976023978, 2.54
591
+ KoNViD_1k_videos/5273433233.mp4, 8.008, 29.97002997002997, 3.48
592
+ KoNViD_1k_videos/3627681183.mp4, 8.008, 29.97002997002997, 3.44
593
+ KoNViD_1k_videos/5815295661.mp4, 8.008, 29.97002997002997, 3.06
594
+ KoNViD_1k_videos/6015938306.mp4, 8.008, 29.97002997002997, 2.5
595
+ KoNViD_1k_videos/7848940332.mp4, 8.008, 29.97002997002997, 1.86
596
+ KoNViD_1k_videos/12330492194.mp4, 8.008008, 29.97, 3.26
597
+ KoNViD_1k_videos/5914289359.mp4, 8.008, 29.97002997002997, 3.56
598
+ KoNViD_1k_videos/3861648412.mp4, 8.0, 24.0, 2.68
599
+ KoNViD_1k_videos/5553355582.mp4, 8.008, 29.97002997002997, 2.22
600
+ KoNViD_1k_videos/6210381577.mp4, 8.0, 24.0, 3.22
601
+ KoNViD_1k_videos/11469243925.mp4, 8.008008, 29.97, 2.28
602
+ KoNViD_1k_videos/11944451643.mp4, 8.008, 29.97002997002997, 3.16
603
+ KoNViD_1k_videos/5014400375.mp4, 8.008, 29.97002997002997, 2.28
604
+ KoNViD_1k_videos/8724380666.mp4, 8.008, 29.97002997002997, 1.4
605
+ KoNViD_1k_videos/8541047953.mp4, 8.008, 29.97002997002997, 3.28
606
+ KoNViD_1k_videos/11095294653.mp4, 8.008008, 29.97, 1.86
607
+ KoNViD_1k_videos/8886176135.mp4, 8.008008, 29.97, 3.56
608
+ KoNViD_1k_videos/4969714039.mp4, 8.008, 29.97002997002997, 3.08
609
+ KoNViD_1k_videos/13255378043.mp4, 8.008, 29.97002997002997, 3.12
610
+ KoNViD_1k_videos/11617643563.mp4, 8.008, 29.97002997002997, 3.4
611
+ KoNViD_1k_videos/5999732346.mp4, 8.0, 24.0, 2.9
612
+ KoNViD_1k_videos/10837320963.mp4, 8.0, 25.0, 3.7
613
+ KoNViD_1k_videos/8129160337.mp4, 8.008, 29.97002997002997, 3.12
614
+ KoNViD_1k_videos/6794539977.mp4, 8.0, 25.0, 3.4
615
+ KoNViD_1k_videos/8733102851.mp4, 8.008, 29.97002997002997, 3.08
616
+ KoNViD_1k_videos/4373113207.mp4, 8.008, 29.97002997002997, 3.52
617
+ KoNViD_1k_videos/6340146486.mp4, 8.0, 25.0, 2.48
618
+ KoNViD_1k_videos/4652600104.mp4, 8.008, 29.97002997002997, 3.16
619
+ KoNViD_1k_videos/5559135240.mp4, 8.0, 24.0, 2.94
620
+ KoNViD_1k_videos/6985991227.mp4, 8.008, 29.97002997002997, 1.62
621
+ KoNViD_1k_videos/5632758635.mp4, 8.008, 29.97002997002997, 3.64
622
+ KoNViD_1k_videos/3339962845.mp4, 8.008, 29.97002997002997, 4.64
623
+ KoNViD_1k_videos/5636445745.mp4, 8.008, 29.97002997002997, 2.52
624
+ KoNViD_1k_videos/3770747002.mp4, 8.008, 29.97002997002997, 1.86
625
+ KoNViD_1k_videos/8643336872.mp4, 8.008, 29.97002997002997, 1.68
626
+ KoNViD_1k_videos/8588854353.mp4, 8.008, 23.976023976023978, 3.88
627
+ KoNViD_1k_videos/5668795950.mp4, 8.0, 24.0, 2.86
628
+ KoNViD_1k_videos/6497033987.mp4, 8.008, 29.97002997002997, 1.8
629
+ KoNViD_1k_videos/4677538291.mp4, 8.008, 29.97002997002997, 1.96
630
+ KoNViD_1k_videos/12274927193.mp4, 8.008, 23.976023976023978, 2.98
631
+ KoNViD_1k_videos/9256674744.mp4, 8.008008, 29.97, 2.26
632
+ KoNViD_1k_videos/5040855662.mp4, 8.008, 29.97002997002997, 3.18
633
+ KoNViD_1k_videos/9093682547.mp4, 8.008008, 23.976, 3.1
634
+ KoNViD_1k_videos/8716622241.mp4, 8.008, 29.97002997002997, 3.38
635
+ KoNViD_1k_videos/5935222726.mp4, 8.008, 29.97002997002997, 1.86
636
+ KoNViD_1k_videos/10149337843.mp4, 8.008008, 29.97, 3.94
637
+ KoNViD_1k_videos/5012335397.mp4, 8.008, 29.97002997002997, 2.74
638
+ KoNViD_1k_videos/3489123822.mp4, 8.0, 24.0, 3.8
639
+ KoNViD_1k_videos/6408909511.mp4, 8.008, 29.97002997002997, 3.26
640
+ KoNViD_1k_videos/7986004475.mp4, 8.0, 25.0, 3.58
641
+ KoNViD_1k_videos/5028474013.mp4, 8.008, 29.97002997002997, 3.52
642
+ KoNViD_1k_videos/5110540070.mp4, 8.008, 29.97002997002997, 2.64
643
+ KoNViD_1k_videos/5067647428.mp4, 8.008, 29.97002997002997, 3.6
644
+ KoNViD_1k_videos/8178490084.mp4, 8.008, 29.97002997002997, 3.42
645
+ KoNViD_1k_videos/5589046662.mp4, 8.008, 29.97002997002997, 3.134453782
646
+ KoNViD_1k_videos/5541058845.mp4, 8.008, 29.97002997002997, 3.38
647
+ KoNViD_1k_videos/5340069287.mp4, 8.008, 29.97002997002997, 3.7
648
+ KoNViD_1k_videos/6528988807.mp4, 8.0, 24.0, 3.94488189
649
+ KoNViD_1k_videos/9586728257.mp4, 8.008008, 29.97, 3.56
650
+ KoNViD_1k_videos/12836715003.mp4, 8.008008, 29.97, 3.3
651
+ KoNViD_1k_videos/11803329033.mp4, 8.008008, 29.97, 3.56
652
+ KoNViD_1k_videos/10417424703.mp4, 8.008008, 29.97, 1.88
653
+ KoNViD_1k_videos/8147563408.mp4, 7.640967, 29.97002997002997, 3.58
654
+ KoNViD_1k_videos/6293871635.mp4, 8.008, 29.97002997002997, 3.68
655
+ KoNViD_1k_videos/9047671138.mp4, 8.008008, 29.97, 1.98
656
+ KoNViD_1k_videos/4406917423.mp4, 8.008, 23.976023976023978, 3.18
657
+ KoNViD_1k_videos/4367069947.mp4, 8.0, 24.0, 2.88
658
+ KoNViD_1k_videos/5020714258.mp4, 8.008, 23.976023976023978, 2.4
659
+ KoNViD_1k_videos/4744073127.mp4, 8.008, 29.97002997002997, 1.42
660
+ KoNViD_1k_videos/4843048363.mp4, 8.008, 29.97002997002997, 3.2
661
+ KoNViD_1k_videos/7734563104.mp4, 8.008, 23.976023976023978, 2.7
662
+ KoNViD_1k_videos/8670600555.mp4, 8.008, 29.97002997002997, 3.16
663
+ KoNViD_1k_videos/6582418637.mp4, 8.008, 29.97002997002997, 3.12
664
+ KoNViD_1k_videos/7383232724.mp4, 8.008, 29.97002997002997, 2.6
665
+ KoNViD_1k_videos/6343817087.mp4, 8.008, 29.97002997002997, 2.64
666
+ KoNViD_1k_videos/5827345790.mp4, 8.008, 29.97002997002997, 2.66
667
+ KoNViD_1k_videos/4922109288.mp4, 8.008, 29.97002997002997, 3.2
668
+ KoNViD_1k_videos/6932589551.mp4, 8.008, 29.97002997002997, 3.62
669
+ KoNViD_1k_videos/6338210961.mp4, 8.008, 29.97002997002997, 3.08
670
+ KoNViD_1k_videos/3437816776.mp4, 8.0, 24.0, 3.5
671
+ KoNViD_1k_videos/8431144343.mp4, 8.0, 24.0, 3.82
672
+ KoNViD_1k_videos/5893119747.mp4, 8.0, 25.0, 4.12
673
+ KoNViD_1k_videos/4270576961.mp4, 8.008, 29.97002997002997, 2.54
674
+ KoNViD_1k_videos/7633708420.mp4, 8.008, 23.976023976023978, 2.78
675
+ KoNViD_1k_videos/4383007820.mp4, 8.008, 29.97002997002997, 3.06
676
+ KoNViD_1k_videos/8582023917.mp4, 8.008, 29.97002997002997, 2.04
677
+ KoNViD_1k_videos/5504464599.mp4, 8.0, 24.0, 1.94
678
+ KoNViD_1k_videos/8532196494.mp4, 8.008, 29.97002997002997, 2.46
679
+ KoNViD_1k_videos/8201673711.mp4, 8.008, 29.97002997002997, 3.54
680
+ KoNViD_1k_videos/6879288904.mp4, 8.0, 24.0, 2.78
681
+ KoNViD_1k_videos/9513903384.mp4, 8.008008, 29.97, 4.04
682
+ KoNViD_1k_videos/4033322131.mp4, 8.008, 29.97002997002997, 2.86
683
+ KoNViD_1k_videos/10403478556.mp4, 8.008008, 23.976, 3.68
684
+ KoNViD_1k_videos/8207372644.mp4, 8.008, 29.97002997002997, 3.24
685
+ KoNViD_1k_videos/6131895268.mp4, 8.008, 29.97002997002997, 1.94
686
+ KoNViD_1k_videos/7703064692.mp4, 8.008, 29.97002997002997, 3.6
687
+ KoNViD_1k_videos/7500126174.mp4, 8.0, 24.0, 3.6
688
+ KoNViD_1k_videos/6268574586.mp4, 8.008, 23.976023976023978, 1.8
689
+ KoNViD_1k_videos/6373417661.mp4, 8.008, 23.976023976023978, 3.72
690
+ KoNViD_1k_videos/5693366090.mp4, 8.008, 29.97002997002997, 3.62
691
+ KoNViD_1k_videos/3358799122.mp4, 8.008, 29.97002997002997, 3.785714286
692
+ KoNViD_1k_videos/9812351333.mp4, 8.008008, 29.97, 3.74
693
+ KoNViD_1k_videos/10657154643.mp4, 8.008008, 29.97, 3.58
694
+ KoNViD_1k_videos/4184711791.mp4, 8.0, 24.0, 3.28
695
+ KoNViD_1k_videos/8552723245.mp4, 8.008, 23.976023976023978, 3.58
696
+ KoNViD_1k_videos/8842319020.mp4, 8.008008, 23.976, 2.86
697
+ KoNViD_1k_videos/7760313026.mp4, 8.008, 29.97002997002997, 3.36
698
+ KoNViD_1k_videos/8069880437.mp4, 8.008, 29.97002997002997, 3.64
699
+ KoNViD_1k_videos/12276570315.mp4, 8.0, 24.0, 3.365217391
700
+ KoNViD_1k_videos/5900513019.mp4, 8.008, 29.97002997002997, 3.231660232
701
+ KoNViD_1k_videos/6696625151.mp4, 8.008, 29.97002997002997, 3.1
702
+ KoNViD_1k_videos/6792594754.mp4, 8.008, 29.97002997002997, 3.4
703
+ KoNViD_1k_videos/10053703034.mp4, 8.008008, 29.97, 3.46
704
+ KoNViD_1k_videos/5621520605.mp4, 8.008, 29.97002997002997, 3.34
705
+ KoNViD_1k_videos/8144762018.mp4, 8.0, 24.0, 2.1
706
+ KoNViD_1k_videos/6095633507.mp4, 8.008, 29.97002997002997, 3.48
707
+ KoNViD_1k_videos/6299595444.mp4, 8.008, 29.97002997002997, 1.96
708
+ KoNViD_1k_videos/7681404078.mp4, 8.008, 29.97002997002997, 2.98
709
+ KoNViD_1k_videos/4484771725.mp4, 8.008, 29.97002997002997, 3.62
710
+ KoNViD_1k_videos/6722692103.mp4, 8.0, 24.0, 3.5
711
+ KoNViD_1k_videos/10201521555.mp4, 8.008008, 23.976, 2.3
712
+ KoNViD_1k_videos/3218223446.mp4, 8.008, 29.97002997002997, 2.92
713
+ KoNViD_1k_videos/5880475570.mp4, 8.0, 25.0, 2.695652174
714
+ KoNViD_1k_videos/6471207533.mp4, 8.0, 24.0, 3.32
715
+ KoNViD_1k_videos/7425861358.mp4, 8.008, 29.97002997002997, 3.46
716
+ KoNViD_1k_videos/11359714186.mp4, 8.008008, 29.97, 2.4
717
+ KoNViD_1k_videos/3546788918.mp4, 8.008, 29.97002997002997, 3.64
718
+ KoNViD_1k_videos/9680817388.mp4, 8.008008, 29.97, 3.84
719
+ KoNViD_1k_videos/8699104416.mp4, 8.008, 23.976023976023978, 2.48
720
+ KoNViD_1k_videos/5672502803.mp4, 8.008, 29.97002997002997, 3.3
721
+ KoNViD_1k_videos/7542896986.mp4, 8.0, 24.0, 3.02
722
+ KoNViD_1k_videos/8235549337.mp4, 8.008, 29.97002997002997, 2.66
723
+ KoNViD_1k_videos/4675931801.mp4, 8.008, 29.97002997002997, 4.16
724
+ KoNViD_1k_videos/5487345956.mp4, 8.008, 29.97002997002997, 3.92
725
+ KoNViD_1k_videos/8252040986.mp4, 8.008, 29.97002997002997, 3.68
726
+ KoNViD_1k_videos/6079875162.mp4, 8.008, 29.97002997002997, 1.6
727
+ KoNViD_1k_videos/5927908371.mp4, 8.008, 29.97002997002997, 1.6
728
+ KoNViD_1k_videos/5048882871.mp4, 8.0, 24.0, 1.84
729
+ KoNViD_1k_videos/8772282000.mp4, 8.008, 29.97002997002997, 3.24
730
+ KoNViD_1k_videos/3904711682.mp4, 8.008, 29.97002997002997, 3.04
731
+ KoNViD_1k_videos/5119193300.mp4, 8.008, 29.97002997002997, 2.46
732
+ KoNViD_1k_videos/5107749210.mp4, 8.008, 29.97002997002997, 2.36
733
+ KoNViD_1k_videos/9298912586.mp4, 8.0, 24.0, 2.5
734
+ KoNViD_1k_videos/9234105504.mp4, 8.008008, 23.976, 2.26
735
+ KoNViD_1k_videos/6323632937.mp4, 8.0, 24.0, 1.82
736
+ KoNViD_1k_videos/8320880668.mp4, 8.0, 24.0, 2.22
737
+ KoNViD_1k_videos/5561000398.mp4, 8.0, 24.0, 2.88
738
+ KoNViD_1k_videos/8731159058.mp4, 8.008008, 29.97, 2.2
739
+ KoNViD_1k_videos/6058824138.mp4, 8.008, 29.97002997002997, 2.74
740
+ KoNViD_1k_videos/5214650735.mp4, 8.008, 29.97002997002997, 3.18
741
+ KoNViD_1k_videos/5204063512.mp4, 8.008, 29.97002997002997, 2.94
742
+ KoNViD_1k_videos/6346933640.mp4, 8.008, 23.976023976023978, 2.6
743
+ KoNViD_1k_videos/4897814885.mp4, 8.0, 25.0, 2.46
744
+ KoNViD_1k_videos/6527804569.mp4, 8.0, 24.0, 3.12
745
+ KoNViD_1k_videos/4834450895.mp4, 8.008, 29.97002997002997, 1.74
746
+ KoNViD_1k_videos/3339387387.mp4, 8.008, 29.97002997002997, 2.42
747
+ KoNViD_1k_videos/4942462682.mp4, 8.008, 29.97002997002997, 3.5
748
+ KoNViD_1k_videos/8299847304.mp4, 8.0, 24.0, 2.96
749
+ KoNViD_1k_videos/4801560427.mp4, 8.008, 29.97002997002997, 3.44
750
+ KoNViD_1k_videos/7849542524.mp4, 8.008, 29.97002997002997, 3.7
751
+ KoNViD_1k_videos/12989512873.mp4, 8.008008, 23.976, 3.24
752
+ KoNViD_1k_videos/8467238180.mp4, 8.008, 29.97002997002997, 3.18
753
+ KoNViD_1k_videos/8222762296.mp4, 8.008, 29.97002997002997, 1.66
754
+ KoNViD_1k_videos/12899309364.mp4, 8.008008, 29.97, 3.1
755
+ KoNViD_1k_videos/6226852017.mp4, 8.008, 29.97002997002997, 3.4
756
+ KoNViD_1k_videos/8331955206.mp4, 8.008, 29.97002997002997, 3.84
757
+ KoNViD_1k_videos/3488252741.mp4, 8.008, 23.976023976023978, 3.54
758
+ KoNViD_1k_videos/5972627544.mp4, 8.0, 24.0, 3.04
759
+ KoNViD_1k_videos/12255205254.mp4, 8.0, 25.0, 3.96
760
+ KoNViD_1k_videos/4629711727.mp4, 8.0, 24.0, 2.3
761
+ KoNViD_1k_videos/6268902756.mp4, 8.008, 23.976023976023978, 1.843137255
762
+ KoNViD_1k_videos/4889669313.mp4, 8.008, 29.97002997002997, 2.52
763
+ KoNViD_1k_videos/7556185878.mp4, 8.0, 24.0, 2.82
764
+ KoNViD_1k_videos/8995404511.mp4, 8.008008, 29.97, 3.798245614
765
+ KoNViD_1k_videos/5941894437.mp4, 8.008, 29.97002997002997, 2.48
766
+ KoNViD_1k_videos/4414516665.mp4, 8.0, 25.0, 3.02
767
+ KoNViD_1k_videos/5879908897.mp4, 8.0, 25.0, 3.22
768
+ KoNViD_1k_videos/5130906896.mp4, 8.0, 24.0, 3.38
769
+ KoNViD_1k_videos/6059351914.mp4, 8.0, 24.0, 2.24
770
+ KoNViD_1k_videos/8751538577.mp4, 8.008008, 23.976, 4.4
771
+ KoNViD_1k_videos/4363644898.mp4, 8.008, 29.97002997002997, 3.7
772
+ KoNViD_1k_videos/6995540661.mp4, 8.008, 29.97002997002997, 3.18
773
+ KoNViD_1k_videos/10804894696.mp4, 8.008, 23.976023976023978, 3.12
774
+ KoNViD_1k_videos/10408715786.mp4, 8.008008, 29.97, 3.253731343
775
+ KoNViD_1k_videos/7136906983.mp4, 8.008, 29.97002997002997, 2.4
776
+ KoNViD_1k_videos/8366966088.mp4, 8.008, 29.97002997002997, 2.84
777
+ KoNViD_1k_videos/6849945228.mp4, 8.008, 23.976023976023978, 2.91796875
778
+ KoNViD_1k_videos/7548330816.mp4, 8.008, 23.976023976023978, 3.04
779
+ KoNViD_1k_videos/8479579728.mp4, 8.008, 29.97002997002997, 2.28
780
+ KoNViD_1k_videos/6716265511.mp4, 8.0, 24.0, 3.54
781
+ KoNViD_1k_videos/5355837503.mp4, 8.008, 23.976023976023978, 4.1
782
+ KoNViD_1k_videos/10650618913.mp4, 8.008008, 29.97, 1.7
783
+ KoNViD_1k_videos/7917797604.mp4, 8.008, 29.97002997002997, 2.105660377
784
+ KoNViD_1k_videos/7532463480.mp4, 8.008, 29.97002997002997, 2.792828685
785
+ KoNViD_1k_videos/9542585542.mp4, 8.008008, 29.97, 3.82
786
+ KoNViD_1k_videos/8429349812.mp4, 8.008, 29.97002997002997, 3.72
787
+ KoNViD_1k_videos/5996750151.mp4, 8.0, 24.0, 3.0
788
+ KoNViD_1k_videos/5865474554.mp4, 8.008, 29.97002997002997, 2.38
789
+ KoNViD_1k_videos/5245445672.mp4, 8.008, 29.97002997002997, 3.38
790
+ KoNViD_1k_videos/4876943348.mp4, 8.008, 29.97002997002997, 3.06
791
+ KoNViD_1k_videos/9434801897.mp4, 8.0, 25.0, 3.34
792
+ KoNViD_1k_videos/3963620197.mp4, 8.008, 23.976023976023978, 4.02
793
+ KoNViD_1k_videos/8976267501.mp4, 8.008008, 29.97, 3.0
794
+ KoNViD_1k_videos/7395710342.mp4, 8.008, 29.97002997002997, 2.92
795
+ KoNViD_1k_videos/7208119980.mp4, 8.008, 29.97002997002997, 2.93385214
796
+ KoNViD_1k_videos/12893918213.mp4, 8.008008, 29.97, 3.599236641
797
+ KoNViD_1k_videos/8675766145.mp4, 8.008, 29.97002997002997, 2.5
798
+ KoNViD_1k_videos/13154058393.mp4, 8.008, 23.976023976023978, 3.64
799
+ KoNViD_1k_videos/5082292308.mp4, 8.008, 29.97002997002997, 3.64
800
+ KoNViD_1k_videos/3588022983.mp4, 8.0, 24.0, 3.66
801
+ KoNViD_1k_videos/5458000924.mp4, 8.008, 29.97002997002997, 3.06
802
+ KoNViD_1k_videos/7141804343.mp4, 8.008, 29.97002997002997, 2.9
803
+ KoNViD_1k_videos/8078333220.mp4, 8.008, 29.97002997002997, 2.6
804
+ KoNViD_1k_videos/4736973814.mp4, 8.008, 29.97002997002997, 3.46
805
+ KoNViD_1k_videos/6971497024.mp4, 8.008, 29.97002997002997, 3.02
806
+ KoNViD_1k_videos/6369679907.mp4, 8.008, 29.97002997002997, 1.72
807
+ KoNViD_1k_videos/3699539878.mp4, 8.0, 24.0, 3.14
808
+ KoNViD_1k_videos/7983363078.mp4, 8.0, 24.0, 1.9
809
+ KoNViD_1k_videos/9390723258.mp4, 8.008008, 29.97, 3.8
810
+ KoNViD_1k_videos/8172276744.mp4, 8.008, 29.97002997002997, 3.92
811
+ KoNViD_1k_videos/10178280003.mp4, 8.008008, 29.97, 1.7
812
+ KoNViD_1k_videos/10716551125.mp4, 8.0, 25.0, 2.22
813
+ KoNViD_1k_videos/4258186896.mp4, 8.008, 29.97002997002997, 2.34
814
+ KoNViD_1k_videos/5078125268.mp4, 8.008, 29.97002997002997, 2.14
815
+ KoNViD_1k_videos/6191735552.mp4, 8.008, 29.97002997002997, 3.16
816
+ KoNViD_1k_videos/3521396571.mp4, 8.008, 29.97002997002997, 3.54
817
+ KoNViD_1k_videos/8729280042.mp4, 8.0, 24.0, 2.26
818
+ KoNViD_1k_videos/3179649855.mp4, 8.008, 23.976023976023978, 2.88
819
+ KoNViD_1k_videos/8476574247.mp4, 8.008, 29.97002997002997, 3.58
820
+ KoNViD_1k_videos/3819966572.mp4, 8.008, 29.97002997002997, 3.72
821
+ KoNViD_1k_videos/6514514177.mp4, 8.008, 23.976023976023978, 3.68
822
+ KoNViD_1k_videos/8229018279.mp4, 8.008, 29.97002997002997, 3.56
823
+ KoNViD_1k_videos/5010279097.mp4, 8.0, 24.0, 3.268907563
824
+ KoNViD_1k_videos/5914119796.mp4, 8.008, 29.97002997002997, 2.44
825
+ KoNViD_1k_videos/11061444876.mp4, 8.008, 29.97002997002997, 2.16
826
+ KoNViD_1k_videos/6304997355.mp4, 8.008, 29.97002997002997, 3.34
827
+ KoNViD_1k_videos/6775766604.mp4, 8.008, 29.97002997002997, 2.96
828
+ KoNViD_1k_videos/5066571585.mp4, 8.008, 29.97002997002997, 3.2
829
+ KoNViD_1k_videos/6554989029.mp4, 8.0, 24.0, 2.68
830
+ KoNViD_1k_videos/9058636581.mp4, 8.008008, 29.97, 1.82
831
+ KoNViD_1k_videos/6772996477.mp4, 8.008, 23.976023976023978, 3.72
832
+ KoNViD_1k_videos/5469530975.mp4, 8.008, 29.97002997002997, 2.9
833
+ KoNViD_1k_videos/5162398156.mp4, 8.008, 29.97002997002997, 2.68
834
+ KoNViD_1k_videos/5550087163.mp4, 8.008, 29.97002997002997, 2.46
835
+ KoNViD_1k_videos/5341444640.mp4, 8.008, 23.976023976023978, 3.68
836
+ KoNViD_1k_videos/3590551873.mp4, 8.008, 29.97002997002997, 2.7
837
+ KoNViD_1k_videos/3699089579.mp4, 8.0, 24.0, 3.38
838
+ KoNViD_1k_videos/5218123691.mp4, 8.0, 24.0, 3.04
839
+ KoNViD_1k_videos/4273096444.mp4, 8.008, 29.97002997002997, 3.98
840
+ KoNViD_1k_videos/5733977738.mp4, 8.0, 24.0, 2.06
841
+ KoNViD_1k_videos/6694501209.mp4, 8.008, 29.97002997002997, 2.92
842
+ KoNViD_1k_videos/4227960041.mp4, 8.008, 29.97002997002997, 3.66
843
+ KoNViD_1k_videos/6109922367.mp4, 8.008, 29.97002997002997, 3.14
844
+ KoNViD_1k_videos/7360639566.mp4, 8.008, 29.97002997002997, 3.54
845
+ KoNViD_1k_videos/7361913318.mp4, 8.008, 23.976023976023978, 2.74
846
+ KoNViD_1k_videos/4763574191.mp4, 8.008, 29.97002997002997, 3.469230769
847
+ KoNViD_1k_videos/5319413177.mp4, 8.0, 24.0, 2.48
848
+ KoNViD_1k_videos/9717753449.mp4, 8.008008, 23.976, 2.36
849
+ KoNViD_1k_videos/5690367902.mp4, 8.008, 29.97002997002997, 3.56
850
+ KoNViD_1k_videos/6137257339.mp4, 8.008, 29.97002997002997, 4.02
851
+ KoNViD_1k_videos/8986658663.mp4, 8.008008, 29.97, 3.2
852
+ KoNViD_1k_videos/12382276334.mp4, 8.0, 25.0, 3.586872587
853
+ KoNViD_1k_videos/4286406568.mp4, 8.008, 29.97002997002997, 2.96
854
+ KoNViD_1k_videos/6798366426.mp4, 8.008, 23.976023976023978, 3.84
855
+ KoNViD_1k_videos/10838718774.mp4, 8.008008, 29.97, 3.04
856
+ KoNViD_1k_videos/7969995368.mp4, 8.008, 29.97002997002997, 3.04
857
+ KoNViD_1k_videos/8967888670.mp4, 8.0, 24.0, 3.32
858
+ KoNViD_1k_videos/5212573386.mp4, 8.008, 29.97002997002997, 3.74
859
+ KoNViD_1k_videos/8440192357.mp4, 8.008, 29.97002997002997, 3.92
860
+ KoNViD_1k_videos/7177427229.mp4, 8.008, 23.976023976023978, 2.48
861
+ KoNViD_1k_videos/4417558772.mp4, 8.008, 29.97002997002997, 3.9
862
+ KoNViD_1k_videos/8007634384.mp4, 8.008, 29.97002997002997, 2.98
863
+ KoNViD_1k_videos/5993312864.mp4, 8.008, 29.97002997002997, 1.72
864
+ KoNViD_1k_videos/6959318659.mp4, 8.008, 29.97002997002997, 3.04
865
+ KoNViD_1k_videos/9675032717.mp4, 8.0, 25.0, 3.26
866
+ KoNViD_1k_videos/3496871384.mp4, 8.008, 29.97002997002997, 2.08
867
+ KoNViD_1k_videos/6270222640.mp4, 8.008, 29.97002997002997, 2.32
868
+ KoNViD_1k_videos/8414555471.mp4, 8.0, 25.0, 3.9
869
+ KoNViD_1k_videos/5011231037.mp4, 8.008, 29.97002997002997, 3.62195122
870
+ KoNViD_1k_videos/6638842709.mp4, 8.008, 29.97002997002997, 2.94
871
+ KoNViD_1k_videos/8646141367.mp4, 8.0, 25.0, 3.8
872
+ KoNViD_1k_videos/4265466447.mp4, 8.0, 24.0, 3.32
873
+ KoNViD_1k_videos/6867937233.mp4, 8.008, 29.97002997002997, 2.72
874
+ KoNViD_1k_videos/9085955167.mp4, 8.008008, 29.97, 3.2
875
+ KoNViD_1k_videos/4549036485.mp4, 8.008, 29.97002997002997, 3.992481203
876
+ KoNViD_1k_videos/6329730783.mp4, 8.0, 24.0, 2.28
877
+ KoNViD_1k_videos/4743183444.mp4, 8.008, 29.97002997002997, 3.44
878
+ KoNViD_1k_videos/8667333526.mp4, 8.0, 25.0, 2.62
879
+ KoNViD_1k_videos/6740483069.mp4, 8.008, 29.97002997002997, 2.2421875
880
+ KoNViD_1k_videos/9445782126.mp4, 8.008008, 29.97, 1.52
881
+ KoNViD_1k_videos/6858182534.mp4, 8.008, 29.97002997002997, 2.98
882
+ KoNViD_1k_videos/6135877141.mp4, 8.0, 24.0, 2.82
883
+ KoNViD_1k_videos/6949440526.mp4, 8.008, 29.97002997002997, 3.92
884
+ KoNViD_1k_videos/8531461869.mp4, 8.0, 24.0, 2.72
885
+ KoNViD_1k_videos/4751595925.mp4, 8.008, 29.97002997002997, 3.48
886
+ KoNViD_1k_videos/8031519344.mp4, 8.008, 29.97002997002997, 3.3
887
+ KoNViD_1k_videos/8433382995.mp4, 8.008, 23.976023976023978, 2.22
888
+ KoNViD_1k_videos/4337949901.mp4, 8.0, 24.0, 3.34
889
+ KoNViD_1k_videos/7087189849.mp4, 8.0, 24.0, 1.98
890
+ KoNViD_1k_videos/4313579496.mp4, 8.0, 24.0, 3.76
891
+ KoNViD_1k_videos/8251526312.mp4, 8.008, 29.97002997002997, 2.28
892
+ KoNViD_1k_videos/5819852885.mp4, 8.008, 29.97002997002997, 2.16
893
+ KoNViD_1k_videos/13196703974.mp4, 8.0, 25.0, 3.48
894
+ KoNViD_1k_videos/4281436885.mp4, 8.0, 24.0, 3.494339623
895
+ KoNViD_1k_videos/3810054574.mp4, 8.008, 29.97002997002997, 3.38
896
+ KoNViD_1k_videos/11535683914.mp4, 8.008, 29.97002997002997, 3.94
897
+ KoNViD_1k_videos/6598948831.mp4, 8.008, 29.97002997002997, 3.7
898
+ KoNViD_1k_videos/8319369964.mp4, 8.008, 29.97002997002997, 2.64
899
+ KoNViD_1k_videos/12387439244.mp4, 8.008008, 29.97, 2.34
900
+ KoNViD_1k_videos/12031712185.mp4, 8.0, 25.0, 2.58
901
+ KoNViD_1k_videos/4506003922.mp4, 8.008, 23.976023976023978, 2.36
902
+ KoNViD_1k_videos/5134310379.mp4, 8.008, 29.97002997002997, 2.12
903
+ KoNViD_1k_videos/5723108521.mp4, 8.008, 29.97002997002997, 3.18
904
+ KoNViD_1k_videos/8322973249.mp4, 8.008, 29.97002997002997, 3.3
905
+ KoNViD_1k_videos/6867862287.mp4, 8.008, 29.97002997002997, 3.68
906
+ KoNViD_1k_videos/6669111911.mp4, 8.008, 29.97002997002997, 3.2
907
+ KoNViD_1k_videos/10244479353.mp4, 8.008008, 29.97, 4.14
908
+ KoNViD_1k_videos/9571377943.mp4, 8.008008, 23.976, 4.28
909
+ KoNViD_1k_videos/5831807090.mp4, 8.008, 29.97002997002997, 3.68
910
+ KoNViD_1k_videos/6947112934.mp4, 8.0, 25.0, 4.18
911
+ KoNViD_1k_videos/4297535685.mp4, 8.008, 29.97002997002997, 2.86
912
+ KoNViD_1k_videos/5618829039.mp4, 8.008, 29.97002997002997, 3.16
913
+ KoNViD_1k_videos/11688667226.mp4, 8.008008, 29.97, 1.94
914
+ KoNViD_1k_videos/8298917863.mp4, 8.0, 24.0, 3.28
915
+ KoNViD_1k_videos/12101500915.mp4, 8.008, 29.97002997002997, 3.88
916
+ KoNViD_1k_videos/9098899158.mp4, 8.008008, 23.976, 3.02
917
+ KoNViD_1k_videos/5391431291.mp4, 8.008, 23.976023976023978, 4.151851852
918
+ KoNViD_1k_videos/8056761129.mp4, 8.008, 29.97002997002997, 3.12
919
+ KoNViD_1k_videos/4734666961.mp4, 8.008, 29.97002997002997, 2.82
920
+ KoNViD_1k_videos/9425570317.mp4, 8.0, 24.0, 3.0
921
+ KoNViD_1k_videos/3374751086.mp4, 8.008, 23.976023976023978, 2.24
922
+ KoNViD_1k_videos/9277742312.mp4, 8.008008, 29.97, 3.4
923
+ KoNViD_1k_videos/5516081814.mp4, 8.0, 24.0, 3.6
924
+ KoNViD_1k_videos/12823937005.mp4, 8.008008, 29.97, 3.4
925
+ KoNViD_1k_videos/5208145791.mp4, 8.008, 29.97002997002997, 2.24
926
+ KoNViD_1k_videos/9724431856.mp4, 8.008008, 29.97, 3.84
927
+ KoNViD_1k_videos/5460258278.mp4, 8.008, 29.97002997002997, 2.18
928
+ KoNViD_1k_videos/7545358086.mp4, 8.0, 24.0, 2.02
929
+ KoNViD_1k_videos/4206970561.mp4, 8.008, 23.976023976023978, 2.26
930
+ KoNViD_1k_videos/10785115855.mp4, 8.008008, 29.97, 2.46
931
+ KoNViD_1k_videos/9766188655.mp4, 8.008008, 29.97, 3.06
932
+ KoNViD_1k_videos/12439507093.mp4, 8.0, 24.0, 2.5
933
+ KoNViD_1k_videos/4582086726.mp4, 8.008, 29.97002997002997, 3.14
934
+ KoNViD_1k_videos/6111574885.mp4, 8.008, 29.97002997002997, 2.74
935
+ KoNViD_1k_videos/5346134105.mp4, 8.008, 29.97002997002997, 2.46
936
+ KoNViD_1k_videos/3647017501.mp4, 8.008, 23.976023976023978, 3.56
937
+ KoNViD_1k_videos/6660107279.mp4, 8.008, 29.97002997002997, 3.26
938
+ KoNViD_1k_videos/8002170390.mp4, 8.0, 25.0, 2.76
939
+ KoNViD_1k_videos/7495449440.mp4, 8.008, 29.97002997002997, 1.92
940
+ KoNViD_1k_videos/6281135899.mp4, 8.008, 29.97002997002997, 3.0
941
+ KoNViD_1k_videos/10124821225.mp4, 8.008008, 29.97, 3.4
942
+ KoNViD_1k_videos/5004709068.mp4, 8.008, 23.976023976023978, 2.52
943
+ KoNViD_1k_videos/8380926104.mp4, 8.008, 29.97002997002997, 3.4
944
+ KoNViD_1k_videos/9494995600.mp4, 8.0, 24.0, 2.74
945
+ KoNViD_1k_videos/5649370618.mp4, 8.008, 29.97002997002997, 2.9
946
+ KoNViD_1k_videos/8110008288.mp4, 8.0, 24.0, 2.02
947
+ KoNViD_1k_videos/4901210888.mp4, 8.008, 29.97002997002997, 2.82
948
+ KoNViD_1k_videos/5356092090.mp4, 8.008, 29.97002997002997, 3.22
949
+ KoNViD_1k_videos/4981160522.mp4, 8.008, 29.97002997002997, 3.26
950
+ KoNViD_1k_videos/4236199952.mp4, 8.008, 29.97002997002997, 2.9
951
+ KoNViD_1k_videos/7518773518.mp4, 8.008, 23.976023976023978, 4.32
952
+ KoNViD_1k_videos/11902527296.mp4, 8.008, 29.97002997002997, 3.24
953
+ KoNViD_1k_videos/6076608135.mp4, 8.008, 29.97002997002997, 4.4
954
+ KoNViD_1k_videos/4020832533.mp4, 7.674333, 23.976023976023978, 2.948616601
955
+ KoNViD_1k_videos/8587384006.mp4, 8.008, 29.97002997002997, 3.74
956
+ KoNViD_1k_videos/8278672659.mp4, 8.008, 23.976023976023978, 2.18
957
+ KoNViD_1k_videos/9586935224.mp4, 8.008008, 29.97, 3.34
958
+ KoNViD_1k_videos/5357123491.mp4, 8.008, 23.976023976023978, 3.44
959
+ KoNViD_1k_videos/5003283158.mp4, 8.008, 29.97002997002997, 2.78
960
+ KoNViD_1k_videos/12682619933.mp4, 8.008008, 29.97, 3.24
961
+ KoNViD_1k_videos/8217524401.mp4, 8.0, 24.0, 2.58
962
+ KoNViD_1k_videos/10533906564.mp4, 8.008008, 29.97, 3.24
963
+ KoNViD_1k_videos/4864754515.mp4, 8.0, 24.0, 3.12
964
+ KoNViD_1k_videos/9267867823.mp4, 8.008008, 29.97, 3.84
965
+ KoNViD_1k_videos/10636855406.mp4, 8.008008, 29.97, 3.88
966
+ KoNViD_1k_videos/7819697028.mp4, 8.008, 29.97002997002997, 2.56
967
+ KoNViD_1k_videos/6694553789.mp4, 8.008, 29.97002997002997, 3.62
968
+ KoNViD_1k_videos/6982428911.mp4, 8.008, 29.97002997002997, 2.7
969
+ KoNViD_1k_videos/6368379561.mp4, 8.008, 29.97002997002997, 3.54
970
+ KoNViD_1k_videos/12623549414.mp4, 8.008008, 29.97, 3.68
971
+ KoNViD_1k_videos/8442879599.mp4, 8.008, 29.97002997002997, 3.74
972
+ KoNViD_1k_videos/8484812859.mp4, 8.008, 29.97002997002997, 3.38
973
+ KoNViD_1k_videos/11816978255.mp4, 8.0, 25.0, 3.76
974
+ KoNViD_1k_videos/8215074237.mp4, 8.0, 24.0, 2.7
975
+ KoNViD_1k_videos/5415012877.mp4, 8.008, 23.976023976023978, 2.42
976
+ KoNViD_1k_videos/12277706754.mp4, 8.008008, 29.97, 3.5
977
+ KoNViD_1k_videos/11635304003.mp4, 8.008, 29.97002997002997, 1.74
978
+ KoNViD_1k_videos/7526781566.mp4, 8.008, 29.97002997002997, 3.02
979
+ KoNViD_1k_videos/6215542986.mp4, 8.008, 29.97002997002997, 2.58
980
+ KoNViD_1k_videos/9363436148.mp4, 8.008008, 29.97, 3.58
981
+ KoNViD_1k_videos/7005111650.mp4, 8.008, 23.976023976023978, 2.72
982
+ KoNViD_1k_videos/5093919014.mp4, 8.008, 29.97002997002997, 2.26
983
+ KoNViD_1k_videos/6671136435.mp4, 8.008, 29.97002997002997, 3.04
984
+ KoNViD_1k_videos/3369925072.mp4, 8.008, 29.97002997002997, 4.02
985
+ KoNViD_1k_videos/11057110163.mp4, 8.008, 29.97002997002997, 2.78
986
+ KoNViD_1k_videos/3343535739.mp4, 8.008, 29.97002997002997, 2.4
987
+ KoNViD_1k_videos/5526460224.mp4, 8.008, 29.97002997002997, 3.38
988
+ KoNViD_1k_videos/5005236579.mp4, 8.008, 29.97002997002997, 3.26
989
+ KoNViD_1k_videos/5203755303.mp4, 7.507499999999999, 29.97002997002997, 2.68
990
+ KoNViD_1k_videos/5960200024.mp4, 8.008, 29.97002997002997, 2.52
991
+ KoNViD_1k_videos/5453773681.mp4, 8.008, 29.97002997002997, 2.24
992
+ KoNViD_1k_videos/6319757818.mp4, 8.008, 29.97002997002997, 3.38
993
+ KoNViD_1k_videos/2999049224.mp4, 8.008, 29.97002997002997, 3.44
994
+ KoNViD_1k_videos/5770178686.mp4, 8.008, 23.976023976023978, 3.78
995
+ KoNViD_1k_videos/9737640625.mp4, 7.5492159999999995, 23.976, 3.36
996
+ KoNViD_1k_videos/6107338487.mp4, 8.0, 25.0, 3.62
997
+ KoNViD_1k_videos/5467704373.mp4, 8.008, 29.97002997002997, 3.16
998
+ KoNViD_1k_videos/7897750494.mp4, 8.0, 24.0, 2.4
999
+ KoNViD_1k_videos/4627984833.mp4, 8.008, 29.97002997002997, 1.7
1000
+ KoNViD_1k_videos/3611217365.mp4, 8.008, 23.976023976023978, 3.6
1001
+ KoNViD_1k_videos/5154464760.mp4, 8.0, 25.0, 3.86
1002
+ KoNViD_1k_videos/9802042845.mp4, 8.008008, 29.97, 3.66
1003
+ KoNViD_1k_videos/9618443862.mp4, 8.008008, 29.97, 2.08
1004
+ KoNViD_1k_videos/7892371292.mp4, 8.008, 29.97002997002997, 3.48
1005
+ KoNViD_1k_videos/4573314147.mp4, 8.008, 29.97002997002997, 2.56
1006
+ KoNViD_1k_videos/8026448058.mp4, 8.0, 25.0, 3.44
1007
+ KoNViD_1k_videos/4545845826.mp4, 8.008, 29.97002997002997, 3.9
1008
+ KoNViD_1k_videos/4567868868.mp4, 8.008, 29.97002997002997, 3.5
1009
+ KoNViD_1k_videos/6907839104.mp4, 8.008, 23.976023976023978, 3.5
1010
+ KoNViD_1k_videos/5875944717.mp4, 8.0, 25.0, 3.92
1011
+ KoNViD_1k_videos/5009379117.mp4, 7.8411669999999996, 29.97002997002997, 2.299595142
1012
+ KoNViD_1k_videos/8563986596.mp4, 8.008, 29.97002997002997, 2.94
1013
+ KoNViD_1k_videos/5987864177.mp4, 8.008, 29.97002997002997, 3.14
1014
+ KoNViD_1k_videos/4335304419.mp4, 8.008, 29.97002997002997, 3.02
1015
+ KoNViD_1k_videos/8098521993.mp4, 8.008, 29.97002997002997, 2.86
1016
+ KoNViD_1k_videos/11925275803.mp4, 8.008, 29.97002997002997, 2.66
1017
+ KoNViD_1k_videos/5956380805.mp4, 8.008, 29.97002997002997, 3.46
1018
+ KoNViD_1k_videos/4289215378.mp4, 8.008, 29.97002997002997, 3.4
1019
+ KoNViD_1k_videos/6787309364.mp4, 8.008, 29.97002997002997, 4.16
1020
+ KoNViD_1k_videos/10838934973.mp4, 8.008008, 29.97, 3.34
1021
+ KoNViD_1k_videos/7383994638.mp4, 8.008, 29.97002997002997, 3.74
1022
+ KoNViD_1k_videos/5845660280.mp4, 8.0, 25.0, 3.5
1023
+ KoNViD_1k_videos/8738388681.mp4, 8.008, 29.97002997002997, 3.8
1024
+ KoNViD_1k_videos/6210062524.mp4, 8.0, 24.0, 2.78
1025
+ KoNViD_1k_videos/6914710581.mp4, 8.0, 24.0, 4.0
1026
+ KoNViD_1k_videos/7151780093.mp4, 8.008, 29.97002997002997, 2.6
1027
+ KoNViD_1k_videos/11673027224.mp4, 8.008, 29.97002997002997, 3.32
1028
+ KoNViD_1k_videos/9317770917.mp4, 8.008008, 29.97, 3.38
1029
+ KoNViD_1k_videos/7548305462.mp4, 8.008, 23.976023976023978, 2.24
1030
+ KoNViD_1k_videos/4202404174.mp4, 8.008, 29.97002997002997, 3.14
1031
+ KoNViD_1k_videos/3425371223.mp4, 8.008, 29.97002997002997, 3.12
1032
+ KoNViD_1k_videos/4987165854.mp4, 8.008, 29.97002997002997, 3.24
1033
+ KoNViD_1k_videos/4642004278.mp4, 8.008, 29.97002997002997, 2.56
1034
+ KoNViD_1k_videos/6223104346.mp4, 8.008, 29.97002997002997, 2.56
1035
+ KoNViD_1k_videos/12088104156.mp4, 8.008, 29.97002997002997, 3.6
1036
+ KoNViD_1k_videos/5636101558.mp4, 8.0, 24.0, 2.26
1037
+ KoNViD_1k_videos/10238939436.mp4, 8.008008, 23.976, 2.58
1038
+ KoNViD_1k_videos/6844033217.mp4, 8.008, 29.97002997002997, 3.72
1039
+ KoNViD_1k_videos/8440795977.mp4, 8.008, 29.97002997002997, 2.28
1040
+ KoNViD_1k_videos/5570426341.mp4, 8.0, 24.0, 1.56
1041
+ KoNViD_1k_videos/9524637688.mp4, 8.008008, 29.97, 2.58
1042
+ KoNViD_1k_videos/3948863216.mp4, 8.0, 24.0, 3.62
1043
+ KoNViD_1k_videos/5706858082.mp4, 8.008, 29.97002997002997, 3.12
1044
+ KoNViD_1k_videos/5911601461.mp4, 8.008, 23.976023976023978, 3.42
1045
+ KoNViD_1k_videos/4836718842.mp4, 8.008, 29.97002997002997, 2.64
1046
+ KoNViD_1k_videos/6792979288.mp4, 8.0, 24.0, 1.78
1047
+ KoNViD_1k_videos/8486027818.mp4, 8.008, 29.97002997002997, 3.2
1048
+ KoNViD_1k_videos/12912266335.mp4, 8.0, 24.0, 2.86
1049
+ KoNViD_1k_videos/5893880541.mp4, 8.008, 23.976023976023978, 2.7
1050
+ KoNViD_1k_videos/8013211059.mp4, 8.0, 25.0, 2.48
1051
+ KoNViD_1k_videos/5440091902.mp4, 8.008, 29.97002997002997, 3.0
1052
+ KoNViD_1k_videos/5850199057.mp4, 8.008, 29.97002997002997, 2.4
1053
+ KoNViD_1k_videos/11082897705.mp4, 8.008008, 29.97, 3.3
1054
+ KoNViD_1k_videos/5338017301.mp4, 8.008, 29.97002997002997, 3.8
1055
+ KoNViD_1k_videos/7935377214.mp4, 8.008, 29.97002997002997, 3.58
1056
+ KoNViD_1k_videos/4871910775.mp4, 8.008, 29.97002997002997, 2.48
1057
+ KoNViD_1k_videos/8507613491.mp4, 8.008, 29.97002997002997, 2.28
1058
+ KoNViD_1k_videos/4321531777.mp4, 8.008, 29.97002997002997, 1.82
1059
+ KoNViD_1k_videos/8193379879.mp4, 8.0, 24.0, 3.26
1060
+ KoNViD_1k_videos/4712606231.mp4, 8.008, 29.97002997002997, 3.7
1061
+ KoNViD_1k_videos/6249802707.mp4, 8.008, 29.97002997002997, 3.58
1062
+ KoNViD_1k_videos/10283826203.mp4, 8.008008, 29.97, 4.14
1063
+ KoNViD_1k_videos/8435518163.mp4, 8.008, 29.97002997002997, 3.16
1064
+ KoNViD_1k_videos/8586743266.mp4, 8.008, 29.97002997002997, 2.72
1065
+ KoNViD_1k_videos/6490001399.mp4, 8.0, 24.0, 3.36
1066
+ KoNViD_1k_videos/11907021903.mp4, 8.008008, 29.97, 2.16
1067
+ KoNViD_1k_videos/7287459080.mp4, 8.008, 29.97002997002997, 2.32
1068
+ KoNViD_1k_videos/6065207696.mp4, 8.008, 29.97002997002997, 2.88
1069
+ KoNViD_1k_videos/7197025396.mp4, 8.008, 29.97002997002997, 3.92
1070
+ KoNViD_1k_videos/6092433561.mp4, 8.008, 29.97002997002997, 2.86
1071
+ KoNViD_1k_videos/5465225347.mp4, 7.5408669999999995, 29.97002997002997, 3.52
1072
+ KoNViD_1k_videos/3878700690.mp4, 8.008, 29.97002997002997, 2.74
1073
+ KoNViD_1k_videos/6313769652.mp4, 8.008, 29.97002997002997, 3.82
1074
+ KoNViD_1k_videos/5477809421.mp4, 8.008, 29.97002997002997, 4.04
1075
+ KoNViD_1k_videos/4639129836.mp4, 8.008, 29.97002997002997, 2.06
1076
+ KoNViD_1k_videos/7550224002.mp4, 8.0, 24.0, 2.34
1077
+ KoNViD_1k_videos/5805360253.mp4, 8.0, 24.0, 2.58
1078
+ KoNViD_1k_videos/6444142351.mp4, 8.008, 29.97002997002997, 3.28
1079
+ KoNViD_1k_videos/7177696763.mp4, 8.0, 24.0, 4.06
1080
+ KoNViD_1k_videos/4809572333.mp4, 8.008, 23.976023976023978, 3.12
1081
+ KoNViD_1k_videos/7664727482.mp4, 8.008, 29.97002997002997, 3.4
1082
+ KoNViD_1k_videos/7198853624.mp4, 8.008, 23.976023976023978, 3.6
1083
+ KoNViD_1k_videos/8124951690.mp4, 8.008, 29.97002997002997, 3.06
1084
+ KoNViD_1k_videos/12123666684.mp4, 8.008, 29.97002997002997, 4.0
1085
+ KoNViD_1k_videos/8132161360.mp4, 8.0, 24.0, 2.9
1086
+ KoNViD_1k_videos/7122092993.mp4, 8.008, 29.97002997002997, 2.88
1087
+ KoNViD_1k_videos/5625415555.mp4, 8.008, 23.976023976023978, 2.08
1088
+ KoNViD_1k_videos/6411380421.mp4, 8.008, 29.97002997002997, 3.44
1089
+ KoNViD_1k_videos/6867926575.mp4, 8.008, 29.97002997002997, 3.08
1090
+ KoNViD_1k_videos/8859509880.mp4, 8.008008, 29.97, 3.94
1091
+ KoNViD_1k_videos/5806274886.mp4, 8.0, 24.0, 2.24
1092
+ KoNViD_1k_videos/11425159465.mp4, 8.008008, 29.97, 2.68
1093
+ KoNViD_1k_videos/6648629457.mp4, 8.008, 23.976023976023978, 3.42
1094
+ KoNViD_1k_videos/8683385786.mp4, 8.008, 29.97002997002997, 3.66
1095
+ KoNViD_1k_videos/8701032421.mp4, 8.008, 23.976023976023978, 4.02
1096
+ KoNViD_1k_videos/7548202460.mp4, 8.008, 29.97002997002997, 3.3
1097
+ KoNViD_1k_videos/5030830413.mp4, 8.008, 29.97002997002997, 3.28
1098
+ KoNViD_1k_videos/5140332158.mp4, 8.008, 29.97002997002997, 3.58
1099
+ KoNViD_1k_videos/4723346787.mp4, 8.008, 29.97002997002997, 3.0
1100
+ KoNViD_1k_videos/5644303578.mp4, 8.008, 29.97002997002997, 2.54
1101
+ KoNViD_1k_videos/6807638360.mp4, 8.0, 24.0, 1.46
1102
+ KoNViD_1k_videos/5641862023.mp4, 8.008, 29.97002997002997, 3.36
1103
+ KoNViD_1k_videos/3860907449.mp4, 8.0, 24.0, 3.54
1104
+ KoNViD_1k_videos/5460445525.mp4, 8.0, 24.0, 2.04
1105
+ KoNViD_1k_videos/8339870239.mp4, 8.0, 24.0, 1.992063492
1106
+ KoNViD_1k_videos/7667993154.mp4, 8.008, 29.97002997002997, 3.76
1107
+ KoNViD_1k_videos/5869559051.mp4, 8.008, 29.97002997002997, 2.3
1108
+ KoNViD_1k_videos/13288073854.mp4, 8.0, 25.0, 4.1
1109
+ KoNViD_1k_videos/5360118581.mp4, 8.008, 29.97002997002997, 3.2
1110
+ KoNViD_1k_videos/5669052182.mp4, 8.008, 29.97002997002997, 3.879844961
1111
+ KoNViD_1k_videos/11971608074.mp4, 8.008, 29.97002997002997, 3.34
1112
+ KoNViD_1k_videos/8646170532.mp4, 8.008, 29.97002997002997, 3.48
1113
+ KoNViD_1k_videos/5786034294.mp4, 8.008, 23.976023976023978, 3.0
1114
+ KoNViD_1k_videos/7983610133.mp4, 8.008, 29.97002997002997, 3.74
1115
+ KoNViD_1k_videos/8084631711.mp4, 8.0, 24.0, 3.48
1116
+ KoNViD_1k_videos/11435365925.mp4, 8.008008, 29.97, 2.6
1117
+ KoNViD_1k_videos/6917745987.mp4, 8.008, 23.976023976023978, 2.4
1118
+ KoNViD_1k_videos/9209826160.mp4, 8.008008, 29.97, 3.34
1119
+ KoNViD_1k_videos/4907486229.mp4, 8.008, 29.97002997002997, 2.0
1120
+ KoNViD_1k_videos/6123040072.mp4, 8.0, 24.0, 2.22
1121
+ KoNViD_1k_videos/5023426388.mp4, 8.008, 29.97002997002997, 3.32
1122
+ KoNViD_1k_videos/5043852749.mp4, 7.8411669999999996, 29.97002997002997, 1.84
1123
+ KoNViD_1k_videos/10116604143.mp4, 8.008008, 23.976, 3.08
1124
+ KoNViD_1k_videos/4945025836.mp4, 8.0, 24.0, 3.32
1125
+ KoNViD_1k_videos/8664958871.mp4, 8.0, 25.0, 3.52
1126
+ KoNViD_1k_videos/5609569195.mp4, 8.008, 29.97002997002997, 2.82
1127
+ KoNViD_1k_videos/4931493432.mp4, 8.008, 29.97002997002997, 3.36
1128
+ KoNViD_1k_videos/9136867624.mp4, 8.008008, 29.97, 2.12
1129
+ KoNViD_1k_videos/8494495600.mp4, 8.008, 23.976023976023978, 3.74
1130
+ KoNViD_1k_videos/3781419322.mp4, 8.008, 23.976023976023978, 2.89787234
1131
+ KoNViD_1k_videos/8254380270.mp4, 8.0, 24.0, 3.16
1132
+ KoNViD_1k_videos/7863416600.mp4, 8.008, 29.97002997002997, 1.78
1133
+ KoNViD_1k_videos/4297792287.mp4, 8.0, 25.0, 3.78
1134
+ KoNViD_1k_videos/8510251919.mp4, 8.008, 29.97002997002997, 3.66
1135
+ KoNViD_1k_videos/4573734018.mp4, 8.008, 29.97002997002997, 3.3
1136
+ KoNViD_1k_videos/5562932712.mp4, 8.008, 23.976023976023978, 3.9
1137
+ KoNViD_1k_videos/7693986854.mp4, 8.008, 29.97002997002997, 3.1
1138
+ KoNViD_1k_videos/7959542546.mp4, 8.0, 24.0, 2.64
1139
+ KoNViD_1k_videos/7962972726.mp4, 8.008, 29.97002997002997, 2.18
1140
+ KoNViD_1k_videos/11618284403.mp4, 8.008, 29.97002997002997, 4.12
1141
+ KoNViD_1k_videos/6215522900.mp4, 8.008, 29.97002997002997, 3.48
1142
+ KoNViD_1k_videos/6804829908.mp4, 8.008, 29.97002997002997, 2.96
1143
+ KoNViD_1k_videos/6416687735.mp4, 8.008, 29.97002997002997, 3.36
1144
+ KoNViD_1k_videos/5003956763.mp4, 8.008, 29.97002997002997, 3.598455598
1145
+ KoNViD_1k_videos/3718963712.mp4, 8.0, 24.0, 3.66
1146
+ KoNViD_1k_videos/3482706595.mp4, 8.008, 29.97002997002997, 3.2
1147
+ KoNViD_1k_videos/6106291030.mp4, 8.008, 23.976023976023978, 3.6
1148
+ KoNViD_1k_videos/6847647728.mp4, 8.008, 29.97002997002997, 3.68
1149
+ KoNViD_1k_videos/7054450817.mp4, 8.0, 24.0, 3.46
1150
+ KoNViD_1k_videos/7558781186.mp4, 8.0, 25.0, 1.68
1151
+ KoNViD_1k_videos/12378508014.mp4, 8.0, 25.0, 4.035573123
1152
+ KoNViD_1k_videos/10132105415.mp4, 8.008008, 29.97, 1.64
1153
+ KoNViD_1k_videos/4975471705.mp4, 8.008, 29.97002997002997, 3.78
1154
+ KoNViD_1k_videos/5717841095.mp4, 8.008, 23.976023976023978, 3.02
1155
+ KoNViD_1k_videos/6212970517.mp4, 8.008, 29.97002997002997, 2.06
1156
+ KoNViD_1k_videos/9583034514.mp4, 8.008008, 29.97, 3.88
1157
+ KoNViD_1k_videos/10406095776.mp4, 8.008008, 29.97, 2.62
1158
+ KoNViD_1k_videos/8635322862.mp4, 8.008, 29.97002997002997, 2.36
1159
+ KoNViD_1k_videos/5354932311.mp4, 8.0, 24.0, 3.480314961
1160
+ KoNViD_1k_videos/5668545266.mp4, 8.0, 24.0, 2.46
1161
+ KoNViD_1k_videos/4201416279.mp4, 8.008, 29.97002997002997, 3.72
1162
+ KoNViD_1k_videos/8324243833.mp4, 8.008, 29.97002997002997, 2.16
1163
+ KoNViD_1k_videos/6439107119.mp4, 8.008, 29.97002997002997, 3.08
1164
+ KoNViD_1k_videos/8396734122.mp4, 8.0, 24.0, 2.84
1165
+ KoNViD_1k_videos/10027007645.mp4, 8.008008, 29.97, 3.8
1166
+ KoNViD_1k_videos/5176278265.mp4, 8.008, 29.97002997002997, 3.12
1167
+ KoNViD_1k_videos/3747645672.mp4, 8.0, 24.0, 3.82
1168
+ KoNViD_1k_videos/5127033299.mp4, 8.008, 29.97002997002997, 3.28
1169
+ KoNViD_1k_videos/6104508040.mp4, 8.008, 29.97002997002997, 3.08
1170
+ KoNViD_1k_videos/8632107216.mp4, 8.008, 29.97002997002997, 2.32
1171
+ KoNViD_1k_videos/4159445902.mp4, 8.008, 23.976023976023978, 2.52
1172
+ KoNViD_1k_videos/8933125503.mp4, 8.008008, 29.97, 2.4
1173
+ KoNViD_1k_videos/7198246948.mp4, 8.008, 29.97002997002997, 3.42
1174
+ KoNViD_1k_videos/4673672619.mp4, 8.008, 29.97002997002997, 3.7
1175
+ KoNViD_1k_videos/12143576166.mp4, 8.008008, 29.97, 3.44
1176
+ KoNViD_1k_videos/6079632868.mp4, 8.0, 25.0, 3.3
1177
+ KoNViD_1k_videos/8522230898.mp4, 8.008, 23.976023976023978, 3.34
1178
+ KoNViD_1k_videos/6916420878.mp4, 8.0, 24.0, 2.84
1179
+ KoNViD_1k_videos/8730797431.mp4, 8.008, 29.97002997002997, 3.7
1180
+ KoNViD_1k_videos/4735334918.mp4, 8.008, 29.97002997002997, 2.28
1181
+ KoNViD_1k_videos/6272702003.mp4, 8.008, 29.97002997002997, 3.5
1182
+ KoNViD_1k_videos/5311187352.mp4, 8.008, 29.97002997002997, 2.88
1183
+ KoNViD_1k_videos/13042523904.mp4, 8.008, 29.97002997002997, 3.12
1184
+ KoNViD_1k_videos/10691750555.mp4, 8.008008, 23.976, 2.84
1185
+ KoNViD_1k_videos/5539585646.mp4, 8.008, 29.97002997002997, 3.26
1186
+ KoNViD_1k_videos/6300963347.mp4, 8.008, 29.97002997002997, 3.58
1187
+ KoNViD_1k_videos/4329911079.mp4, 8.008, 29.97002997002997, 4.02
1188
+ KoNViD_1k_videos/7722155622.mp4, 8.0, 24.0, 2.36
1189
+ KoNViD_1k_videos/4491961485.mp4, 8.0, 24.0, 3.08
1190
+ KoNViD_1k_videos/7547764246.mp4, 8.008, 29.97002997002997, 3.28
1191
+ KoNViD_1k_videos/6041834062.mp4, 8.008, 29.97002997002997, 3.32
1192
+ KoNViD_1k_videos/6004183559.mp4, 8.008, 29.97002997002997, 3.38
1193
+ KoNViD_1k_videos/10128248563.mp4, 8.008008, 29.97, 2.86
1194
+ KoNViD_1k_videos/6012598017.mp4, 8.008, 29.97002997002997, 3.36
1195
+ KoNViD_1k_videos/6061346837.mp4, 8.008, 29.97002997002997, 2.78
1196
+ KoNViD_1k_videos/6416725133.mp4, 8.008, 29.97002997002997, 3.76
1197
+ KoNViD_1k_videos/8266961231.mp4, 8.008, 29.97002997002997, 3.14
1198
+ KoNViD_1k_videos/6868748154.mp4, 8.0, 25.0, 3.26
1199
+ KoNViD_1k_videos/5912268467.mp4, 8.008, 29.97002997002997, 3.34
1200
+ KoNViD_1k_videos/10404182556.mp4, 8.008008, 29.97, 2.32
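
Each entry in these label files is one comma-separated row; the four fields appear to be the relative media path, clip duration in seconds, frame rate, and the subjective quality score (MOS), with -1 apparently used as a placeholder for the duration/fps fields in the image-only KoNiQ10k listings that follow. A minimal parsing sketch under that assumption (the field names and the `load_labels` helper are illustrative, not the repository's own loader):

```python
# Minimal sketch: parse a labels.txt row of the form
#   <path>, <duration>, <fps>, <score>
# Field interpretation is assumed from the listings above, not from the repo code.
from typing import NamedTuple, List

class LabelRow(NamedTuple):
    path: str        # e.g. "KoNViD_1k_videos/5147152978.mp4"
    duration: float  # seconds; -1 appears to mean "not applicable" (still images)
    fps: float       # frames per second; -1 likewise
    score: float     # subjective quality label (MOS)

def load_labels(txt_path: str) -> List[LabelRow]:
    rows = []
    with open(txt_path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # skip blank lines
            path, duration, fps, score = [x.strip() for x in line.split(",")]
            rows.append(LabelRow(path, float(duration), float(fps), float(score)))
    return rows

# Example usage:
# labels = load_labels("examplar_data_labels/KoNViD/labels.txt")
# print(labels[0].path, labels[0].score)
```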
examplar_data_labels/KoNiQ10k/test_labels.txt ADDED
@@ -0,0 +1,2015 @@
1
+ 10007357496.jpg, -1, -1, 68.7285714286
2
+ 10020766793.jpg, -1, -1, 81.50625
3
+ 10020891105.jpg, -1, -1, 56.8297413793
4
+ 10022757465.jpg, -1, -1, 71.0154867257
5
+ 10039534103.jpg, -1, -1, 76.075
6
+ 10050349236.jpg, -1, -1, 30.9418103448
7
+ 10086300493.jpg, -1, -1, 68.3026315789
8
+ 10093057603.jpg, -1, -1, 63.4955752212
9
+ 10113730664.jpg, -1, -1, 68.8125
10
+ 10126177964.jpg, -1, -1, 70.9078947368
11
+ 10154367094.jpg, -1, -1, 74.1120689655
12
+ 10154991453.jpg, -1, -1, 74.185483871
13
+ 10169304416.jpg, -1, -1, 80.8716814159
14
+ 10173544123.jpg, -1, -1, 65.1055555556
15
+ 10178183416.jpg, -1, -1, 75.6842105263
16
+ 10186832194.jpg, -1, -1, 57.5818584071
17
+ 10187102065.jpg, -1, -1, 38.7142857143
18
+ 10187522803.jpg, -1, -1, 68.4601769912
19
+ 10206674394.jpg, -1, -1, 79.1100917431
20
+ 10209917915.jpg, -1, -1, 46.9097222222
21
+ 10226524916.jpg, -1, -1, 20.6216216216
22
+ 10246318724.jpg, -1, -1, 74.3581081081
23
+ 10273804555.jpg, -1, -1, 68.3026315789
24
+ 10279904885.jpg, -1, -1, 44.0400943396
25
+ 1029470257.jpg, -1, -1, 43.0961538462
26
+ 10299438733.jpg, -1, -1, 55.2683486239
27
+ 10319186405.jpg, -1, -1, 55.5540540541
28
+ 10330831494.jpg, -1, -1, 48.5321100917
29
+ 10335072144.jpg, -1, -1, 62.1610576923
30
+ 10342569703.jpg, -1, -1, 38.6081081081
31
+ 10349956885.jpg, -1, -1, 69.75
32
+ 10350497833.jpg, -1, -1, 58.2605932203
33
+ 10351189716.jpg, -1, -1, 61.01875
34
+ 10362672006.jpg, -1, -1, 75.8189655172
35
+ 10366373284.jpg, -1, -1, 75.4545454545
36
+ 10397166423.jpg, -1, -1, 73.935840708
37
+ 10458703674.jpg, -1, -1, 38.1615044248
38
+ 10468110795.jpg, -1, -1, 61.4754464286
39
+ 10469774844.jpg, -1, -1, 59.6034482759
40
+ 10474002734.jpg, -1, -1, 80.7745535714
41
+ 10486107003.jpg, -1, -1, 67.9930555556
42
+ 10491423125.jpg, -1, -1, 72.5
43
+ 10520192995.jpg, -1, -1, 55.1630434783
44
+ 10526881085.jpg, -1, -1, 44.4598214286
45
+ 10536668436.jpg, -1, -1, 36.5432330827
46
+ 10539008983.jpg, -1, -1, 71.0154867257
47
+ 105620456.jpg, -1, -1, 55.8625
48
+ 10567114974.jpg, -1, -1, 64.847826087
49
+ 10582113466.jpg, -1, -1, 58.1607142857
50
+ 10603218265.jpg, -1, -1, 71.8616071429
51
+ 10625988224.jpg, -1, -1, 66.8443396226
52
+ 1063351510.jpg, -1, -1, 47.1153846154
53
+ 10633933603.jpg, -1, -1, 58.8898305085
54
+ 10644234544.jpg, -1, -1, 54.7289915966
55
+ 106652866.jpg, -1, -1, 49.0441176471
56
+ 1067765170.jpg, -1, -1, 51.175
57
+ 10678350303.jpg, -1, -1, 66.786637931
58
+ 10680715605.jpg, -1, -1, 37.3
59
+ 10680932454.jpg, -1, -1, 51.39375
60
+ 10689376076.jpg, -1, -1, 59.2394067797
61
+ 10714662986.jpg, -1, -1, 80.6551724138
62
+ 10715577856.jpg, -1, -1, 45.213592233
63
+ 10721124556.jpg, -1, -1, 58.4579646018
64
+ 10740727595.jpg, -1, -1, 72.9125
65
+ 10762659033.jpg, -1, -1, 42.25
66
+ 10767441335.jpg, -1, -1, 74.2094594595
67
+ 10770700455.jpg, -1, -1, 60.74375
68
+ 10773126784.jpg, -1, -1, 78.475
69
+ 10776111906.jpg, -1, -1, 73.975
70
+ 10791502435.jpg, -1, -1, 70.8130252101
71
+ 10813730303.jpg, -1, -1, 40.0817307692
72
+ 10836905604.jpg, -1, -1, 75.9932432432
73
+ 1084984228.jpg, -1, -1, 34.8319327731
74
+ 10861031374.jpg, -1, -1, 64.675
75
+ 10865798996.jpg, -1, -1, 76.4722222222
76
+ 10866463396.jpg, -1, -1, 69.1029411765
77
+ 10880314705.jpg, -1, -1, 77.4210526316
78
+ 10911046243.jpg, -1, -1, 76.2339449541
79
+ 10912602716.jpg, -1, -1, 74.9510869565
80
+ 10953973716.jpg, -1, -1, 72.6486486486
81
+ 11013892854.jpg, -1, -1, 27.4978813559
82
+ 110356262.jpg, -1, -1, 35.925
83
+ 1103755154.jpg, -1, -1, 73.0641025641
84
+ 11051374673.jpg, -1, -1, 62.6148648649
85
+ 11054638863.jpg, -1, -1, 61.0070754717
86
+ 11054899145.jpg, -1, -1, 74.0512820513
87
+ 11060185873.jpg, -1, -1, 63.0148305085
88
+ 11095381506.jpg, -1, -1, 76.75
89
+ 11109548195.jpg, -1, -1, 61.5231092437
90
+ 11126652126.jpg, -1, -1, 56.6485849057
91
+ 11150131936.jpg, -1, -1, 63.9158878505
92
+ 11153943035.jpg, -1, -1, 60.2777777778
93
+ 11156365526.jpg, -1, -1, 47.5882352941
94
+ 11158314215.jpg, -1, -1, 46.3349514563
95
+ 11215941184.jpg, -1, -1, 73.4077669903
96
+ 11232665444.jpg, -1, -1, 32.2668067227
97
+ 11242015725.jpg, -1, -1, 63.7152777778
98
+ 11250215644.jpg, -1, -1, 69.6776315789
99
+ 11253432804.jpg, -1, -1, 61.2027027027
100
+ 11260270773.jpg, -1, -1, 72.3513513514
101
+ 11293176185.jpg, -1, -1, 61.3
102
+ 11310848503.jpg, -1, -1, 70.1461864407
103
+ 11335231214.jpg, -1, -1, 68.9603960396
104
+ 113663782.jpg, -1, -1, 67.55
105
+ 11378848184.jpg, -1, -1, 64.1334745763
106
+ 11406584295.jpg, -1, -1, 55.3310810811
107
+ 11407368256.jpg, -1, -1, 72.2884615385
108
+ 11407862453.jpg, -1, -1, 70.6504424779
109
+ 11408443745.jpg, -1, -1, 63.475
110
+ 11433127464.jpg, -1, -1, 54.109375
111
+ 11436390143.jpg, -1, -1, 72.4481132075
112
+ 11436508006.jpg, -1, -1, 65.14375
113
+ 11438612393.jpg, -1, -1, 31.9953271028
114
+ 11446005074.jpg, -1, -1, 77.468487395
115
+ 11452299875.jpg, -1, -1, 74.4608695652
116
+ 11476172786.jpg, -1, -1, 72.625
117
+ 11511265293.jpg, -1, -1, 24.2767857143
118
+ 11514342335.jpg, -1, -1, 71.9646017699
119
+ 11522920033.jpg, -1, -1, 57.3275862069
120
+ 11527524903.jpg, -1, -1, 33.6972477064
121
+ 115290990.jpg, -1, -1, 42.9432773109
122
+ 1159028861.jpg, -1, -1, 72.9125
123
+ 11706252.jpg, -1, -1, 32.2521186441
124
+ 121123359.jpg, -1, -1, 88.24375
125
+ 122760647.jpg, -1, -1, 50.9125
126
+ 123242995.jpg, -1, -1, 60.968487395
127
+ 128079586.jpg, -1, -1, 57.6407563025
128
+ 1289162225.jpg, -1, -1, 62.1470588235
129
+ 1313706108.jpg, -1, -1, 34.4159663866
130
+ 1329713028.jpg, -1, -1, 52.7632743363
131
+ 1331642031.jpg, -1, -1, 54.2905405405
132
+ 1348094685.jpg, -1, -1, 72.3834745763
133
+ 139999071.jpg, -1, -1, 25.68125
134
+ 1408118135.jpg, -1, -1, 44.5132743363
135
+ 143394138.jpg, -1, -1, 29.94375
136
+ 143622689.jpg, -1, -1, 62.7375
137
+ 1465282532.jpg, -1, -1, 35.6941964286
138
+ 1465706268.jpg, -1, -1, 59.78125
139
+ 1519650440.jpg, -1, -1, 53.156779661
140
+ 154270569.jpg, -1, -1, 70.4772727273
141
+ 156266461.jpg, -1, -1, 41.0125
142
+ 1571972975.jpg, -1, -1, 50.9004854369
143
+ 159144665.jpg, -1, -1, 74.15
144
+ 16079437.jpg, -1, -1, 53.0868644068
145
+ 163208499.jpg, -1, -1, 27.3181818182
146
+ 166528976.jpg, -1, -1, 62.25625
147
+ 16799807.jpg, -1, -1, 39.9030172414
148
+ 1748861184.jpg, -1, -1, 62.2457627119
149
+ 175326787.jpg, -1, -1, 35.2272727273
150
+ 17606027.jpg, -1, -1, 69.3863636364
151
+ 1794888173.jpg, -1, -1, 49.5911016949
152
+ 1797683075.jpg, -1, -1, 34.8897058824
153
+ 1802306299.jpg, -1, -1, 40.2236842105
154
+ 1807195948.jpg, -1, -1, 34.0
155
+ 1809848435.jpg, -1, -1, 24.7278761062
156
+ 1812202754.jpg, -1, -1, 78.4661016949
157
+ 18438855.jpg, -1, -1, 74.6207627119
158
+ 187499391.jpg, -1, -1, 46.8256302521
159
+ 1946326263.jpg, -1, -1, 67.0736607143
160
+ 1970440733.jpg, -1, -1, 55.875
161
+ 1988371580.jpg, -1, -1, 68.4935344828
162
+ 1990074154.jpg, -1, -1, 82.4868421053
163
+ 19943010.jpg, -1, -1, 77.2605042017
164
+ 2035975268.jpg, -1, -1, 15.1428571429
165
+ 203950866.jpg, -1, -1, 72.7542016807
166
+ 2041755502.jpg, -1, -1, 74.1064356436
167
+ 2055589084.jpg, -1, -1, 38.4806034483
168
+ 2059981058.jpg, -1, -1, 58.75
169
+ 2065722620.jpg, -1, -1, 67.9662162162
170
+ 2070418201.jpg, -1, -1, 46.8109756098
171
+ 2074092022.jpg, -1, -1, 68.2866972477
172
+ 2088433684.jpg, -1, -1, 33.8526785714
173
+ 2102208013.jpg, -1, -1, 81.017699115
174
+ 211603206.jpg, -1, -1, 65.474789916
175
+ 2126677316.jpg, -1, -1, 29.3990384615
176
+ 2138874780.jpg, -1, -1, 70.4779411765
177
+ 213906996.jpg, -1, -1, 86.6625
178
+ 2140758242.jpg, -1, -1, 69.0817757009
179
+ 2150291485.jpg, -1, -1, 50.8650442478
180
+ 2161809899.jpg, -1, -1, 57.7924107143
181
+ 2168323029.jpg, -1, -1, 53.05
182
+ 2181528959.jpg, -1, -1, 71.1949152542
183
+ 2191549592.jpg, -1, -1, 50.7682926829
184
+ 2191619969.jpg, -1, -1, 59.575
185
+ 2195703133.jpg, -1, -1, 52.3982300885
186
+ 2199234449.jpg, -1, -1, 45.2894736842
187
+ 2211658928.jpg, -1, -1, 72.7873134328
188
+ 2215695798.jpg, -1, -1, 63.0172413793
189
+ 2223002938.jpg, -1, -1, 63.2569444444
190
+ 2224984425.jpg, -1, -1, 67.4459459459
191
+ 2234064642.jpg, -1, -1, 44.125
192
+ 2235352888.jpg, -1, -1, 70.5774336283
193
+ 2236324282.jpg, -1, -1, 72.5
194
+ 2239987680.jpg, -1, -1, 29.9121621622
195
+ 224076897.jpg, -1, -1, 62.2857142857
196
+ 2243706700.jpg, -1, -1, 53.3986486486
197
+ 2255948084.jpg, -1, -1, 74.8644859813
198
+ 2266672191.jpg, -1, -1, 57.30625
199
+ 2280116666.jpg, -1, -1, 34.0711206897
200
+ 2280668053.jpg, -1, -1, 48.0907079646
201
+ 2285789022.jpg, -1, -1, 51.6
202
+ 2298963655.jpg, -1, -1, 72.5514018692
203
+ 2301461383.jpg, -1, -1, 57.25
204
+ 2313971368.jpg, -1, -1, 69.891025641
205
+ 2322493147.jpg, -1, -1, 68.0951327434
206
+ 2337030121.jpg, -1, -1, 74.2837837838
207
+ 2338832331.jpg, -1, -1, 55.5338983051
208
+ 234944177.jpg, -1, -1, 72.56875
209
+ 2349676135.jpg, -1, -1, 37.025
210
+ 2350031609.jpg, -1, -1, 40.0
211
+ 2362202039.jpg, -1, -1, 75.6730769231
212
+ 2366273919.jpg, -1, -1, 29.5066964286
213
+ 2372696116.jpg, -1, -1, 62.0353982301
214
+ 2383953966.jpg, -1, -1, 65.5526315789
215
+ 2409675838.jpg, -1, -1, 37.1890756303
216
+ 2415334988.jpg, -1, -1, 38.4094827586
217
+ 2418067534.jpg, -1, -1, 32.1281512605
218
+ 2422029745.jpg, -1, -1, 27.0486111111
219
+ 2448776548.jpg, -1, -1, 56.75625
220
+ 2459843879.jpg, -1, -1, 63.7
221
+ 2466673147.jpg, -1, -1, 69.5603448276
222
+ 2474265369.jpg, -1, -1, 40.6717391304
223
+ 2486040810.jpg, -1, -1, 57.5304347826
224
+ 2491353954.jpg, -1, -1, 47.2876106195
225
+ 2492228578.jpg, -1, -1, 31.1756756757
226
+ 2510458503.jpg, -1, -1, 74.3173913043
227
+ 2517718928.jpg, -1, -1, 73.0681818182
228
+ 2539818768.jpg, -1, -1, 40.1707317073
229
+ 2549004146.jpg, -1, -1, 41.7740384615
230
+ 2555644998.jpg, -1, -1, 66.55
231
+ 2557729909.jpg, -1, -1, 77.1280487805
232
+ 2568268182.jpg, -1, -1, 47.5586956522
233
+ 257457837.jpg, -1, -1, 38.74375
234
+ 2579041082.jpg, -1, -1, 63.5096153846
235
+ 2585752058.jpg, -1, -1, 58.6728971963
236
+ 2604683646.jpg, -1, -1, 58.371559633
237
+ 2612353606.jpg, -1, -1, 79.375
238
+ 2620467173.jpg, -1, -1, 53.6025641026
239
+ 2622466873.jpg, -1, -1, 57.9174311927
240
+ 2624809780.jpg, -1, -1, 57.2375
241
+ 2624882684.jpg, -1, -1, 70.0504201681
242
+ 2627264031.jpg, -1, -1, 74.5810810811
243
+ 2640025600.jpg, -1, -1, 47.2119565217
244
+ 2652397944.jpg, -1, -1, 70.3
245
+ 2657101675.jpg, -1, -1, 61.5504587156
246
+ 2659313316.jpg, -1, -1, 71.9345794393
247
+ 2676837522.jpg, -1, -1, 60.7720588235
248
+ 2679224314.jpg, -1, -1, 17.01875
249
+ 268066639.jpg, -1, -1, 54.1666666667
250
+ 2700852747.jpg, -1, -1, 55.511682243
251
+ 2713735764.jpg, -1, -1, 51.5683453237
252
+ 2720757001.jpg, -1, -1, 32.8541666667
253
+ 2722913532.jpg, -1, -1, 60.7935779817
254
+ 2725531194.jpg, -1, -1, 68.1785714286
255
+ 273181633.jpg, -1, -1, 68.6792035398
256
+ 2737304398.jpg, -1, -1, 53.875
257
+ 273844105.jpg, -1, -1, 79.7368421053
258
+ 2739007601.jpg, -1, -1, 51.2071428571
259
+ 274390572.jpg, -1, -1, 36.5169491525
260
+ 2746983219.jpg, -1, -1, 67.5789473684
261
+ 2748370097.jpg, -1, -1, 56.0252293578
262
+ 276590560.jpg, -1, -1, 40.1689189189
263
+ 2766412345.jpg, -1, -1, 37.1216216216
264
+ 2778039171.jpg, -1, -1, 68.8581081081
265
+ 2779185055.jpg, -1, -1, 49.9889380531
266
+ 2796774323.jpg, -1, -1, 78.4174107143
267
+ 2798808586.jpg, -1, -1, 62.1559633028
268
+ 2801033102.jpg, -1, -1, 43.4978991597
269
+ 2807762234.jpg, -1, -1, 70.4663865546
270
+ 2808286292.jpg, -1, -1, 72.3552631579
271
+ 28173586.jpg, -1, -1, 63.5862068966
272
+ 2817883212.jpg, -1, -1, 79.7695652174
273
+ 2818844013.jpg, -1, -1, 71.0471698113
274
+ 2819296433.jpg, -1, -1, 46.1956521739
275
+ 2839494557.jpg, -1, -1, 71.4369747899
276
+ 2844802224.jpg, -1, -1, 52.5067567568
277
+ 2851510265.jpg, -1, -1, 63.1399082569
278
+ 2852195444.jpg, -1, -1, 64.3509174312
279
+ 2856053454.jpg, -1, -1, 26.95
280
+ 2861346409.jpg, -1, -1, 62.5344036697
281
+ 2866097268.jpg, -1, -1, 46.0193965517
282
+ 2867718050.jpg, -1, -1, 31.86875
283
+ 2876226509.jpg, -1, -1, 38.3676470588
284
+ 2889734029.jpg, -1, -1, 74.6260504202
285
+ 289462174.jpg, -1, -1, 26.9695652174
286
+ 290161904.jpg, -1, -1, 68.925
287
+ 2907072293.jpg, -1, -1, 21.1666666667
288
+ 2910782740.jpg, -1, -1, 56.1155462185
289
+ 2932540851.jpg, -1, -1, 67.6239495798
290
+ 2943372707.jpg, -1, -1, 64.2033898305
291
+ 2952908030.jpg, -1, -1, 50.2179487179
292
+ 2954406623.jpg, -1, -1, 26.3739495798
293
+ 2960552821.jpg, -1, -1, 23.9565217391
294
+ 2962584804.jpg, -1, -1, 61.3326086957
295
+ 2964746234.jpg, -1, -1, 38.725
296
+ 2967177700.jpg, -1, -1, 52.4888392857
297
+ 296739390.jpg, -1, -1, 55.0833333333
298
+ 2974085281.jpg, -1, -1, 42.9551282051
299
+ 2976379229.jpg, -1, -1, 75.9491525424
300
+ 2977820017.jpg, -1, -1, 62.5855263158
301
+ 2982904736.jpg, -1, -1, 74.4469026549
302
+ 298392087.jpg, -1, -1, 64.2242990654
303
+ 2985434942.jpg, -1, -1, 49.125
304
+ 301104017.jpg, -1, -1, 72.43125
305
+ 3011247965.jpg, -1, -1, 55.2567567568
306
+ 3013640771.jpg, -1, -1, 70.9776785714
307
+ 3018181041.jpg, -1, -1, 49.2086956522
308
+ 3021659624.jpg, -1, -1, 53.18125
309
+ 3022463774.jpg, -1, -1, 71.1596638655
310
+ 3026154930.jpg, -1, -1, 56.61875
311
+ 302681636.jpg, -1, -1, 53.5762711864
312
+ 3050837263.jpg, -1, -1, 53.7702702703
313
+ 3070894795.jpg, -1, -1, 37.743697479
314
+ 3072804517.jpg, -1, -1, 63.1176470588
315
+ 3075864902.jpg, -1, -1, 19.0945378151
316
+ 3077228870.jpg, -1, -1, 34.3055555556
317
+ 3084516747.jpg, -1, -1, 30.6153846154
318
+ 3084860790.jpg, -1, -1, 12.1850961538
319
+ 3087855667.jpg, -1, -1, 45.5202702703
320
+ 3090534929.jpg, -1, -1, 37.0320512821
321
+ 3093435000.jpg, -1, -1, 73.4474789916
322
+ 310467976.jpg, -1, -1, 75.7352941176
323
+ 3117729596.jpg, -1, -1, 50.6513761468
324
+ 3118439925.jpg, -1, -1, 62.2432432432
325
+ 3118885310.jpg, -1, -1, 62.4428571429
326
+ 311996877.jpg, -1, -1, 45.0924369748
327
+ 3130211082.jpg, -1, -1, 76.7697368421
328
+ 3132085493.jpg, -1, -1, 68.4807692308
329
+ 3132669524.jpg, -1, -1, 28.5924369748
330
+ 3136368482.jpg, -1, -1, 46.6399082569
331
+ 3141055360.jpg, -1, -1, 54.6973684211
332
+ 3147927973.jpg, -1, -1, 55.9075630252
333
+ 3148698494.jpg, -1, -1, 47.1697247706
334
+ 3164597811.jpg, -1, -1, 68.3357142857
335
+ 3164921799.jpg, -1, -1, 57.2767857143
336
+ 3174536924.jpg, -1, -1, 54.7648305085
337
+ 3175529254.jpg, -1, -1, 55.873853211
338
+ 3177560540.jpg, -1, -1, 61.3
339
+ 3177839887.jpg, -1, -1, 78.4410377358
340
+ 3184407773.jpg, -1, -1, 39.6816037736
341
+ 3184831791.jpg, -1, -1, 71.1621621622
342
+ 3188292993.jpg, -1, -1, 77.9256756757
343
+ 3194036363.jpg, -1, -1, 67.15
344
+ 3197293330.jpg, -1, -1, 65.8786407767
345
+ 3206894685.jpg, -1, -1, 56.9428571429
346
+ 3207112247.jpg, -1, -1, 58.1659292035
347
+ 320987228.jpg, -1, -1, 83.4306722689
348
+ 3210385990.jpg, -1, -1, 70.3846153846
349
+ 3217227849.jpg, -1, -1, 48.6666666667
350
+ 3224295308.jpg, -1, -1, 61.0334821429
351
+ 3233213622.jpg, -1, -1, 65.474789916
352
+ 3241749396.jpg, -1, -1, 58.5943396226
353
+ 3243501762.jpg, -1, -1, 37.3445945946
354
+ 3252078614.jpg, -1, -1, 47.81875
355
+ 325223545.jpg, -1, -1, 67.1434782609
356
+ 3253636665.jpg, -1, -1, 77.79375
357
+ 3254432034.jpg, -1, -1, 60.5525210084
358
+ 3256723994.jpg, -1, -1, 54.9933035714
359
+ 326195053.jpg, -1, -1, 50.9194915254
360
+ 3265568572.jpg, -1, -1, 70.1025641026
361
+ 3275523745.jpg, -1, -1, 57.5304347826
362
+ 3277186380.jpg, -1, -1, 54.2754237288
363
+ 3296835726.jpg, -1, -1, 66.0337837838
364
+ 3297887749.jpg, -1, -1, 69.608974359
365
+ 3301587792.jpg, -1, -1, 43.4174528302
366
+ 3302975736.jpg, -1, -1, 57.7456521739
367
+ 3303080079.jpg, -1, -1, 80.527027027
368
+ 3305968848.jpg, -1, -1, 23.7064220183
369
+ 3307215666.jpg, -1, -1, 50.5705128205
370
+ 3308341801.jpg, -1, -1, 27.8474576271
371
+ 3311728409.jpg, -1, -1, 75.4615384615
372
+ 3312331811.jpg, -1, -1, 18.9601769912
373
+ 3315383233.jpg, -1, -1, 21.2905405405
374
+ 3323033903.jpg, -1, -1, 26.2702702703
375
+ 3326988489.jpg, -1, -1, 57.375
376
+ 3330470420.jpg, -1, -1, 54.6628440367
377
+ 3335618194.jpg, -1, -1, 75.4101941748
378
+ 3338894282.jpg, -1, -1, 63.7311320755
379
+ 3340734956.jpg, -1, -1, 63.7995689655
380
+ 3341085556.jpg, -1, -1, 20.6032110092
381
+ 3346416285.jpg, -1, -1, 57.2362385321
382
+ 3347823880.jpg, -1, -1, 67.0
383
+ 3355200103.jpg, -1, -1, 75.3645833333
384
+ 3355464174.jpg, -1, -1, 60.3282608696
385
+ 3362731032.jpg, -1, -1, 76.69375
386
+ 3364722083.jpg, -1, -1, 44.5334821429
387
+ 3376076568.jpg, -1, -1, 56.9625
388
+ 3381345467.jpg, -1, -1, 46.6305309735
389
+ 3383701765.jpg, -1, -1, 65.6621621622
390
+ 3388712525.jpg, -1, -1, 48.1216216216
391
+ 3390605400.jpg, -1, -1, 25.0688073394
392
+ 342221498.jpg, -1, -1, 32.3361344538
393
+ 3427902428.jpg, -1, -1, 71.2983193277
394
+ 3427902514.jpg, -1, -1, 69.3130841121
395
+ 3433315492.jpg, -1, -1, 49.6238938053
396
+ 3444483519.jpg, -1, -1, 66.1680672269
397
+ 3446366493.jpg, -1, -1, 67.2313084112
398
+ 3450753884.jpg, -1, -1, 69.2632743363
399
+ 3452963187.jpg, -1, -1, 25.3040540541
400
+ 3456840631.jpg, -1, -1, 76.8214285714
401
+ 3458045907.jpg, -1, -1, 60.5178571429
402
+ 3461062752.jpg, -1, -1, 44.5540540541
403
+ 3462395913.jpg, -1, -1, 49.5424107143
404
+ 3463146453.jpg, -1, -1, 68.6805555556
405
+ 3463376543.jpg, -1, -1, 56.3055555556
406
+ 3466610103.jpg, -1, -1, 70.3559322034
407
+ 3469092597.jpg, -1, -1, 68.1148648649
408
+ 347162205.jpg, -1, -1, 31.5425531915
409
+ 34796696.jpg, -1, -1, 66.1538461538
410
+ 348162485.jpg, -1, -1, 26.3046218487
411
+ 3483143101.jpg, -1, -1, 6.06092436975
412
+ 3483540691.jpg, -1, -1, 46.1891891892
413
+ 3487085790.jpg, -1, -1, 69.0442477876
414
+ 3487701269.jpg, -1, -1, 69.5631067961
415
+ 3489731995.jpg, -1, -1, 66.0043103448
416
+ 3497202348.jpg, -1, -1, 55.5540540541
417
+ 35002160.jpg, -1, -1, 68.468220339
418
+ 3503300231.jpg, -1, -1, 76.41875
419
+ 3505262206.jpg, -1, -1, 50.4269911504
420
+ 3505627590.jpg, -1, -1, 40.703125
421
+ 3510063780.jpg, -1, -1, 46.052173913
422
+ 3515663938.jpg, -1, -1, 73.4821428571
423
+ 351997474.jpg, -1, -1, 50.6586538462
424
+ 352602458.jpg, -1, -1, 29.552173913
425
+ 352744670.jpg, -1, -1, 14.3783783784
426
+ 353228664.jpg, -1, -1, 66.0508849558
427
+ 3532374814.jpg, -1, -1, 68.8245192308
428
+ 3533949779.jpg, -1, -1, 63.6415929204
429
+ 3537044665.jpg, -1, -1, 34.2796610169
430
+ 3542002438.jpg, -1, -1, 47.0336134454
431
+ 3547073718.jpg, -1, -1, 77.2942477876
432
+ 3563134047.jpg, -1, -1, 70.0353773585
433
+ 357641600.jpg, -1, -1, 21.3130252101
434
+ 358433606.jpg, -1, -1, 27.0607476636
435
+ 3585032307.jpg, -1, -1, 48.4571428571
436
+ 3587076357.jpg, -1, -1, 62.9108695652
437
+ 3587337138.jpg, -1, -1, 44.5862831858
438
+ 3596611917.jpg, -1, -1, 54.4152542373
439
+ 3597095413.jpg, -1, -1, 76.0588235294
440
+ 3597199001.jpg, -1, -1, 71.3347457627
441
+ 359723983.jpg, -1, -1, 72.0934782609
442
+ 3606669688.jpg, -1, -1, 60.2831858407
443
+ 3608137140.jpg, -1, -1, 70.4932432432
444
+ 3609473273.jpg, -1, -1, 77.4358974359
445
+ 3615125120.jpg, -1, -1, 67.6991525424
446
+ 3615267731.jpg, -1, -1, 70.3584070796
447
+ 3616806128.jpg, -1, -1, 63.3669724771
448
+ 3618474326.jpg, -1, -1, 69.0810810811
449
+ 3618795157.jpg, -1, -1, 83.3443396226
450
+ 3620726.jpg, -1, -1, 24.224789916
451
+ 3632417985.jpg, -1, -1, 4.16741071429
452
+ 3639490630.jpg, -1, -1, 46.2375
453
+ 3645793167.jpg, -1, -1, 71.7257281553
454
+ 3648316650.jpg, -1, -1, 65.0321100917
455
+ 3651951998.jpg, -1, -1, 56.8276699029
456
+ 3663320781.jpg, -1, -1, 75.9612068966
457
+ 3663960871.jpg, -1, -1, 56.856557377
458
+ 3665983083.jpg, -1, -1, 78.1486486486
459
+ 3671193503.jpg, -1, -1, 60.1621621622
460
+ 3674117315.jpg, -1, -1, 61.5723684211
461
+ 3680743288.jpg, -1, -1, 34.6292372881
462
+ 3683418770.jpg, -1, -1, 55.9533898305
463
+ 3689126503.jpg, -1, -1, 74.4174311927
464
+ 3705698114.jpg, -1, -1, 53.7123893805
465
+ 3705884386.jpg, -1, -1, 46.2673913043
466
+ 3712529193.jpg, -1, -1, 36.7037815126
467
+ 3713010621.jpg, -1, -1, 44.0178571429
468
+ 3713773458.jpg, -1, -1, 64.7572815534
469
+ 3715014568.jpg, -1, -1, 48.9391891892
470
+ 3721179887.jpg, -1, -1, 43.5878378378
471
+ 3722409963.jpg, -1, -1, 34.1386554622
472
+ 3722623767.jpg, -1, -1, 51.1991525424
473
+ 3727572481.jpg, -1, -1, 52.5817757009
474
+ 3733759776.jpg, -1, -1, 68.9888392857
475
+ 3734577194.jpg, -1, -1, 66.6108490566
476
+ 3736358849.jpg, -1, -1, 31.8
477
+ 3736907392.jpg, -1, -1, 65.3207964602
478
+ 375084308.jpg, -1, -1, 59.0527522936
479
+ 3753143338.jpg, -1, -1, 11.51875
480
+ 3755358004.jpg, -1, -1, 69.3040540541
481
+ 3758843972.jpg, -1, -1, 64.343220339
482
+ 3759763464.jpg, -1, -1, 48.5776699029
483
+ 3761173384.jpg, -1, -1, 53.4464285714
484
+ 3767341492.jpg, -1, -1, 35.275
485
+ 3767933394.jpg, -1, -1, 75.0943396226
486
+ 3783245891.jpg, -1, -1, 52.5974576271
487
+ 3783568353.jpg, -1, -1, 60.6152173913
488
+ 3788379941.jpg, -1, -1, 74.8849557522
489
+ 379437662.jpg, -1, -1, 68.6651376147
490
+ 38056572.jpg, -1, -1, 64.8326271186
491
+ 3810011859.jpg, -1, -1, 36.497706422
492
+ 3812031514.jpg, -1, -1, 70.9851694915
493
+ 3812926720.jpg, -1, -1, 70.3302752294
494
+ 3815063597.jpg, -1, -1, 47.3325892857
495
+ 3815098207.jpg, -1, -1, 74.7297297297
496
+ 3817779484.jpg, -1, -1, 50.7426470588
497
+ 3824413541.jpg, -1, -1, 76.3879310345
498
+ 3825837186.jpg, -1, -1, 70.0320512821
499
+ 3827560087.jpg, -1, -1, 73.0260869565
500
+ 3839953318.jpg, -1, -1, 57.281779661
501
+ 3841233055.jpg, -1, -1, 43.0981308411
502
+ 3841305755.jpg, -1, -1, 61.3368644068
503
+ 3845656628.jpg, -1, -1, 62.5855263158
504
+ 3847965007.jpg, -1, -1, 43.0
505
+ 3851672801.jpg, -1, -1, 75.9189189189
506
+ 3851941327.jpg, -1, -1, 55.495412844
507
+ 3853359984.jpg, -1, -1, 61.4471153846
508
+ 3854279335.jpg, -1, -1, 30.5558252427
509
+ 3855419676.jpg, -1, -1, 76.2905405405
510
+ 3860848792.jpg, -1, -1, 43.6585365854
511
+ 3867441709.jpg, -1, -1, 65.9048672566
512
+ 386746008.jpg, -1, -1, 62.4243697479
513
+ 387003357.jpg, -1, -1, 31.7702702703
514
+ 3870343367.jpg, -1, -1, 62.4705882353
515
+ 3871801313.jpg, -1, -1, 50.6398305085
516
+ 3874298550.jpg, -1, -1, 61.84375
517
+ 3874382715.jpg, -1, -1, 32.5721153846
518
+ 3880140639.jpg, -1, -1, 34.5840707965
519
+ 3890811565.jpg, -1, -1, 64.1575630252
520
+ 3895433023.jpg, -1, -1, 58.1760869565
521
+ 390226070.jpg, -1, -1, 72.2566371681
522
+ 3905518312.jpg, -1, -1, 39.3035714286
523
+ 3908866899.jpg, -1, -1, 38.2777777778
524
+ 3917849504.jpg, -1, -1, 49.4956521739
525
+ 3919380535.jpg, -1, -1, 69.1361607143
526
+ 392103652.jpg, -1, -1, 33.3707627119
527
+ 3921589638.jpg, -1, -1, 72.8407079646
528
+ 3922421993.jpg, -1, -1, 72.0416666667
529
+ 3930941584.jpg, -1, -1, 36.4527027027
530
+ 3931210821.jpg, -1, -1, 55.2567567568
531
+ 3934821151.jpg, -1, -1, 69.5683962264
532
+ 3935177366.jpg, -1, -1, 21.0256410256
533
+ 3938367619.jpg, -1, -1, 50.2155172414
534
+ 3949143151.jpg, -1, -1, 68.99375
535
+ 395315549.jpg, -1, -1, 52.1875
536
+ 39540491.jpg, -1, -1, 73.3088235294
537
+ 3960444662.jpg, -1, -1, 73.7375
538
+ 396221760.jpg, -1, -1, 61.5948275862
539
+ 3969622447.jpg, -1, -1, 45.3737864078
540
+ 3975469304.jpg, -1, -1, 82.3851351351
541
+ 3980206588.jpg, -1, -1, 63.6116071429
542
+ 3983499964.jpg, -1, -1, 67.1542056075
543
+ 3984248064.jpg, -1, -1, 24.602173913
544
+ 3985027264.jpg, -1, -1, 40.0260869565
545
+ 3985258920.jpg, -1, -1, 52.3877118644
546
+ 3993808694.jpg, -1, -1, 40.875
547
+ 3999106249.jpg, -1, -1, 50.2828947368
548
+ 4003392015.jpg, -1, -1, 37.7905405405
549
+ 4004417944.jpg, -1, -1, 30.8475609756
550
+ 4016051317.jpg, -1, -1, 32.3348623853
551
+ 4018386659.jpg, -1, -1, 58.0688073394
552
+ 4018879156.jpg, -1, -1, 38.2867647059
553
+ 4028266820.jpg, -1, -1, 65.9423076923
554
+ 4039710407.jpg, -1, -1, 70.85
555
+ 4040650709.jpg, -1, -1, 45.0
556
+ 4042188530.jpg, -1, -1, 39.4495412844
557
+ 4042433513.jpg, -1, -1, 49.4976635514
558
+ 4042572733.jpg, -1, -1, 41.4324324324
559
+ 4054847919.jpg, -1, -1, 76.9951923077
560
+ 4079545582.jpg, -1, -1, 58.8243243243
561
+ 4081025902.jpg, -1, -1, 43.9868421053
562
+ 4089514101.jpg, -1, -1, 74.1548672566
563
+ 4094850958.jpg, -1, -1, 29.2638888889
564
+ 4095160762.jpg, -1, -1, 65.8318584071
565
+ 409597917.jpg, -1, -1, 40.251953125
566
+ 4098700616.jpg, -1, -1, 65.5897435897
567
+ 4100096331.jpg, -1, -1, 18.5405405405
568
+ 4107362083.jpg, -1, -1, 80.75
569
+ 4109408743.jpg, -1, -1, 73.9915254237
570
+ 411025625.jpg, -1, -1, 71.8931034483
571
+ 411090819.jpg, -1, -1, 16.9
572
+ 4113685341.jpg, -1, -1, 7.51680672269
573
+ 4126410943.jpg, -1, -1, 40.0065789474
574
+ 4132747677.jpg, -1, -1, 62.6458333333
575
+ 4139461254.jpg, -1, -1, 66.2142857143
576
+ 4139991387.jpg, -1, -1, 56.7413043478
577
+ 414385138.jpg, -1, -1, 68.7408256881
578
+ 4144540155.jpg, -1, -1, 62.9121621622
579
+ 4147791219.jpg, -1, -1, 58.2102803738
580
+ 4149265699.jpg, -1, -1, 66.6071428571
581
+ 4156345687.jpg, -1, -1, 75.193877551
582
+ 4156652933.jpg, -1, -1, 62.5576923077
583
+ 4157689684.jpg, -1, -1, 47.1415929204
584
+ 4167927417.jpg, -1, -1, 48.3846153846
585
+ 4169643886.jpg, -1, -1, 48.9111111111
586
+ 4172609703.jpg, -1, -1, 39.0769230769
587
+ 4172859607.jpg, -1, -1, 57.4121621622
588
+ 4172951303.jpg, -1, -1, 56.1216814159
589
+ 4176378029.jpg, -1, -1, 47.95
590
+ 4178432884.jpg, -1, -1, 48.4551282051
591
+ 4185039279.jpg, -1, -1, 72.7542016807
592
+ 4187118157.jpg, -1, -1, 76.1034482759
593
+ 4191882783.jpg, -1, -1, 45.025
594
+ 4196316000.jpg, -1, -1, 31.73125
595
+ 4206098907.jpg, -1, -1, 43.7867647059
596
+ 4213244648.jpg, -1, -1, 66.9282608696
597
+ 4224633098.jpg, -1, -1, 80.2146017699
598
+ 4236732050.jpg, -1, -1, 67.5606796117
599
+ 4239526963.jpg, -1, -1, 72.43125
600
+ 4242548000.jpg, -1, -1, 64.7572815534
601
+ 4243562466.jpg, -1, -1, 67.1398305085
602
+ 4243875190.jpg, -1, -1, 69.1949541284
603
+ 4244935320.jpg, -1, -1, 64.1575630252
604
+ 42490373.jpg, -1, -1, 62.9461206897
605
+ 4255841453.jpg, -1, -1, 61.0540540541
606
+ 4256128407.jpg, -1, -1, 31.4891304348
607
+ 4257293580.jpg, -1, -1, 26.9978991597
608
+ 4259043215.jpg, -1, -1, 64.5529661017
609
+ 4260146768.jpg, -1, -1, 69.6490825688
610
+ 4264601430.jpg, -1, -1, 73.8869565217
611
+ 4265877256.jpg, -1, -1, 50.2268211921
612
+ 4275452644.jpg, -1, -1, 66.8526785714
613
+ 4281583432.jpg, -1, -1, 62.66875
614
+ 4286953087.jpg, -1, -1, 35.3983050847
615
+ 4296647005.jpg, -1, -1, 72.3428571429
616
+ 4297599043.jpg, -1, -1, 63.7
617
+ 4298067776.jpg, -1, -1, 72.7
618
+ 4310499723.jpg, -1, -1, 52.5086956522
619
+ 4312794040.jpg, -1, -1, 62.9789915966
620
+ 4313162470.jpg, -1, -1, 67.2079831933
621
+ 4315078660.jpg, -1, -1, 41.6207627119
622
+ 4316201498.jpg, -1, -1, 67.2946428571
623
+ 4323755897.jpg, -1, -1, 61.4256756757
624
+ 4324984866.jpg, -1, -1, 63.3419811321
625
+ 433731087.jpg, -1, -1, 24.325
626
+ 434127863.jpg, -1, -1, 58.3080357143
627
+ 4346999174.jpg, -1, -1, 72.373853211
628
+ 4353430545.jpg, -1, -1, 27.2697368421
629
+ 4353485101.jpg, -1, -1, 32.3938053097
630
+ 4359682859.jpg, -1, -1, 78.1913043478
631
+ 4360105019.jpg, -1, -1, 29.4652317881
632
+ 43749901.jpg, -1, -1, 37.3559322034
633
+ 4378419360.jpg, -1, -1, 62.6857798165
634
+ 4385399670.jpg, -1, -1, 50.4294871795
635
+ 4389836139.jpg, -1, -1, 37.5357142857
636
+ 4390875397.jpg, -1, -1, 56.0252293578
637
+ 4391517237.jpg, -1, -1, 54.8899082569
638
+ 4393459228.jpg, -1, -1, 75.8392857143
639
+ 4396087406.jpg, -1, -1, 59.4189189189
640
+ 4396643142.jpg, -1, -1, 72.925
641
+ 44009500.jpg, -1, -1, 5.5
642
+ 4401185827.jpg, -1, -1, 46.0953389831
643
+ 440297710.jpg, -1, -1, 57.5277777778
644
+ 4410900135.jpg, -1, -1, 25.8243243243
645
+ 4422726404.jpg, -1, -1, 68.1049107143
646
+ 4423499380.jpg, -1, -1, 59.4491525424
647
+ 4424481742.jpg, -1, -1, 73.0641025641
648
+ 4429610219.jpg, -1, -1, 74.2628205128
649
+ 4433497740.jpg, -1, -1, 60.7212389381
650
+ 4434357623.jpg, -1, -1, 71.5444915254
651
+ 4436568659.jpg, -1, -1, 67.0705128205
652
+ 4443705808.jpg, -1, -1, 34.6513157895
653
+ 4456369143.jpg, -1, -1, 62.3855932203
654
+ 4464186761.jpg, -1, -1, 77.725
655
+ 4468108530.jpg, -1, -1, 60.695754717
656
+ 4472735511.jpg, -1, -1, 51.925
657
+ 4476029726.jpg, -1, -1, 67.2972972973
658
+ 4483042022.jpg, -1, -1, 65.7364864865
659
+ 448305008.jpg, -1, -1, 66.3067226891
660
+ 4484527101.jpg, -1, -1, 66.325
661
+ 4491574274.jpg, -1, -1, 71.3421052632
662
+ 4492079346.jpg, -1, -1, 60.869266055
663
+ 4492298311.jpg, -1, -1, 67.3466386555
664
+ 4502409601.jpg, -1, -1, 24.6286407767
665
+ 4504786016.jpg, -1, -1, 45.3163716814
666
+ 4509028861.jpg, -1, -1, 54.0044247788
667
+ 4511318356.jpg, -1, -1, 72.1262135922
668
+ 4513142635.jpg, -1, -1, 42.3973214286
669
+ 4518358107.jpg, -1, -1, 39.8142857143
670
+ 4519546786.jpg, -1, -1, 59.3956521739
671
+ 4526077589.jpg, -1, -1, 65.1974789916
672
+ 4526456137.jpg, -1, -1, 72.5
673
+ 45323583.jpg, -1, -1, 10.0256410256
674
+ 4538412115.jpg, -1, -1, 17.7079831933
675
+ 4542866851.jpg, -1, -1, 45.2563559322
676
+ 4551121129.jpg, -1, -1, 80.9173913043
677
+ 4562998069.jpg, -1, -1, 72.1944444444
678
+ 456472871.jpg, -1, -1, 62.5803571429
679
+ 4567181047.jpg, -1, -1, 69.6490825688
680
+ 4567762490.jpg, -1, -1, 73.2756410256
681
+ 4568018818.jpg, -1, -1, 73.0127118644
682
+ 4570575045.jpg, -1, -1, 57.2941176471
683
+ 4580698398.jpg, -1, -1, 34.0711206897
684
+ 4583413420.jpg, -1, -1, 66.9243119266
685
+ 4589538888.jpg, -1, -1, 44.8
686
+ 4597162839.jpg, -1, -1, 74.7297297297
687
+ 460024423.jpg, -1, -1, 71.95
688
+ 4605589887.jpg, -1, -1, 62.5630252101
689
+ 4611014636.jpg, -1, -1, 73.0452586207
690
+ 4614592470.jpg, -1, -1, 35.2110091743
691
+ 4615199637.jpg, -1, -1, 35.1565420561
692
+ 4617747913.jpg, -1, -1, 48.0691964286
693
+ 4619633197.jpg, -1, -1, 77.4054054054
694
+ 4620749659.jpg, -1, -1, 69.1887755102
695
+ 4626914740.jpg, -1, -1, 69.7991071429
696
+ 4631127472.jpg, -1, -1, 45.504587156
697
+ 463525569.jpg, -1, -1, 69.0435779817
698
+ 4638688609.jpg, -1, -1, 72.6155462185
699
+ 4644913731.jpg, -1, -1, 58.68125
700
+ 4656808355.jpg, -1, -1, 58.1043478261
701
+ 4664658605.jpg, -1, -1, 61.3677884615
702
+ 4668889560.jpg, -1, -1, 66.5619469027
703
+ 4670475171.jpg, -1, -1, 78.1617647059
704
+ 4671787703.jpg, -1, -1, 63.4642857143
705
+ 4679147725.jpg, -1, -1, 70.0200892857
706
+ 4680082599.jpg, -1, -1, 77.6592920354
707
+ 4681329966.jpg, -1, -1, 72.5
708
+ 468518360.jpg, -1, -1, 64.880733945
709
+ 468764619.jpg, -1, -1, 58.6065217391
710
+ 4689181626.jpg, -1, -1, 49.7791262136
711
+ 4696239138.jpg, -1, -1, 68.9411764706
712
+ 4699352200.jpg, -1, -1, 71.9345794393
713
+ 4699474732.jpg, -1, -1, 53.3004587156
714
+ 4704859608.jpg, -1, -1, 34.7279411765
715
+ 4711101546.jpg, -1, -1, 61.5504587156
716
+ 4719295519.jpg, -1, -1, 49.3108108108
717
+ 4727744330.jpg, -1, -1, 74.2012711864
718
+ 4728646090.jpg, -1, -1, 21.296460177
719
+ 4731015701.jpg, -1, -1, 54.4078947368
720
+ 4731330377.jpg, -1, -1, 51.7375
721
+ 4732322374.jpg, -1, -1, 75.8445945946
722
+ 4733189524.jpg, -1, -1, 53.7435897436
723
+ 473881316.jpg, -1, -1, 16.984375
724
+ 4748697171.jpg, -1, -1, 41.2794117647
725
+ 4764540278.jpg, -1, -1, 65.4618644068
726
+ 4765706226.jpg, -1, -1, 72.0887850467
727
+ 4770169005.jpg, -1, -1, 70.3147321429
728
+ 4770991170.jpg, -1, -1, 60.925
729
+ 4781142781.jpg, -1, -1, 56.1631355932
730
+ 4792717107.jpg, -1, -1, 70.4816513761
731
+ 4794011269.jpg, -1, -1, 70.2860169492
732
+ 4794780770.jpg, -1, -1, 62.1758474576
733
+ 4801184295.jpg, -1, -1, 72.7972972973
734
+ 4804088480.jpg, -1, -1, 66.2216981132
735
+ 4806281586.jpg, -1, -1, 57.5485436893
736
+ 4815901200.jpg, -1, -1, 81.0818965517
737
+ 4822559131.jpg, -1, -1, 51.9347826087
738
+ 4822965011.jpg, -1, -1, 63.3728448276
739
+ 4831070004.jpg, -1, -1, 11.9294871795
740
+ 4835256174.jpg, -1, -1, 79.7433035714
741
+ 4835865863.jpg, -1, -1, 56.6875
742
+ 4841257153.jpg, -1, -1, 54.6949152542
743
+ 4845997437.jpg, -1, -1, 49.6518691589
744
+ 4848345613.jpg, -1, -1, 75.7435897436
745
+ 4851066149.jpg, -1, -1, 34.6875
746
+ 485174290.jpg, -1, -1, 67.2972972973
747
+ 4852617544.jpg, -1, -1, 72.2063106796
748
+ 4858527470.jpg, -1, -1, 48.50625
749
+ 4862383971.jpg, -1, -1, 65.7547169811
750
+ 4863093063.jpg, -1, -1, 41.3173913043
751
+ 4867994945.jpg, -1, -1, 21.0256410256
752
+ 4871847706.jpg, -1, -1, 72.5743243243
753
+ 4873047491.jpg, -1, -1, 74.5448717949
754
+ 4875935356.jpg, -1, -1, 58.6829268293
755
+ 4875993491.jpg, -1, -1, 63.4426605505
756
+ 4885160336.jpg, -1, -1, 72.1038135593
757
+ 4885928195.jpg, -1, -1, 52.2217391304
758
+ 4886153819.jpg, -1, -1, 62.95
759
+ 4890155523.jpg, -1, -1, 74.7582781457
760
+ 4891468275.jpg, -1, -1, 77.7027027027
761
+ 489975423.jpg, -1, -1, 71.675
762
+ 4903384785.jpg, -1, -1, 63.2094594595
763
+ 4903418699.jpg, -1, -1, 49.33125
764
+ 4922860865.jpg, -1, -1, 47.0379464286
765
+ 4922892935.jpg, -1, -1, 74.3411016949
766
+ 4923537063.jpg, -1, -1, 56.3279816514
767
+ 4925153617.jpg, -1, -1, 69.322815534
768
+ 4925167261.jpg, -1, -1, 60.968487395
769
+ 4926787829.jpg, -1, -1, 55.1939655172
770
+ 4933863195.jpg, -1, -1, 48.7162162162
771
+ 4938188075.jpg, -1, -1, 47.4418103448
772
+ 4941401132.jpg, -1, -1, 76.5758928571
773
+ 4947500856.jpg, -1, -1, 73.3276699029
774
+ 4947572555.jpg, -1, -1, 69.9027777778
775
+ 4948021193.jpg, -1, -1, 62.6652542373
776
+ 4948125167.jpg, -1, -1, 63.90625
777
+ 4955052105.jpg, -1, -1, 61.6195652174
778
+ 4955674278.jpg, -1, -1, 59.1299342105
779
+ 4962117985.jpg, -1, -1, 32.3919491525
780
+ 4967412135.jpg, -1, -1, 67.5789473684
781
+ 4969822064.jpg, -1, -1, 56.1766055046
782
+ 497163401.jpg, -1, -1, 73.7118644068
783
+ 4972553952.jpg, -1, -1, 65.5878378378
784
+ 4980829250.jpg, -1, -1, 26.7288135593
785
+ 4981035833.jpg, -1, -1, 69.925
786
+ 4981184708.jpg, -1, -1, 55.5887850467
787
+ 4982428461.jpg, -1, -1, 65.8564356436
788
+ 4985465789.jpg, -1, -1, 61.6033834586
789
+ 4986076377.jpg, -1, -1, 69.55
790
+ 4986510349.jpg, -1, -1, 21.4152542373
791
+ 4986682017.jpg, -1, -1, 13.5677570093
792
+ 49874259.jpg, -1, -1, 62.4318181818
793
+ 4995119944.jpg, -1, -1, 73.6155660377
794
+ 5001805928.jpg, -1, -1, 40.1689189189
795
+ 5001866078.jpg, -1, -1, 65.8051724138
796
+ 5005558786.jpg, -1, -1, 71.7142857143
797
+ 5005908077.jpg, -1, -1, 64.063559322
798
+ 5014526130.jpg, -1, -1, 69.9154135338
799
+ 5016657118.jpg, -1, -1, 49.4454887218
800
+ 5017267795.jpg, -1, -1, 61.6195652174
801
+ 5017618497.jpg, -1, -1, 65.1822033898
802
+ 502182808.jpg, -1, -1, 71.7683486239
803
+ 5027882580.jpg, -1, -1, 78.6513157895
804
+ 5028389463.jpg, -1, -1, 48.8808411215
805
+ 503291895.jpg, -1, -1, 64.391025641
806
+ 5035645781.jpg, -1, -1, 57.7875
807
+ 5036262446.jpg, -1, -1, 68.2336448598
808
+ 5037503230.jpg, -1, -1, 61.9390756303
809
+ 5038604558.jpg, -1, -1, 78.222972973
810
+ 5041593593.jpg, -1, -1, 47.8986486486
811
+ 5044457387.jpg, -1, -1, 64.294
812
+ 5050399849.jpg, -1, -1, 54.5935114504
813
+ 5051600612.jpg, -1, -1, 63.08125
814
+ 5055213857.jpg, -1, -1, 48.8194444444
815
+ 5062110567.jpg, -1, -1, 48.6184210526
816
+ 5064176245.jpg, -1, -1, 75.1756756757
817
+ 5073500906.jpg, -1, -1, 58.825
818
+ 5075275205.jpg, -1, -1, 76.00625
819
+ 5077869968.jpg, -1, -1, 42.8289473684
820
+ 5079448176.jpg, -1, -1, 68.8177966102
821
+ 5082407583.jpg, -1, -1, 56.6592465753
822
+ 5090174589.jpg, -1, -1, 30.49375
823
+ 5090549164.jpg, -1, -1, 66.3599137931
824
+ 5090803058.jpg, -1, -1, 54.3303571429
825
+ 5094262203.jpg, -1, -1, 68.7331932773
826
+ 5097068931.jpg, -1, -1, 82.7361111111
827
+ 5101108569.jpg, -1, -1, 45.1195652174
828
+ 5103528737.jpg, -1, -1, 33.1897321429
829
+ 5115371206.jpg, -1, -1, 57.3119266055
830
+ 5120849681.jpg, -1, -1, 35.9642857143
831
+ 5129172.jpg, -1, -1, 18.5399159664
832
+ 5135958105.jpg, -1, -1, 47.6995412844
833
+ 5137221601.jpg, -1, -1, 42.5320512821
834
+ 5140673192.jpg, -1, -1, 57.561440678
835
+ 5142063764.jpg, -1, -1, 40.8181818182
836
+ 514498047.jpg, -1, -1, 56.2455357143
837
+ 5146611755.jpg, -1, -1, 48.6495327103
838
+ 5150999709.jpg, -1, -1, 41.5567226891
839
+ 5153880006.jpg, -1, -1, 74.9357142857
840
+ 5157059497.jpg, -1, -1, 61.8961864407
841
+ 5164262533.jpg, -1, -1, 64.3716814159
842
+ 5166185394.jpg, -1, -1, 62.8
843
+ 5166352802.jpg, -1, -1, 53.8392857143
844
+ 5166736586.jpg, -1, -1, 79.2692307692
845
+ 5167059215.jpg, -1, -1, 75.7798165138
846
+ 5170785709.jpg, -1, -1, 65.5212264151
847
+ 5172565267.jpg, -1, -1, 41.6858974359
848
+ 5172812012.jpg, -1, -1, 56.61875
849
+ 5176681750.jpg, -1, -1, 67.1386554622
850
+ 5179986164.jpg, -1, -1, 50.9805825243
851
+ 5183223776.jpg, -1, -1, 69.3771186441
852
+ 5184506144.jpg, -1, -1, 81.5588235294
853
+ 5185255513.jpg, -1, -1, 72.8125
854
+ 5189289022.jpg, -1, -1, 77.3486842105
855
+ 5193757906.jpg, -1, -1, 59.575
856
+ 5195745450.jpg, -1, -1, 56.4036697248
857
+ 5197644731.jpg, -1, -1, 56.570754717
858
+ 5209757780.jpg, -1, -1, 58.1207627119
859
+ 5216419230.jpg, -1, -1, 71.8310810811
860
+ 5216870847.jpg, -1, -1, 52.7434210526
861
+ 5217550500.jpg, -1, -1, 29.7193396226
862
+ 5221875216.jpg, -1, -1, 47.9830508475
863
+ 5223615087.jpg, -1, -1, 74.4676724138
864
+ 5233564371.jpg, -1, -1, 60.0897435897
865
+ 5236886996.jpg, -1, -1, 73.0401785714
866
+ 5237114997.jpg, -1, -1, 31.8508403361
867
+ 5241841036.jpg, -1, -1, 77.51875
868
+ 5249052587.jpg, -1, -1, 57.6313559322
869
+ 5251948512.jpg, -1, -1, 71.3860759494
870
+ 5252606224.jpg, -1, -1, 63.3155339806
871
+ 5258392332.jpg, -1, -1, 67.675
872
+ 526016794.jpg, -1, -1, 45.1703539823
873
+ 5261364718.jpg, -1, -1, 61.6527777778
874
+ 526555735.jpg, -1, -1, 50.1972477064
875
+ 5266542779.jpg, -1, -1, 64.502293578
876
+ 526762082.jpg, -1, -1, 67.8319327731
877
+ 5269040313.jpg, -1, -1, 12.2433628319
878
+ 5269333343.jpg, -1, -1, 79.15
879
+ 5272496469.jpg, -1, -1, 34.9491150442
880
+ 5276700957.jpg, -1, -1, 42.1800847458
881
+ 5286481593.jpg, -1, -1, 65.6004464286
882
+ 5288273177.jpg, -1, -1, 58.3608490566
883
+ 528858440.jpg, -1, -1, 36.5826086957
884
+ 5292802931.jpg, -1, -1, 40.4077669903
885
+ 5293214322.jpg, -1, -1, 72.0609243697
886
+ 5297588508.jpg, -1, -1, 53.1283185841
887
+ 5298103951.jpg, -1, -1, 40.9121621622
888
+ 5305142351.jpg, -1, -1, 71.9646017699
889
+ 5305438208.jpg, -1, -1, 45.8855932203
890
+ 5308844734.jpg, -1, -1, 74.8816964286
891
+ 5309442831.jpg, -1, -1, 73.5131578947
892
+ 5309538408.jpg, -1, -1, 57.7094594595
893
+ 5312388504.jpg, -1, -1, 68.7633587786
894
+ 5317045814.jpg, -1, -1, 66.621559633
895
+ 5320512069.jpg, -1, -1, 46.6594827586
896
+ 5326008096.jpg, -1, -1, 64.5970873786
897
+ 5327737580.jpg, -1, -1, 52.3982300885
898
+ 5328075993.jpg, -1, -1, 66.4054054054
899
+ 5342778275.jpg, -1, -1, 55.4646017699
900
+ 5343739846.jpg, -1, -1, 46.16875
901
+ 5352670374.jpg, -1, -1, 66.691588785
902
+ 535787632.jpg, -1, -1, 38.5995575221
903
+ 5358352176.jpg, -1, -1, 40.6605504587
904
+ 5358640896.jpg, -1, -1, 68.275
905
+ 5359127660.jpg, -1, -1, 67.7891304348
906
+ 5360175600.jpg, -1, -1, 75.6183035714
907
+ 5362095529.jpg, -1, -1, 75.3986486486
908
+ 5363150332.jpg, -1, -1, 55.203271028
909
+ 5364427287.jpg, -1, -1, 69.2564102564
910
+ 5365003071.jpg, -1, -1, 67.20625
911
+ 5370538873.jpg, -1, -1, 50.1381578947
912
+ 5371908851.jpg, -1, -1, 66.2699115044
913
+ 537232636.jpg, -1, -1, 40.2960526316
914
+ 5375536055.jpg, -1, -1, 67.9871794872
915
+ 5376223041.jpg, -1, -1, 68.0405405405
916
+ 5382638405.jpg, -1, -1, 44.858974359
917
+ 5399327452.jpg, -1, -1, 63.325
918
+ 5407907045.jpg, -1, -1, 48.1637168142
919
+ 5407931669.jpg, -1, -1, 73.6308411215
920
+ 5414089487.jpg, -1, -1, 80.4146341463
921
+ 5414222209.jpg, -1, -1, 77.9740566038
922
+ 5419570020.jpg, -1, -1, 66.7053571429
923
+ 5420375203.jpg, -1, -1, 74.2382075472
924
+ 5424903451.jpg, -1, -1, 68.5218446602
925
+ 5430352868.jpg, -1, -1, 71.0884955752
926
+ 5430572465.jpg, -1, -1, 69.9203539823
927
+ 5433364879.jpg, -1, -1, 30.256302521
928
+ 5434982423.jpg, -1, -1, 38.0684931507
929
+ 5436442410.jpg, -1, -1, 70.0394736842
930
+ 5437565452.jpg, -1, -1, 62.7635135135
931
+ 5457535055.jpg, -1, -1, 64.3986486486
932
+ 5459749774.jpg, -1, -1, 49.7556390977
933
+ 5460394455.jpg, -1, -1, 75.765625
934
+ 5464519026.jpg, -1, -1, 48.55
935
+ 5471871046.jpg, -1, -1, 35.0951327434
936
+ 5473035889.jpg, -1, -1, 73.1346153846
937
+ 5482786130.jpg, -1, -1, 76.5413043478
938
+ 5483273817.jpg, -1, -1, 56.3407079646
939
+ 5483697378.jpg, -1, -1, 53.8147321429
940
+ 5483904990.jpg, -1, -1, 53.1013513514
941
+ 5487766524.jpg, -1, -1, 50.0805084746
942
+ 5489335291.jpg, -1, -1, 76.4911504425
943
+ 5494503911.jpg, -1, -1, 72.6009174312
944
+ 5495773051.jpg, -1, -1, 62.5803571429
945
+ 5496213154.jpg, -1, -1, 73.476635514
946
+ 5501090896.jpg, -1, -1, 61.7563559322
947
+ 5504474324.jpg, -1, -1, 54.2430555556
948
+ 5505953336.jpg, -1, -1, 20.2731092437
949
+ 5508282751.jpg, -1, -1, 74.3092105263
950
+ 5508551413.jpg, -1, -1, 26.3785714286
951
+ 5512989538.jpg, -1, -1, 69.3469827586
952
+ 5519011400.jpg, -1, -1, 54.14375
953
+ 5525531268.jpg, -1, -1, 47.9446902655
954
+ 552629141.jpg, -1, -1, 30.5379464286
955
+ 5528731780.jpg, -1, -1, 68.8877118644
956
+ 5531274580.jpg, -1, -1, 66.4701834862
957
+ 5534328979.jpg, -1, -1, 50.3526785714
958
+ 5538586634.jpg, -1, -1, 77.9766949153
959
+ 5539522093.jpg, -1, -1, 70.6844660194
960
+ 5543037671.jpg, -1, -1, 63.7475961538
961
+ 5544820783.jpg, -1, -1, 42.8289473684
962
+ 5546833927.jpg, -1, -1, 62.1734693878
963
+ 5546979604.jpg, -1, -1, 72.8928571429
964
+ 5558342101.jpg, -1, -1, 61.8961864407
965
+ 5561303733.jpg, -1, -1, 34.8561320755
966
+ 5561541116.jpg, -1, -1, 45.3163716814
967
+ 5569876905.jpg, -1, -1, 48.4025423729
968
+ 5572325411.jpg, -1, -1, 74.5625
969
+ 5573505825.jpg, -1, -1, 74.2888349515
970
+ 5578143578.jpg, -1, -1, 72.443877551
971
+ 5580904626.jpg, -1, -1, 72.8598130841
972
+ 5586376319.jpg, -1, -1, 61.7853773585
973
+ 5589406824.jpg, -1, -1, 40.6605504587
974
+ 5590179803.jpg, -1, -1, 57.475
975
+ 5592040758.jpg, -1, -1, 69.1673728814
976
+ 5593028853.jpg, -1, -1, 61.8341121495
977
+ 5599500585.jpg, -1, -1, 53.8147321429
978
+ 5599655175.jpg, -1, -1, 76.3879310345
979
+ 560054684.jpg, -1, -1, 21.0573394495
980
+ 5602088551.jpg, -1, -1, 67.2115384615
981
+ 5602195949.jpg, -1, -1, 71.5833333333
982
+ 5602672763.jpg, -1, -1, 61.6375
983
+ 5606693622.jpg, -1, -1, 61.5231092437
984
+ 5607542951.jpg, -1, -1, 48.6418918919
985
+ 5609386026.jpg, -1, -1, 56.7079207921
986
+ 5621762674.jpg, -1, -1, 81.320754717
987
+ 5625316207.jpg, -1, -1, 64.357421875
988
+ 5627315065.jpg, -1, -1, 73.8397435897
989
+ 562942809.jpg, -1, -1, 21.2027559055
990
+ 5631644954.jpg, -1, -1, 48.0818965517
991
+ 5632060967.jpg, -1, -1, 65.4487179487
992
+ 5634844318.jpg, -1, -1, 43.9622641509
993
+ 5645276165.jpg, -1, -1, 63.5810810811
994
+ 5646738672.jpg, -1, -1, 62.1383928571
995
+ 5650628470.jpg, -1, -1, 56.2607758621
996
+ 5651165439.jpg, -1, -1, 79.9976415094
997
+ 5655651548.jpg, -1, -1, 32.3642241379
998
+ 5664632199.jpg, -1, -1, 59.5602678571
999
+ 5666479112.jpg, -1, -1, 65.685840708
1000
+ 5669648365.jpg, -1, -1, 59.0527522936
1001
+ 5673514295.jpg, -1, -1, 70.3277310924
1002
+ 5680237187.jpg, -1, -1, 79.0066964286
1003
+ 5680571645.jpg, -1, -1, 31.027027027
1004
+ 5683736894.jpg, -1, -1, 32.3919491525
1005
+ 5684894057.jpg, -1, -1, 60.325
1006
+ 5685822958.jpg, -1, -1, 79.6696428571
1007
+ 5687490427.jpg, -1, -1, 44.2948717949
1008
+ 5689072971.jpg, -1, -1, 46.1857798165
1009
+ 5690961665.jpg, -1, -1, 76.9151376147
1010
+ 5692339429.jpg, -1, -1, 72.0217391304
1011
+ 5692933688.jpg, -1, -1, 72.9342105263
1012
+ 5694228883.jpg, -1, -1, 71.447826087
1013
+ 5698690184.jpg, -1, -1, 69.4127358491
1014
+ 5699749992.jpg, -1, -1, 67.5689655172
1015
+ 5700224418.jpg, -1, -1, 50.8716216216
1016
+ 5700803236.jpg, -1, -1, 63.9724770642
1017
+ 5701314350.jpg, -1, -1, 75.5420353982
1018
+ 5702257196.jpg, -1, -1, 51.506097561
1019
+ 5706720351.jpg, -1, -1, 30.256302521
1020
+ 5718858871.jpg, -1, -1, 69.9575471698
1021
+ 5719653094.jpg, -1, -1, 17.2920168067
1022
+ 5737344773.jpg, -1, -1, 68.6638655462
1023
+ 5740846112.jpg, -1, -1, 55.8035714286
1024
+ 5746784745.jpg, -1, -1, 46.375
1025
+ 5747013682.jpg, -1, -1, 28.075
1026
+ 5748221179.jpg, -1, -1, 60.8672566372
1027
+ 5748620149.jpg, -1, -1, 56.9821428571
1028
+ 5748929912.jpg, -1, -1, 43.9831932773
1029
+ 57501165.jpg, -1, -1, 51.8983050847
1030
+ 5756631011.jpg, -1, -1, 59.1959459459
1031
+ 5763062464.jpg, -1, -1, 73.7767857143
1032
+ 5764115414.jpg, -1, -1, 71.7142857143
1033
+ 5764514076.jpg, -1, -1, 29.0079617834
1034
+ 5771711136.jpg, -1, -1, 59.9181415929
1035
+ 5771887382.jpg, -1, -1, 26.0966386555
1036
+ 5790219836.jpg, -1, -1, 61.2330097087
1037
+ 5796436033.jpg, -1, -1, 60.74375
1038
+ 5798200461.jpg, -1, -1, 71.2326086957
1039
+ 5798775388.jpg, -1, -1, 49.8370535714
1040
+ 5803914327.jpg, -1, -1, 43.2905405405
1041
+ 5805954950.jpg, -1, -1, 72.8470873786
1042
+ 5812785774.jpg, -1, -1, 76.0602678571
1043
+ 5817880168.jpg, -1, -1, 69.119266055
1044
+ 582148952.jpg, -1, -1, 69.8727678571
1045
+ 5827460813.jpg, -1, -1, 60.2881355932
1046
+ 5830956184.jpg, -1, -1, 63.1136363636
1047
+ 5835525141.jpg, -1, -1, 71.35
1048
+ 5836498446.jpg, -1, -1, 67.6292372881
1049
+ 5838518725.jpg, -1, -1, 65.8108108108
1050
+ 5842574442.jpg, -1, -1, 80.2311320755
1051
+ 5842712003.jpg, -1, -1, 70.3584070796
1052
+ 5844921100.jpg, -1, -1, 68.6923076923
1053
+ 58449294.jpg, -1, -1, 50.6363636364
1054
+ 5853259008.jpg, -1, -1, 69.7013274336
1055
+ 5865132229.jpg, -1, -1, 57.9180672269
1056
+ 5866763424.jpg, -1, -1, 73.3205645161
1057
+ 5870955720.jpg, -1, -1, 74.7102803738
1058
+ 5871068150.jpg, -1, -1, 62.7710084034
1059
+ 5871593028.jpg, -1, -1, 64.5169902913
1060
+ 5875925963.jpg, -1, -1, 54.7307692308
1061
+ 5876722447.jpg, -1, -1, 58.6699029126
1062
+ 5877192304.jpg, -1, -1, 58.9690265487
1063
+ 5877272501.jpg, -1, -1, 79.1956521739
1064
+ 5881856929.jpg, -1, -1, 56.7279411765
1065
+ 5884906716.jpg, -1, -1, 71.1949152542
1066
+ 5886086971.jpg, -1, -1, 44.9471153846
1067
+ 5891588921.jpg, -1, -1, 75.25
1068
+ 5893652818.jpg, -1, -1, 18.7132352941
1069
+ 5894875244.jpg, -1, -1, 76.6378504673
1070
+ 5898928772.jpg, -1, -1, 55.495412844
1071
+ 5899300375.jpg, -1, -1, 39.925
1072
+ 5903878980.jpg, -1, -1, 67.6939252336
1073
+ 5903930781.jpg, -1, -1, 72.7669902913
1074
+ 5905277829.jpg, -1, -1, 57.0861344538
1075
+ 5910545670.jpg, -1, -1, 30.390625
1076
+ 5910732650.jpg, -1, -1, 25.2201834862
1077
+ 5912436872.jpg, -1, -1, 66.4054054054
1078
+ 5914413530.jpg, -1, -1, 66.9269911504
1079
+ 5917296642.jpg, -1, -1, 27.5665137615
1080
+ 5921291430.jpg, -1, -1, 64.063559322
1081
+ 5926348208.jpg, -1, -1, 59.1183035714
1082
+ 5937141958.jpg, -1, -1, 65.1184210526
1083
+ 5944469044.jpg, -1, -1, 61.0456521739
1084
+ 5945639421.jpg, -1, -1, 56.171875
1085
+ 5948378050.jpg, -1, -1, 60.2428571429
1086
+ 5948782020.jpg, -1, -1, 25.8193277311
1087
+ 5955821366.jpg, -1, -1, 63.6029411765
1088
+ 5956532834.jpg, -1, -1, 19.7706422018
1089
+ 5958527369.jpg, -1, -1, 76.1588983051
1090
+ 5959274849.jpg, -1, -1, 47.26875
1091
+ 5960283649.jpg, -1, -1, 43.7625
1092
+ 5965181934.jpg, -1, -1, 61.5257009346
1093
+ 5966712198.jpg, -1, -1, 58.8922413793
1094
+ 5966778623.jpg, -1, -1, 36.25
1095
+ 5968558261.jpg, -1, -1, 73.9473684211
1096
+ 5969471767.jpg, -1, -1, 48.9105504587
1097
+ 5971762123.jpg, -1, -1, 71.8616071429
1098
+ 5977841815.jpg, -1, -1, 82.5673913043
1099
+ 5985353334.jpg, -1, -1, 64.502293578
1100
+ 5987384542.jpg, -1, -1, 82.8188073394
1101
+ 5987658762.jpg, -1, -1, 69.0625
1102
+ 5996266717.jpg, -1, -1, 72.198630137
1103
+ 5997968998.jpg, -1, -1, 63.325
1104
+ 6001258616.jpg, -1, -1, 64.5691964286
1105
+ 6001836316.jpg, -1, -1, 44.6764705882
1106
+ 6004179446.jpg, -1, -1, 58.15
1107
+ 6004662053.jpg, -1, -1, 71.9934210526
1108
+ 6011321842.jpg, -1, -1, 62.2
1109
+ 6012494248.jpg, -1, -1, 32.9882075472
1110
+ 6014996044.jpg, -1, -1, 21.947826087
1111
+ 6015032576.jpg, -1, -1, 44.777027027
1112
+ 6018292739.jpg, -1, -1, 82.825
1113
+ 6027079771.jpg, -1, -1, 35.6205357143
1114
+ 6031428768.jpg, -1, -1, 76.7697368421
1115
+ 6031659162.jpg, -1, -1, 70.0663716814
1116
+ 6038209414.jpg, -1, -1, 67.7638888889
1117
+ 6050570749.jpg, -1, -1, 45.5909090909
1118
+ 6051776132.jpg, -1, -1, 77.6592920354
1119
+ 6052522172.jpg, -1, -1, 65.4875
1120
+ 6063249330.jpg, -1, -1, 58.825
1121
+ 6063545051.jpg, -1, -1, 46.5959821429
1122
+ 6064738730.jpg, -1, -1, 71.9338235294
1123
+ 6065704403.jpg, -1, -1, 67.7432432432
1124
+ 6072006649.jpg, -1, -1, 67.6823308271
1125
+ 6073506056.jpg, -1, -1, 71.2648305085
1126
+ 6077061574.jpg, -1, -1, 68.468220339
1127
+ 6078472527.jpg, -1, -1, 61.3053097345
1128
+ 6084731334.jpg, -1, -1, 38.9197247706
1129
+ 6092350487.jpg, -1, -1, 70.5675675676
1130
+ 6093133362.jpg, -1, -1, 51.9935344828
1131
+ 6098120607.jpg, -1, -1, 23.7989864865
1132
+ 6099389158.jpg, -1, -1, 36.4605263158
1133
+ 6113347868.jpg, -1, -1, 51.7584745763
1134
+ 6114847662.jpg, -1, -1, 64.3685344828
1135
+ 6115396985.jpg, -1, -1, 70.7087155963
1136
+ 6116188135.jpg, -1, -1, 72.8728813559
1137
+ 6118735934.jpg, -1, -1, 76.397826087
1138
+ 6118760299.jpg, -1, -1, 77.8962264151
1139
+ 6124044226.jpg, -1, -1, 71.9797297297
1140
+ 6127748837.jpg, -1, -1, 67.9491150442
1141
+ 6131345309.jpg, -1, -1, 69.5108695652
1142
+ 6138363884.jpg, -1, -1, 72.8928571429
1143
+ 6142185721.jpg, -1, -1, 39.6009174312
1144
+ 6143698715.jpg, -1, -1, 33.6349557522
1145
+ 6144690260.jpg, -1, -1, 76.4399038462
1146
+ 6144702078.jpg, -1, -1, 54.0357142857
1147
+ 6146146023.jpg, -1, -1, 80.7739130435
1148
+ 6146595160.jpg, -1, -1, 71.6261682243
1149
+ 6147638271.jpg, -1, -1, 76.5486111111
1150
+ 6149089572.jpg, -1, -1, 73.3517699115
1151
+ 6153962968.jpg, -1, -1, 40.6605504587
1152
+ 6156825791.jpg, -1, -1, 66.0388349515
1153
+ 6164110615.jpg, -1, -1, 56.3826086957
1154
+ 6166626410.jpg, -1, -1, 48.9353448276
1155
+ 6170589548.jpg, -1, -1, 64.4348739496
1156
+ 6172964730.jpg, -1, -1, 23.8224299065
1157
+ 6173003449.jpg, -1, -1, 72.6009174312
1158
+ 61745002.jpg, -1, -1, 47.7727272727
1159
+ 6188110673.jpg, -1, -1, 75.1026785714
1160
+ 6190085634.jpg, -1, -1, 51.8865546218
1161
+ 6190639250.jpg, -1, -1, 72.5827067669
1162
+ 6192537618.jpg, -1, -1, 71.9122137405
1163
+ 6199370121.jpg, -1, -1, 76.4695652174
1164
+ 6210494281.jpg, -1, -1, 59.7075892857
1165
+ 6216457478.jpg, -1, -1, 64.9195652174
1166
+ 6218336714.jpg, -1, -1, 75.1794871795
1167
+ 6219475255.jpg, -1, -1, 61.1808035714
1168
+ 6220758013.jpg, -1, -1, 67.6629464286
1169
+ 6224309834.jpg, -1, -1, 23.3055555556
1170
+ 6226065978.jpg, -1, -1, 65.8851351351
1171
+ 6232031392.jpg, -1, -1, 71.8241525424
1172
+ 6237907286.jpg, -1, -1, 67.1473214286
1173
+ 62458506.jpg, -1, -1, 64.5320512821
1174
+ 6248527465.jpg, -1, -1, 75.6391509434
1175
+ 6251598886.jpg, -1, -1, 53.3727678571
1176
+ 6251795586.jpg, -1, -1, 73.4878640777
1177
+ 6252497829.jpg, -1, -1, 57.8660714286
1178
+ 6258118545.jpg, -1, -1, 36.5826086957
1179
+ 6264153932.jpg, -1, -1, 40.6605504587
1180
+ 6267817145.jpg, -1, -1, 64.975
1181
+ 6268176546.jpg, -1, -1, 75.8739495798
1182
+ 6274950285.jpg, -1, -1, 76.675
1183
+ 6276334186.jpg, -1, -1, 35.44375
1184
+ 6280923250.jpg, -1, -1, 69.7737068966
1185
+ 6283627097.jpg, -1, -1, 57.8247663551
1186
+ 6285601097.jpg, -1, -1, 73.6933962264
1187
+ 6290479072.jpg, -1, -1, 64.2919847328
1188
+ 6293629106.jpg, -1, -1, 44.8183962264
1189
+ 6294913930.jpg, -1, -1, 58.75
1190
+ 6298978484.jpg, -1, -1, 59.8096330275
1191
+ 6303893419.jpg, -1, -1, 56.570754717
1192
+ 6304784314.jpg, -1, -1, 28.3513513514
1193
+ 6305224063.jpg, -1, -1, 24.8891304348
1194
+ 6305573914.jpg, -1, -1, 53.7715517241
1195
+ 6310649828.jpg, -1, -1, 18.7522321429
1196
+ 6317232957.jpg, -1, -1, 61.5504587156
1197
+ 6318820907.jpg, -1, -1, 61.8819444444
1198
+ 6319935355.jpg, -1, -1, 82.1741071429
1199
+ 6321752444.jpg, -1, -1, 57.4864864865
1200
+ 6325240412.jpg, -1, -1, 79.9343220339
1201
+ 6326811967.jpg, -1, -1, 64.6025641026
1202
+ 6326902733.jpg, -1, -1, 66.2633928571
1203
+ 6332883956.jpg, -1, -1, 16.9
1204
+ 6333112690.jpg, -1, -1, 67.502173913
1205
+ 6340619552.jpg, -1, -1, 30.4324324324
1206
+ 6344881273.jpg, -1, -1, 54.925
1207
+ 6391074279.jpg, -1, -1, 76.6973684211
1208
+ 6391919133.jpg, -1, -1, 64.4446902655
1209
+ 6403149971.jpg, -1, -1, 35.3231132075
1210
+ 6408708033.jpg, -1, -1, 47.7008928571
1211
+ 6411944553.jpg, -1, -1, 56.9977876106
1212
+ 641595598.jpg, -1, -1, 74.747826087
1213
+ 6419047435.jpg, -1, -1, 67.8009708738
1214
+ 6422846525.jpg, -1, -1, 71.6169724771
1215
+ 6433692937.jpg, -1, -1, 45.7742718447
1216
+ 6435747701.jpg, -1, -1, 74.3173913043
1217
+ 6441383857.jpg, -1, -1, 74.1603773585
1218
+ 6443396365.jpg, -1, -1, 56.0233050847
1219
+ 6443711585.jpg, -1, -1, 45.4459459459
1220
+ 6445980191.jpg, -1, -1, 63.8109243697
1221
+ 6453185751.jpg, -1, -1, 60.5525210084
1222
+ 6453831461.jpg, -1, -1, 38.0871559633
1223
+ 6458056243.jpg, -1, -1, 42.6880530973
1224
+ 6459811757.jpg, -1, -1, 68.2913043478
1225
+ 6467546565.jpg, -1, -1, 29.7710084034
1226
+ 6470955539.jpg, -1, -1, 64.4316037736
1227
+ 6471980333.jpg, -1, -1, 59.7288135593
1228
+ 6481674971.jpg, -1, -1, 53.425
1229
+ 6486906969.jpg, -1, -1, 38.2385321101
1230
+ 6489144861.jpg, -1, -1, 66.9256756757
1231
+ 6495386031.jpg, -1, -1, 68.9576271186
1232
+ 6496266341.jpg, -1, -1, 35.8982300885
1233
+ 6497142149.jpg, -1, -1, 63.0148305085
1234
+ 6511236583.jpg, -1, -1, 68.4558823529
1235
+ 6512617013.jpg, -1, -1, 80.26875
1236
+ 6517362491.jpg, -1, -1, 61.9504310345
1237
+ 6522952765.jpg, -1, -1, 61.626146789
1238
+ 6523676619.jpg, -1, -1, 56.4036697248
1239
+ 6530161213.jpg, -1, -1, 54.7648305085
1240
+ 6543602941.jpg, -1, -1, 55.3185840708
1241
+ 6563196289.jpg, -1, -1, 52.6554054054
1242
+ 6563980965.jpg, -1, -1, 81.1625
1243
+ 6564941395.jpg, -1, -1, 72.3296460177
1244
+ 6568238645.jpg, -1, -1, 56.9247787611
1245
+ 6577277115.jpg, -1, -1, 62.5485611511
1246
+ 6578628541.jpg, -1, -1, 55.768907563
1247
+ 6579889887.jpg, -1, -1, 64.1334745763
1248
+ 6585714605.jpg, -1, -1, 80.40625
1249
+ 6595488751.jpg, -1, -1, 64.343220339
1250
+ 6596085063.jpg, -1, -1, 62.1111111111
1251
+ 6596306.jpg, -1, -1, 40.6282051282
1252
+ 6609931137.jpg, -1, -1, 67.2115384615
1253
+ 6613235741.jpg, -1, -1, 42.3308823529
1254
+ 6621488051.jpg, -1, -1, 66.5840336134
1255
+ 6622717529.jpg, -1, -1, 74.5387931034
1256
+ 6632573597.jpg, -1, -1, 53.125
1257
+ 6633974935.jpg, -1, -1, 74.2794117647
1258
+ 6638453859.jpg, -1, -1, 75.85
1259
+ 6645063001.jpg, -1, -1, 43.3686440678
1260
+ 6648576397.jpg, -1, -1, 40.1470588235
1261
+ 6658108283.jpg, -1, -1, 26.9695652174
1262
+ 6658947687.jpg, -1, -1, 47.6995412844
1263
+ 6661741471.jpg, -1, -1, 19.8771186441
1264
+ 6669862645.jpg, -1, -1, 58.516509434
1265
+ 6679373485.jpg, -1, -1, 42.6625
1266
+ 6681716765.jpg, -1, -1, 69.9870689655
1267
+ 6688906197.jpg, -1, -1, 59.9065420561
1268
+ 6692795337.jpg, -1, -1, 72.3834745763
1269
+ 6694664547.jpg, -1, -1, 66.4602803738
1270
+ 6694669509.jpg, -1, -1, 64.6869158879
1271
+ 6698669369.jpg, -1, -1, 79.9130434783
1272
+ 6708806483.jpg, -1, -1, 69.1491596639
1273
+ 6710865793.jpg, -1, -1, 63.8325892857
1274
+ 6727661759.jpg, -1, -1, 60.4138655462
1275
+ 6728176335.jpg, -1, -1, 66.9306722689
1276
+ 6728215257.jpg, -1, -1, 64.9724576271
1277
+ 6731073163.jpg, -1, -1, 65.6602564103
1278
+ 6733542819.jpg, -1, -1, 60.0758928571
1279
+ 6739568529.jpg, -1, -1, 57.9789719626
1280
+ 6741097253.jpg, -1, -1, 57.8333333333
1281
+ 6746835381.jpg, -1, -1, 79.8602941176
1282
+ 6748287295.jpg, -1, -1, 66.3653846154
1283
+ 6771336533.jpg, -1, -1, 31.1272321429
1284
+ 6776603937.jpg, -1, -1, 66.7401574803
1285
+ 6791461596.jpg, -1, -1, 60.2058823529
1286
+ 6793483796.jpg, -1, -1, 67.0
1287
+ 6793575356.jpg, -1, -1, 57.7094594595
1288
+ 6796759928.jpg, -1, -1, 75.8082706767
1289
+ 6798563238.jpg, -1, -1, 52.1019417476
1290
+ 6803183748.jpg, -1, -1, 58.4553571429
1291
+ 6808449405.jpg, -1, -1, 67.8481308411
1292
+ 6809126683.jpg, -1, -1, 46.2614678899
1293
+ 6811037194.jpg, -1, -1, 38.1621621622
1294
+ 6813481975.jpg, -1, -1, 68.4637096774
1295
+ 6818984013.jpg, -1, -1, 49.5911016949
1296
+ 6824806522.jpg, -1, -1, 30.9929906542
1297
+ 6829439085.jpg, -1, -1, 55.5769230769
1298
+ 6830281130.jpg, -1, -1, 76.1108695652
1299
+ 6832446699.jpg, -1, -1, 51.506097561
1300
+ 6833109332.jpg, -1, -1, 70.8076923077
1301
+ 6834514891.jpg, -1, -1, 28.8081896552
1302
+ 6836874905.jpg, -1, -1, 68.2954545455
1303
+ 6837228368.jpg, -1, -1, 69.5169491525
1304
+ 6837662382.jpg, -1, -1, 71.5265486726
1305
+ 6842750557.jpg, -1, -1, 29.4166666667
1306
+ 6843523713.jpg, -1, -1, 56.2894736842
1307
+ 6843525285.jpg, -1, -1, 71.534351145
1308
+ 6845847924.jpg, -1, -1, 42.6767241379
1309
+ 6854228547.jpg, -1, -1, 58.0448717949
1310
+ 6860404399.jpg, -1, -1, 41.5625
1311
+ 6864569694.jpg, -1, -1, 65.8851351351
1312
+ 6865774865.jpg, -1, -1, 66.51875
1313
+ 6869450975.jpg, -1, -1, 47.608974359
1314
+ 6869855884.jpg, -1, -1, 35.1402439024
1315
+ 6870596537.jpg, -1, -1, 56.9084821429
1316
+ 6880980024.jpg, -1, -1, 29.7163461538
1317
+ 6881321836.jpg, -1, -1, 18.6866438356
1318
+ 6882017804.jpg, -1, -1, 46.9642857143
1319
+ 6890378940.jpg, -1, -1, 75.775
1320
+ 6890677809.jpg, -1, -1, 80.05
1321
+ 6891596982.jpg, -1, -1, 64.6311881188
1322
+ 6891599340.jpg, -1, -1, 63.2354368932
1323
+ 6892058753.jpg, -1, -1, 68.85625
1324
+ 6895353221.jpg, -1, -1, 48.5227272727
1325
+ 6897314758.jpg, -1, -1, 73.5091743119
1326
+ 6902342114.jpg, -1, -1, 47.8014018692
1327
+ 6902444479.jpg, -1, -1, 68.8581081081
1328
+ 6904675638.jpg, -1, -1, 43.0210280374
1329
+ 6905808386.jpg, -1, -1, 61.3233944954
1330
+ 6908900076.jpg, -1, -1, 80.3881578947
1331
+ 6911586648.jpg, -1, -1, 29.8013392857
1332
+ 6920118996.jpg, -1, -1, 64.9375
1333
+ 6927641976.jpg, -1, -1, 42.3973214286
1334
+ 6929188980.jpg, -1, -1, 70.8648648649
1335
+ 6929191226.jpg, -1, -1, 48.0691964286
1336
+ 6930075142.jpg, -1, -1, 51.2208737864
1337
+ 6931269556.jpg, -1, -1, 60.8298319328
1338
+ 6933526580.jpg, -1, -1, 65.0256410256
1339
+ 6934705817.jpg, -1, -1, 52.9957983193
1340
+ 6935047173.jpg, -1, -1, 74.2711864407
1341
+ 6937606166.jpg, -1, -1, 74.7434210526
1342
+ 6938468092.jpg, -1, -1, 63.15
1343
+ 6939374638.jpg, -1, -1, 60.5375
1344
+ 6940202479.jpg, -1, -1, 65.1495327103
1345
+ 6940429128.jpg, -1, -1, 40.225
1346
+ 6941715715.jpg, -1, -1, 64.7367256637
1347
+ 6942418274.jpg, -1, -1, 54.3823529412
1348
+ 6942738260.jpg, -1, -1, 64.3947368421
1349
+ 6945167278.jpg, -1, -1, 70.5902777778
1350
+ 6945246052.jpg, -1, -1, 77.535472973
1351
+ 6946044735.jpg, -1, -1, 43.4285714286
1352
+ 6947747616.jpg, -1, -1, 62.875
1353
+ 6949001721.jpg, -1, -1, 50.70625
1354
+ 6954316003.jpg, -1, -1, 59.7899159664
1355
+ 6954547933.jpg, -1, -1, 37.1032110092
1356
+ 6955490571.jpg, -1, -1, 72.8598130841
1357
+ 6962031247.jpg, -1, -1, 39.1658878505
1358
+ 6963387147.jpg, -1, -1, 56.171875
1359
+ 6966797840.jpg, -1, -1, 39.3971962617
1360
+ 6974731766.jpg, -1, -1, 35.3995535714
1361
+ 6976429442.jpg, -1, -1, 36.8004587156
1362
+ 6976437867.jpg, -1, -1, 71.6144067797
1363
+ 6985846441.jpg, -1, -1, 61.4471153846
1364
+ 6994313445.jpg, -1, -1, 79.046460177
1365
+ 6996264658.jpg, -1, -1, 62.7654867257
1366
+ 6998687310.jpg, -1, -1, 56.2115384615
1367
+ 6998702838.jpg, -1, -1, 63.8325892857
1368
+ 7001480005.jpg, -1, -1, 59.6418918919
1369
+ 7003526811.jpg, -1, -1, 68.6351351351
1370
+ 7005136305.jpg, -1, -1, 78.7544247788
1371
+ 7005672625.jpg, -1, -1, 66.775
1372
+ 7006191446.jpg, -1, -1, 53.5762711864
1373
+ 7013076503.jpg, -1, -1, 64.375
1374
+ 7013643069.jpg, -1, -1, 44.6627358491
1375
+ 7024488001.jpg, -1, -1, 78.3163716814
1376
+ 7025630527.jpg, -1, -1, 62.4734513274
1377
+ 70272233.jpg, -1, -1, 15.8361344538
1378
+ 7033122257.jpg, -1, -1, 55.3254716981
1379
+ 7033769199.jpg, -1, -1, 54.1504424779
1380
+ 7036262065.jpg, -1, -1, 71.7763157895
1381
+ 7043681927.jpg, -1, -1, 70.2715517241
1382
+ 7046294773.jpg, -1, -1, 72.9665178571
1383
+ 7051563883.jpg, -1, -1, 57.8891304348
1384
+ 7052391619.jpg, -1, -1, 76.8175
1385
+ 7054584681.jpg, -1, -1, 65.6621621622
1386
+ 7063436849.jpg, -1, -1, 60.1116504854
1387
+ 706409343.jpg, -1, -1, 72.0887850467
1388
+ 7066508685.jpg, -1, -1, 53.6788990826
1389
+ 7066539033.jpg, -1, -1, 71.2307692308
1390
+ 7078293355.jpg, -1, -1, 79.4102564103
1391
+ 7082408177.jpg, -1, -1, 49.9701834862
1392
+ 7096952113.jpg, -1, -1, 62.875
1393
+ 7110105161.jpg, -1, -1, 47.0986842105
1394
+ 7113461019.jpg, -1, -1, 51.5076335878
1395
+ 7119860187.jpg, -1, -1, 70.7234513274
1396
+ 7131772191.jpg, -1, -1, 79.5642201835
1397
+ 7136073057.jpg, -1, -1, 62.1689189189
1398
+ 7144832397.jpg, -1, -1, 68.6351351351
1399
+ 71458295.jpg, -1, -1, 62.9789915966
1400
+ 7149994451.jpg, -1, -1, 62.8391304348
1401
+ 7158901982.jpg, -1, -1, 62.7654867257
1402
+ 7162678728.jpg, -1, -1, 63.5297619048
1403
+ 7162855925.jpg, -1, -1, 11.8552631579
1404
+ 7167671368.jpg, -1, -1, 63.8802521008
1405
+ 7168526408.jpg, -1, -1, 64.7043478261
1406
+ 7171867098.jpg, -1, -1, 68.6061946903
1407
+ 7172192793.jpg, -1, -1, 51.8983050847
1408
+ 7176672187.jpg, -1, -1, 59.3392857143
1409
+ 7178199774.jpg, -1, -1, 81.4932432432
1410
+ 7181400642.jpg, -1, -1, 41.65
1411
+ 7189264298.jpg, -1, -1, 79.8256302521
1412
+ 7189498789.jpg, -1, -1, 64.1995412844
1413
+ 7189997672.jpg, -1, -1, 69.625
1414
+ 7191194360.jpg, -1, -1, 47.7256637168
1415
+ 7192279956.jpg, -1, -1, 63.2094594595
1416
+ 7192785398.jpg, -1, -1, 56.597826087
1417
+ 7217797182.jpg, -1, -1, 63.3419811321
1418
+ 7219295732.jpg, -1, -1, 44.125
1419
+ 7228274052.jpg, -1, -1, 72.55
1420
+ 7232368208.jpg, -1, -1, 62.6924778761
1421
+ 7238368946.jpg, -1, -1, 36.8875
1422
+ 7264923790.jpg, -1, -1, 77.1152173913
1423
+ 7268981578.jpg, -1, -1, 36.3004807692
1424
+ 7270764894.jpg, -1, -1, 70.2004310345
1425
+ 7276446104.jpg, -1, -1, 70.3141025641
1426
+ 7280314854.jpg, -1, -1, 54.8459821429
1427
+ 7283094026.jpg, -1, -1, 78.34375
1428
+ 7286003056.jpg, -1, -1, 70.9285714286
1429
+ 7291000146.jpg, -1, -1, 66.237394958
1430
+ 7292878318.jpg, -1, -1, 61.1381578947
1431
+ 7296337218.jpg, -1, -1, 65.5
1432
+ 7312266068.jpg, -1, -1, 53.31875
1433
+ 7312752876.jpg, -1, -1, 50.5693277311
1434
+ 7316131560.jpg, -1, -1, 71.9646017699
1435
+ 7316593310.jpg, -1, -1, 74.8305084746
1436
+ 7328195954.jpg, -1, -1, 72.1652173913
1437
+ 7331539774.jpg, -1, -1, 74.4539473684
1438
+ 7334314036.jpg, -1, -1, 75.25
1439
+ 7351425672.jpg, -1, -1, 76.1119402985
1440
+ 7355344734.jpg, -1, -1, 76.2899159664
1441
+ 7355708808.jpg, -1, -1, 67.2920353982
1442
+ 73558937.jpg, -1, -1, 56.1375
1443
+ 7358286276.jpg, -1, -1, 7.05
1444
+ 7359378256.jpg, -1, -1, 71.3805309735
1445
+ 7360550332.jpg, -1, -1, 72.5763888889
1446
+ 7383311936.jpg, -1, -1, 37.0436893204
1447
+ 7387717156.jpg, -1, -1, 39.268907563
1448
+ 7391334516.jpg, -1, -1, 68.4864864865
1449
+ 7391355184.jpg, -1, -1, 77.5064102564
1450
+ 7392719524.jpg, -1, -1, 76.1142857143
1451
+ 7393631040.jpg, -1, -1, 75.7435897436
1452
+ 7417878338.jpg, -1, -1, 80.4869565217
1453
+ 7425526298.jpg, -1, -1, 43.7831858407
1454
+ 7445548804.jpg, -1, -1, 54.9655963303
1455
+ 7460005346.jpg, -1, -1, 62.4166666667
1456
+ 7460698156.jpg, -1, -1, 71.5913043478
1457
+ 7466229858.jpg, -1, -1, 66.7053571429
1458
+ 7470692282.jpg, -1, -1, 44.05
1459
+ 7484588674.jpg, -1, -1, 68.1891891892
1460
+ 7492007796.jpg, -1, -1, 53.0553097345
1461
+ 7492033480.jpg, -1, -1, 59.3093220339
1462
+ 7499781424.jpg, -1, -1, 69.797826087
1463
+ 7504569144.jpg, -1, -1, 70.0847826087
1464
+ 7510638426.jpg, -1, -1, 54.0675675676
1465
+ 7538254742.jpg, -1, -1, 54.1830357143
1466
+ 7551562932.jpg, -1, -1, 40.9633027523
1467
+ 7553660370.jpg, -1, -1, 56.6701680672
1468
+ 7557808298.jpg, -1, -1, 75.7610619469
1469
+ 7559495698.jpg, -1, -1, 19.0945378151
1470
+ 7561935496.jpg, -1, -1, 73.1111111111
1471
+ 7575998494.jpg, -1, -1, 68.7368421053
1472
+ 7577281784.jpg, -1, -1, 67.3618421053
1473
+ 7582497202.jpg, -1, -1, 65.95
1474
+ 7586142684.jpg, -1, -1, 64.6337209302
1475
+ 7590781464.jpg, -1, -1, 74.6444954128
1476
+ 7591870206.jpg, -1, -1, 60.1602564103
1477
+ 7592360042.jpg, -1, -1, 64.25
1478
+ 7613152858.jpg, -1, -1, 73.9241071429
1479
+ 7617788578.jpg, -1, -1, 62.05
1480
+ 7622957154.jpg, -1, -1, 75.8846153846
1481
+ 7625539278.jpg, -1, -1, 35.5137614679
1482
+ 7636475996.jpg, -1, -1, 23.1766055046
1483
+ 7645259944.jpg, -1, -1, 40.2051282051
1484
+ 7651198192.jpg, -1, -1, 63.197826087
1485
+ 7660049550.jpg, -1, -1, 63.1305309735
1486
+ 7662529090.jpg, -1, -1, 61.15625
1487
+ 7672860636.jpg, -1, -1, 56.3701923077
1488
+ 7673801638.jpg, -1, -1, 80.4336283186
1489
+ 7674660830.jpg, -1, -1, 68.1323529412
1490
+ 7678256566.jpg, -1, -1, 78.0924369748
1491
+ 7690007638.jpg, -1, -1, 58.75
1492
+ 7692418948.jpg, -1, -1, 58.4630434783
1493
+ 7697080640.jpg, -1, -1, 66.1610169492
1494
+ 7697492926.jpg, -1, -1, 65.3366935484
1495
+ 7703482662.jpg, -1, -1, 70.4903846154
1496
+ 7704019566.jpg, -1, -1, 72.0376106195
1497
+ 7712722502.jpg, -1, -1, 74.6760869565
1498
+ 7720747032.jpg, -1, -1, 78.3716216216
1499
+ 7728764770.jpg, -1, -1, 72.5514018692
1500
+ 7731352704.jpg, -1, -1, 76.00625
1501
+ 7736222330.jpg, -1, -1, 58.3495145631
1502
+ 7737023436.jpg, -1, -1, 50.0021551724
1503
+ 7738480598.jpg, -1, -1, 31.3947368421
1504
+ 7751690228.jpg, -1, -1, 71.6169724771
1505
+ 7759565946.jpg, -1, -1, 79.0344036697
1506
+ 7765809112.jpg, -1, -1, 51.4705882353
1507
+ 7769931466.jpg, -1, -1, 46.9642857143
1508
+ 7769932398.jpg, -1, -1, 69.5553097345
1509
+ 7783320848.jpg, -1, -1, 72.373853211
1510
+ 7785216770.jpg, -1, -1, 59.875
1511
+ 7785223076.jpg, -1, -1, 14.2152777778
1512
+ 7788745946.jpg, -1, -1, 72.8108695652
1513
+ 7796133504.jpg, -1, -1, 71.3851351351
1514
+ 7797268116.jpg, -1, -1, 64.8445945946
1515
+ 7802695886.jpg, -1, -1, 75.1136363636
1516
+ 7810074740.jpg, -1, -1, 75.3217391304
1517
+ 7819211312.jpg, -1, -1, 50.1316964286
1518
+ 7819666996.jpg, -1, -1, 72.722972973
1519
+ 7832796652.jpg, -1, -1, 73.031512605
1520
+ 7875370780.jpg, -1, -1, 67.640776699
1521
+ 7877688978.jpg, -1, -1, 43.3782051282
1522
+ 7880797072.jpg, -1, -1, 53.1985981308
1523
+ 7889791504.jpg, -1, -1, 66.175
1524
+ 7893560424.jpg, -1, -1, 57.8965517241
1525
+ 7906332480.jpg, -1, -1, 65.2
1526
+ 7911339166.jpg, -1, -1, 72.2629310345
1527
+ 7911502338.jpg, -1, -1, 50.9380530973
1528
+ 7919270808.jpg, -1, -1, 39.2828947368
1529
+ 7927998516.jpg, -1, -1, 71.9054054054
1530
+ 7928561010.jpg, -1, -1, 43.6116504854
1531
+ 7934073070.jpg, -1, -1, 53.0984251969
1532
+ 7937116986.jpg, -1, -1, 72.0711009174
1533
+ 7942038522.jpg, -1, -1, 55.5089285714
1534
+ 7946380974.jpg, -1, -1, 67.9930555556
1535
+ 7954849148.jpg, -1, -1, 79.1822429907
1536
+ 7955276034.jpg, -1, -1, 40.5558035714
1537
+ 7955776040.jpg, -1, -1, 53.027027027
1538
+ 7959317086.jpg, -1, -1, 60.2232142857
1539
+ 7964545096.jpg, -1, -1, 20.5762711864
1540
+ 7969586312.jpg, -1, -1, 66.342920354
1541
+ 7973547250.jpg, -1, -1, 70.8782051282
1542
+ 7974813040.jpg, -1, -1, 74.4893617021
1543
+ 7978282630.jpg, -1, -1, 50.9852941176
1544
+ 7983035556.jpg, -1, -1, 72.671875
1545
+ 7991223182.jpg, -1, -1, 65.3648648649
1546
+ 7991449337.jpg, -1, -1, 80.8509174312
1547
+ 7994058325.jpg, -1, -1, 50.8855140187
1548
+ 7999186610.jpg, -1, -1, 52.2522123894
1549
+ 8000770201.jpg, -1, -1, 60.2428571429
1550
+ 8003923043.jpg, -1, -1, 53.7847222222
1551
+ 8005430981.jpg, -1, -1, 75.625
1552
+ 8008341376.jpg, -1, -1, 66.7130434783
1553
+ 8016728166.jpg, -1, -1, 70.3
1554
+ 8017805417.jpg, -1, -1, 75.8970588235
1555
+ 8020768393.jpg, -1, -1, 72.8191964286
1556
+ 8021353022.jpg, -1, -1, 74.9553571429
1557
+ 8021464988.jpg, -1, -1, 74.125
1558
+ 8022493551.jpg, -1, -1, 57.7794117647
1559
+ 8026326591.jpg, -1, -1, 72.2224770642
1560
+ 8027425918.jpg, -1, -1, 75.1
1561
+ 8030242803.jpg, -1, -1, 58.9086538462
1562
+ 8034781018.jpg, -1, -1, 79.2614678899
1563
+ 8035478082.jpg, -1, -1, 28.7570093458
1564
+ 8036386438.jpg, -1, -1, 75.25
1565
+ 8039894154.jpg, -1, -1, 41.2924107143
1566
+ 8040007574.jpg, -1, -1, 78.9587155963
1567
+ 80417582.jpg, -1, -1, 38.7836134454
1568
+ 8044678667.jpg, -1, -1, 78.9166666667
1569
+ 8056693769.jpg, -1, -1, 68.4732142857
1570
+ 8064361291.jpg, -1, -1, 38.4806034483
1571
+ 8065579848.jpg, -1, -1, 65.1747787611
1572
+ 8070731836.jpg, -1, -1, 70.4314159292
1573
+ 8078731890.jpg, -1, -1, 67.4419642857
1574
+ 8081688318.jpg, -1, -1, 65.0287610619
1575
+ 8087906672.jpg, -1, -1, 73.1138392857
1576
+ 8088110789.jpg, -1, -1, 78.796728972
1577
+ 8090361774.jpg, -1, -1, 35.9861111111
1578
+ 8094310910.jpg, -1, -1, 72.8407079646
1579
+ 8094870766.jpg, -1, -1, 71.3347457627
1580
+ 8108912190.jpg, -1, -1, 67.2796610169
1581
+ 8113190555.jpg, -1, -1, 78.2171052632
1582
+ 8116678978.jpg, -1, -1, 78.5353982301
1583
+ 8118290739.jpg, -1, -1, 57.6644736842
1584
+ 8120735257.jpg, -1, -1, 72.7594339623
1585
+ 8121424411.jpg, -1, -1, 43.9151376147
1586
+ 8127311442.jpg, -1, -1, 47.1845794393
1587
+ 81292981.jpg, -1, -1, 17.6375
1588
+ 8130446271.jpg, -1, -1, 60.9547413793
1589
+ 8130469512.jpg, -1, -1, 56.9719827586
1590
+ 8142530403.jpg, -1, -1, 67.1434782609
1591
+ 8142588958.jpg, -1, -1, 68.6941964286
1592
+ 8152567908.jpg, -1, -1, 57.9180672269
1593
+ 8154204617.jpg, -1, -1, 73.3461538462
1594
+ 8157886874.jpg, -1, -1, 71.1961206897
1595
+ 8158011851.jpg, -1, -1, 69.2239130435
1596
+ 8158344434.jpg, -1, -1, 72.4276315789
1597
+ 8163644568.jpg, -1, -1, 18.8172268908
1598
+ 8178731250.jpg, -1, -1, 67.4159663866
1599
+ 8179375247.jpg, -1, -1, 80.3455882353
1600
+ 8179882827.jpg, -1, -1, 73.0347222222
1601
+ 8183790186.jpg, -1, -1, 64.6959459459
1602
+ 8184075727.jpg, -1, -1, 71.6725663717
1603
+ 8185807831.jpg, -1, -1, 61.232300885
1604
+ 8192678214.jpg, -1, -1, 75.6150442478
1605
+ 8195543723.jpg, -1, -1, 76.7287735849
1606
+ 8196266010.jpg, -1, -1, 36.5733944954
1607
+ 8199545794.jpg, -1, -1, 77.6592920354
1608
+ 8206370859.jpg, -1, -1, 77.9747706422
1609
+ 8210857374.jpg, -1, -1, 67.3928571429
1610
+ 8211686846.jpg, -1, -1, 52.3922018349
1611
+ 8214901795.jpg, -1, -1, 67.3891509434
1612
+ 8220742117.jpg, -1, -1, 20.125
1613
+ 8226831484.jpg, -1, -1, 54.5538793103
1614
+ 8229585759.jpg, -1, -1, 55.93125
1615
+ 8230351643.jpg, -1, -1, 29.8341584158
1616
+ 8231492407.jpg, -1, -1, 66.4107142857
1617
+ 8238462203.jpg, -1, -1, 79.4096638655
1618
+ 8239827069.jpg, -1, -1, 58.2
1619
+ 8242811234.jpg, -1, -1, 66.3944954128
1620
+ 8244017611.jpg, -1, -1, 62.2196261682
1621
+ 8245350767.jpg, -1, -1, 74.2476635514
1622
+ 8246184591.jpg, -1, -1, 50.15625
1623
+ 8250498441.jpg, -1, -1, 60.8125
1624
+ 8252742940.jpg, -1, -1, 83.5
1625
+ 8256731117.jpg, -1, -1, 48.1118421053
1626
+ 8257779790.jpg, -1, -1, 22.9533898305
1627
+ 8259404043.jpg, -1, -1, 74.65
1628
+ 8260840073.jpg, -1, -1, 49.5294117647
1629
+ 8261560586.jpg, -1, -1, 65.4668141593
1630
+ 826373.jpg, -1, -1, 77.7457983193
1631
+ 8266041849.jpg, -1, -1, 68.1785714286
1632
+ 8266648992.jpg, -1, -1, 73.3
1633
+ 8267353806.jpg, -1, -1, 70.7566964286
1634
+ 8269777387.jpg, -1, -1, 54.6642857143
1635
+ 8281818195.jpg, -1, -1, 36.95625
1636
+ 8282376526.jpg, -1, -1, 60.3394495413
1637
+ 82824130.jpg, -1, -1, 29.4625
1638
+ 8289927707.jpg, -1, -1, 66.1081081081
1639
+ 8301271447.jpg, -1, -1, 63.5263157895
1640
+ 8303486295.jpg, -1, -1, 77.2076271186
1641
+ 8303662528.jpg, -1, -1, 65.6004464286
1642
+ 8305212885.jpg, -1, -1, 44.5963302752
1643
+ 8306155022.jpg, -1, -1, 77.65625
1644
+ 8306770184.jpg, -1, -1, 65.3808411215
1645
+ 8307817608.jpg, -1, -1, 33.5580357143
1646
+ 8310254529.jpg, -1, -1, 71.19375
1647
+ 8317893385.jpg, -1, -1, 75.18125
1648
+ 8318268016.jpg, -1, -1, 65.4668141593
1649
+ 8320017511.jpg, -1, -1, 77.0280172414
1650
+ 8320098796.jpg, -1, -1, 55.2836134454
1651
+ 8321155777.jpg, -1, -1, 53.2731092437
1652
+ 8322413607.jpg, -1, -1, 72.9230769231
1653
+ 8330807035.jpg, -1, -1, 34.9705882353
1654
+ 8331758509.jpg, -1, -1, 68.3026315789
1655
+ 8334203333.jpg, -1, -1, 48.0320512821
1656
+ 8336846212.jpg, -1, -1, 68.375
1657
+ 8337813363.jpg, -1, -1, 68.7331932773
1658
+ 8338842182.jpg, -1, -1, 67.2920353982
1659
+ 8346970641.jpg, -1, -1, 51.6891891892
1660
+ 8349327834.jpg, -1, -1, 47.25
1661
+ 8350532456.jpg, -1, -1, 66.8613445378
1662
+ 8351745980.jpg, -1, -1, 61.5257009346
1663
+ 8352587213.jpg, -1, -1, 63.8040540541
1664
+ 8353048335.jpg, -1, -1, 62.2163865546
1665
+ 8353276755.jpg, -1, -1, 71.125
1666
+ 8354332729.jpg, -1, -1, 53.1052631579
1667
+ 8354479425.jpg, -1, -1, 46.6351351351
1668
+ 8357037213.jpg, -1, -1, 69.1949541284
1669
+ 8358374077.jpg, -1, -1, 67.0
1670
+ 8358381896.jpg, -1, -1, 57.85
1671
+ 8362183655.jpg, -1, -1, 71.8714285714
1672
+ 8365545870.jpg, -1, -1, 74.1680327869
1673
+ 8374149596.jpg, -1, -1, 24.2079439252
1674
+ 8377051997.jpg, -1, -1, 45.0491071429
1675
+ 8380369304.jpg, -1, -1, 36.925
1676
+ 8386378049.jpg, -1, -1, 66.6283783784
1677
+ 8391500206.jpg, -1, -1, 46.2635135135
1678
+ 8393106944.jpg, -1, -1, 22.2257281553
1679
+ 8393175128.jpg, -1, -1, 68.9324324324
1680
+ 8395711832.jpg, -1, -1, 38.3012820513
1681
+ 8397860756.jpg, -1, -1, 56.3928571429
1682
+ 8397958686.jpg, -1, -1, 60.6004672897
1683
+ 8398198583.jpg, -1, -1, 78.231092437
1684
+ 8399489944.jpg, -1, -1, 60.8653846154
1685
+ 8401416435.jpg, -1, -1, 74.1903669725
1686
+ 8402267016.jpg, -1, -1, 50.2027027027
1687
+ 8403312891.jpg, -1, -1, 43.825
1688
+ 8403342715.jpg, -1, -1, 48.8655660377
1689
+ 8404206319.jpg, -1, -1, 62.4625
1690
+ 8407650196.jpg, -1, -1, 60.7142857143
1691
+ 8407751062.jpg, -1, -1, 57.1605504587
1692
+ 8417650354.jpg, -1, -1, 69.3040540541
1693
+ 8420816712.jpg, -1, -1, 42.3922413793
1694
+ 8421980802.jpg, -1, -1, 66.8625
1695
+ 8425720386.jpg, -1, -1, 54.7428571429
1696
+ 8434015762.jpg, -1, -1, 49.15
1697
+ 8434381156.jpg, -1, -1, 31.2689655172
1698
+ 8434827647.jpg, -1, -1, 77.5657894737
1699
+ 8435700290.jpg, -1, -1, 74.5870535714
1700
+ 8435902776.jpg, -1, -1, 70.09375
1701
+ 8437609150.jpg, -1, -1, 80.0379464286
1702
+ 8438205347.jpg, -1, -1, 71.2028301887
1703
+ 8438721601.jpg, -1, -1, 77.402173913
1704
+ 8438943296.jpg, -1, -1, 53.6293103448
1705
+ 8442013836.jpg, -1, -1, 46.784351145
1706
+ 8442383096.jpg, -1, -1, 43.15
1707
+ 8447352918.jpg, -1, -1, 43.6642857143
1708
+ 8448710097.jpg, -1, -1, 60.6422018349
1709
+ 8450680026.jpg, -1, -1, 61.9661016949
1710
+ 8450836581.jpg, -1, -1, 70.5357142857
1711
+ 8450870263.jpg, -1, -1, 56.2972972973
1712
+ 8457028188.jpg, -1, -1, 77.858974359
1713
+ 8458155212.jpg, -1, -1, 67.5593220339
1714
+ 8460376605.jpg, -1, -1, 78.7857142857
1715
+ 8461542458.jpg, -1, -1, 41.4097222222
1716
+ 8462442319.jpg, -1, -1, 79.5608108108
1717
+ 8463119506.jpg, -1, -1, 74.3411016949
1718
+ 8463181410.jpg, -1, -1, 47.1116071429
1719
+ 8465276999.jpg, -1, -1, 72.1918103448
1720
+ 8465545790.jpg, -1, -1, 62.4509345794
1721
+ 8473611231.jpg, -1, -1, 13.2593457944
1722
+ 8474594407.jpg, -1, -1, 62.4080188679
1723
+ 8481450412.jpg, -1, -1, 37.225
1724
+ 8485295618.jpg, -1, -1, 50.95
1725
+ 8486814802.jpg, -1, -1, 73.476635514
1726
+ 8492768208.jpg, -1, -1, 71.3108108108
1727
+ 8496923161.jpg, -1, -1, 65.9205607477
1728
+ 8497829320.jpg, -1, -1, 62.0347222222
1729
+ 8501012764.jpg, -1, -1, 51.2173913043
1730
+ 8501698385.jpg, -1, -1, 69.0810810811
1731
+ 8503871115.jpg, -1, -1, 60.4868421053
1732
+ 8510431014.jpg, -1, -1, 58.9101941748
1733
+ 85119046.jpg, -1, -1, 35.03125
1734
+ 8515582085.jpg, -1, -1, 67.6875
1735
+ 8518610606.jpg, -1, -1, 54.9196428571
1736
+ 8522041587.jpg, -1, -1, 58.8973214286
1737
+ 8524995451.jpg, -1, -1, 52.3581081081
1738
+ 8527988312.jpg, -1, -1, 76.9717391304
1739
+ 8528201518.jpg, -1, -1, 79.4864864865
1740
+ 8528934790.jpg, -1, -1, 72.7894736842
1741
+ 8529310804.jpg, -1, -1, 66.7847826087
1742
+ 8529705971.jpg, -1, -1, 75.7610619469
1743
+ 8530355188.jpg, -1, -1, 36.6432038835
1744
+ 8531633123.jpg, -1, -1, 80.3325892857
1745
+ 8532727626.jpg, -1, -1, 70.4663865546
1746
+ 8533373653.jpg, -1, -1, 59.6184210526
1747
+ 8535035859.jpg, -1, -1, 47.5586956522
1748
+ 8535232822.jpg, -1, -1, 68.7522123894
1749
+ 8539498079.jpg, -1, -1, 57.775
1750
+ 8540532034.jpg, -1, -1, 52.6949541284
1751
+ 8541006859.jpg, -1, -1, 61.328125
1752
+ 8545722241.jpg, -1, -1, 52.5625
1753
+ 8545905705.jpg, -1, -1, 29.875
1754
+ 8550575599.jpg, -1, -1, 57.3621495327
1755
+ 8552740610.jpg, -1, -1, 30.8211009174
1756
+ 8555190491.jpg, -1, -1, 70.9391891892
1757
+ 8556730555.jpg, -1, -1, 58.5186915888
1758
+ 8556786880.jpg, -1, -1, 73.4247787611
1759
+ 8558582535.jpg, -1, -1, 58.3340336134
1760
+ 8560249138.jpg, -1, -1, 56.1631355932
1761
+ 8565939221.jpg, -1, -1, 39.8108695652
1762
+ 8567878134.jpg, -1, -1, 54.302173913
1763
+ 8590374545.jpg, -1, -1, 66.1969026549
1764
+ 8595948410.jpg, -1, -1, 33.4260869565
1765
+ 8596901738.jpg, -1, -1, 34.3466386555
1766
+ 8602839124.jpg, -1, -1, 44.1869565217
1767
+ 8607324496.jpg, -1, -1, 50.3398058252
1768
+ 8607422328.jpg, -1, -1, 24.44375
1769
+ 8607616136.jpg, -1, -1, 43.2205882353
1770
+ 8610015449.jpg, -1, -1, 69.775
1771
+ 8610505282.jpg, -1, -1, 74.4180672269
1772
+ 8618789190.jpg, -1, -1, 74.9444444444
1773
+ 8623218161.jpg, -1, -1, 77.9760869565
1774
+ 8626678643.jpg, -1, -1, 65.0086206897
1775
+ 8630845484.jpg, -1, -1, 42.6118421053
1776
+ 8631436677.jpg, -1, -1, 72.3428571429
1777
+ 8632003704.jpg, -1, -1, 59.8451327434
1778
+ 8634276040.jpg, -1, -1, 46.9097222222
1779
+ 8634744119.jpg, -1, -1, 74.1805555556
1780
+ 8639759293.jpg, -1, -1, 72.325
1781
+ 8644207930.jpg, -1, -1, 26.4189189189
1782
+ 8644571098.jpg, -1, -1, 76.7364864865
1783
+ 8645014360.jpg, -1, -1, 44.0022123894
1784
+ 8646019769.jpg, -1, -1, 66.3181818182
1785
+ 8646711055.jpg, -1, -1, 72.5
1786
+ 8648111033.jpg, -1, -1, 50.0805084746
1787
+ 8655538784.jpg, -1, -1, 21.2162162162
1788
+ 8664375707.jpg, -1, -1, 65.9205607477
1789
+ 8667121921.jpg, -1, -1, 70.0394736842
1790
+ 8668135949.jpg, -1, -1, 53.3986486486
1791
+ 8668194198.jpg, -1, -1, 63.4324324324
1792
+ 8670498239.jpg, -1, -1, 71.5412844037
1793
+ 8675618794.jpg, -1, -1, 43.4391891892
1794
+ 8676415201.jpg, -1, -1, 66.5580357143
1795
+ 8683910174.jpg, -1, -1, 46.15
1796
+ 8687755587.jpg, -1, -1, 57.7023809524
1797
+ 8695646882.jpg, -1, -1, 77.24375
1798
+ 8696567124.jpg, -1, -1, 35.0405405405
1799
+ 8697364440.jpg, -1, -1, 66.2763157895
1800
+ 8701800150.jpg, -1, -1, 71.3108108108
1801
+ 8702450132.jpg, -1, -1, 57.5426829268
1802
+ 8702518420.jpg, -1, -1, 76.9568965517
1803
+ 8702530063.jpg, -1, -1, 68.8252212389
1804
+ 8704133333.jpg, -1, -1, 70.5044247788
1805
+ 8706343716.jpg, -1, -1, 60.5677966102
1806
+ 8706810197.jpg, -1, -1, 70.6050420168
1807
+ 8716882404.jpg, -1, -1, 69.2834821429
1808
+ 8717130944.jpg, -1, -1, 72.7542016807
1809
+ 8718842570.jpg, -1, -1, 56.5911214953
1810
+ 8725613522.jpg, -1, -1, 33.1680672269
1811
+ 8726161561.jpg, -1, -1, 74.3689320388
1812
+ 8728752853.jpg, -1, -1, 67.222972973
1813
+ 8729158424.jpg, -1, -1, 70.5357142857
1814
+ 8730747598.jpg, -1, -1, 43.6880733945
1815
+ 8731687035.jpg, -1, -1, 44.1081081081
1816
+ 8732282616.jpg, -1, -1, 66.8413461538
1817
+ 8742280365.jpg, -1, -1, 57.9385245902
1818
+ 8742323176.jpg, -1, -1, 55.6995798319
1819
+ 8743944148.jpg, -1, -1, 35.9324324324
1820
+ 8746191422.jpg, -1, -1, 80.7759433962
1821
+ 8746457937.jpg, -1, -1, 47.2876106195
1822
+ 8751039261.jpg, -1, -1, 73.4474789916
1823
+ 8751458979.jpg, -1, -1, 69.9203539823
1824
+ 8754025405.jpg, -1, -1, 77.8326086957
1825
+ 8754056878.jpg, -1, -1, 75.0290178571
1826
+ 8755395714.jpg, -1, -1, 73.1695652174
1827
+ 8755824682.jpg, -1, -1, 74.3247663551
1828
+ 8772955624.jpg, -1, -1, 77.2586956522
1829
+ 8776151757.jpg, -1, -1, 64.7702702703
1830
+ 8797772647.jpg, -1, -1, 65.35
1831
+ 8802725309.jpg, -1, -1, 40.724789916
1832
+ 8816975901.jpg, -1, -1, 61.1071428571
1833
+ 8817313814.jpg, -1, -1, 72.313559322
1834
+ 8819567462.jpg, -1, -1, 40.1875
1835
+ 8844582694.jpg, -1, -1, 49.035046729
1836
+ 8862366940.jpg, -1, -1, 61.5491071429
1837
+ 8864018641.jpg, -1, -1, 66.2887931034
1838
+ 8882534571.jpg, -1, -1, 44.2775423729
1839
+ 8899231880.jpg, -1, -1, 41.0389908257
1840
+ 8901015763.jpg, -1, -1, 46.0803571429
1841
+ 890231750.jpg, -1, -1, 49.9551886792
1842
+ 8904864157.jpg, -1, -1, 68.5656934307
1843
+ 8909361776.jpg, -1, -1, 68.4864864865
1844
+ 8910371075.jpg, -1, -1, 74.7958715596
1845
+ 8911730034.jpg, -1, -1, 60.8287401575
1846
+ 8929900709.jpg, -1, -1, 62.3175675676
1847
+ 8930797229.jpg, -1, -1, 81.1
1848
+ 8933855427.jpg, -1, -1, 59.8935643564
1849
+ 8938035476.jpg, -1, -1, 71.2385321101
1850
+ 89393627.jpg, -1, -1, 27.4138655462
1851
+ 8965909066.jpg, -1, -1, 68.7331932773
1852
+ 8975620960.jpg, -1, -1, 50.8683035714
1853
+ 8984346211.jpg, -1, -1, 69.0442477876
1854
+ 8984699375.jpg, -1, -1, 55.140625
1855
+ 9019428727.jpg, -1, -1, 66.925
1856
+ 9029384629.jpg, -1, -1, 30.2641509434
1857
+ 9034047597.jpg, -1, -1, 65.7909482759
1858
+ 9034707954.jpg, -1, -1, 57.2302631579
1859
+ 9034716133.jpg, -1, -1, 77.2546728972
1860
+ 9036180286.jpg, -1, -1, 72.3035714286
1861
+ 9050865219.jpg, -1, -1, 75.0972222222
1862
+ 9052518863.jpg, -1, -1, 52.975
1863
+ 9055879747.jpg, -1, -1, 54.2844036697
1864
+ 9058722713.jpg, -1, -1, 55.3940677966
1865
+ 9069424514.jpg, -1, -1, 56.8224299065
1866
+ 9070049178.jpg, -1, -1, 73.825
1867
+ 9076304306.jpg, -1, -1, 34.1386554622
1868
+ 9102604497.jpg, -1, -1, 41.8608490566
1869
+ 9109271301.jpg, -1, -1, 71.7803738318
1870
+ 9109755543.jpg, -1, -1, 28.3151260504
1871
+ 9113486865.jpg, -1, -1, 76.6378504673
1872
+ 9115676276.jpg, -1, -1, 70.6330275229
1873
+ 9119961649.jpg, -1, -1, 81.1025641026
1874
+ 9121896285.jpg, -1, -1, 57.6351351351
1875
+ 9131687822.jpg, -1, -1, 73.9586956522
1876
+ 91357025.jpg, -1, -1, 64.5042016807
1877
+ 9141403760.jpg, -1, -1, 62.125
1878
+ 9150255535.jpg, -1, -1, 78.4583333333
1879
+ 9159337218.jpg, -1, -1, 74.7
1880
+ 9186366283.jpg, -1, -1, 61.0
1881
+ 9194756449.jpg, -1, -1, 48.1927966102
1882
+ 9198766588.jpg, -1, -1, 72.7894736842
1883
+ 9198779264.jpg, -1, -1, 45.0491071429
1884
+ 9200829224.jpg, -1, -1, 73.8628318584
1885
+ 9202257544.jpg, -1, -1, 63.2569444444
1886
+ 9209619689.jpg, -1, -1, 68.2090517241
1887
+ 9212037816.jpg, -1, -1, 71.2013888889
1888
+ 9220351854.jpg, -1, -1, 60.6911764706
1889
+ 9221345159.jpg, -1, -1, 62.7016806723
1890
+ 9225734576.jpg, -1, -1, 67.5110619469
1891
+ 9225878690.jpg, -1, -1, 55.0517241379
1892
+ 9226793819.jpg, -1, -1, 32.5441176471
1893
+ 9232503869.jpg, -1, -1, 32.8100961538
1894
+ 9234807959.jpg, -1, -1, 47.0150862069
1895
+ 9238616212.jpg, -1, -1, 70.2004310345
1896
+ 9239862860.jpg, -1, -1, 66.9276315789
1897
+ 9245144961.jpg, -1, -1, 59.7721238938
1898
+ 92466227.jpg, -1, -1, 48.2121848739
1899
+ 9249859612.jpg, -1, -1, 67.9082568807
1900
+ 9269195837.jpg, -1, -1, 64.5472972973
1901
+ 9274677525.jpg, -1, -1, 58.0567226891
1902
+ 9277184417.jpg, -1, -1, 53.8883928571
1903
+ 9295408685.jpg, -1, -1, 58.5091240876
1904
+ 9299238943.jpg, -1, -1, 49.737394958
1905
+ 930643510.jpg, -1, -1, 24.1148648649
1906
+ 9310356178.jpg, -1, -1, 58.3040540541
1907
+ 9315620500.jpg, -1, -1, 69.0342465753
1908
+ 9322027317.jpg, -1, -1, 58.75
1909
+ 9324654543.jpg, -1, -1, 61.456
1910
+ 9329940580.jpg, -1, -1, 50.95
1911
+ 9333668178.jpg, -1, -1, 74.1351351351
1912
+ 9334502001.jpg, -1, -1, 53.0328947368
1913
+ 9341616816.jpg, -1, -1, 75.25
1914
+ 9346372358.jpg, -1, -1, 74.8080357143
1915
+ 9362014571.jpg, -1, -1, 45.4782608696
1916
+ 9365989820.jpg, -1, -1, 55.2476415094
1917
+ 9369620513.jpg, -1, -1, 76.8705357143
1918
+ 9372534946.jpg, -1, -1, 61.7972972973
1919
+ 9375359458.jpg, -1, -1, 65.8214285714
1920
+ 9379361831.jpg, -1, -1, 77.3861607143
1921
+ 9401118786.jpg, -1, -1, 52.5986842105
1922
+ 940371579.jpg, -1, -1, 59.50625
1923
+ 9408400121.jpg, -1, -1, 71.5756302521
1924
+ 9419183210.jpg, -1, -1, 69.936440678
1925
+ 9435287371.jpg, -1, -1, 78.9407894737
1926
+ 9439902792.jpg, -1, -1, 75.9571428571
1927
+ 9439964500.jpg, -1, -1, 55.4811320755
1928
+ 944341845.jpg, -1, -1, 52.1952054795
1929
+ 9463363154.jpg, -1, -1, 63.9264705882
1930
+ 9468744508.jpg, -1, -1, 79.375
1931
+ 9472868070.jpg, -1, -1, 61.4471153846
1932
+ 9476849940.jpg, -1, -1, 73.4705882353
1933
+ 9480702491.jpg, -1, -1, 73.3622881356
1934
+ 9482519952.jpg, -1, -1, 57.5047169811
1935
+ 9483838441.jpg, -1, -1, 78.4143835616
1936
+ 9488096665.jpg, -1, -1, 76.6012931034
1937
+ 9488864072.jpg, -1, -1, 50.6586538462
1938
+ 9489836089.jpg, -1, -1, 68.4380733945
1939
+ 9493293324.jpg, -1, -1, 72.3552631579
1940
+ 9500372295.jpg, -1, -1, 44.3125
1941
+ 9505522131.jpg, -1, -1, 66.5769230769
1942
+ 9510873691.jpg, -1, -1, 67.5110619469
1943
+ 952167260.jpg, -1, -1, 48.3846153846
1944
+ 9523882653.jpg, -1, -1, 47.5928571429
1945
+ 9524699151.jpg, -1, -1, 77.3304347826
1946
+ 9527678556.jpg, -1, -1, 65.7428571429
1947
+ 9527980922.jpg, -1, -1, 70.7430555556
1948
+ 9536683213.jpg, -1, -1, 65.5212264151
1949
+ 9537434864.jpg, -1, -1, 59.1150442478
1950
+ 9538674147.jpg, -1, -1, 45.6559633028
1951
+ 9541231306.jpg, -1, -1, 24.7368421053
1952
+ 9541618071.jpg, -1, -1, 41.8157894737
1953
+ 9581336227.jpg, -1, -1, 58.1607142857
1954
+ 9582086369.jpg, -1, -1, 75.6086956522
1955
+ 9585740831.jpg, -1, -1, 70.5560344828
1956
+ 9586652263.jpg, -1, -1, 74.9527027027
1957
+ 9587630414.jpg, -1, -1, 75.31875
1958
+ 9590556429.jpg, -1, -1, 34.4159663866
1959
+ 9602714230.jpg, -1, -1, 60.6081081081
1960
+ 9604185976.jpg, -1, -1, 75.25
1961
+ 9605303857.jpg, -1, -1, 67.0
1962
+ 9613807416.jpg, -1, -1, 79.5223214286
1963
+ 9630325643.jpg, -1, -1, 32.2591743119
1964
+ 9639418449.jpg, -1, -1, 69.2956521739
1965
+ 9646776074.jpg, -1, -1, 67.5110619469
1966
+ 9650999310.jpg, -1, -1, 43.6365546218
1967
+ 9674243129.jpg, -1, -1, 63.2155963303
1968
+ 9681377633.jpg, -1, -1, 18.0298165138
1969
+ 9684211981.jpg, -1, -1, 75.7352941176
1970
+ 9686480737.jpg, -1, -1, 26.0019083969
1971
+ 9687081006.jpg, -1, -1, 66.4701834862
1972
+ 9694884270.jpg, -1, -1, 54.1011904762
1973
+ 9702019166.jpg, -1, -1, 60.0135135135
1974
+ 970460687.jpg, -1, -1, 65.6716101695
1975
+ 9710823281.jpg, -1, -1, 65.2714285714
1976
+ 9712051180.jpg, -1, -1, 69.4357142857
1977
+ 9712191760.jpg, -1, -1, 64.8097345133
1978
+ 9714595106.jpg, -1, -1, 48.1637168142
1979
+ 9722821291.jpg, -1, -1, 32.7415254237
1980
+ 9723956053.jpg, -1, -1, 64.9
1981
+ 9724373732.jpg, -1, -1, 65.8541666667
1982
+ 9731493509.jpg, -1, -1, 27.6597222222
1983
+ 9733980007.jpg, -1, -1, 43.6495535714
1984
+ 9737673243.jpg, -1, -1, 58.0291262136
1985
+ 9759154326.jpg, -1, -1, 60.0783898305
1986
+ 9762410596.jpg, -1, -1, 70.9152542373
1987
+ 9765112254.jpg, -1, -1, 59.0296610169
1988
+ 9766865862.jpg, -1, -1, 48.0872641509
1989
+ 9770742411.jpg, -1, -1, 70.6
1990
+ 9774901765.jpg, -1, -1, 46.6399082569
1991
+ 9780398791.jpg, -1, -1, 68.4102564103
1992
+ 9784668705.jpg, -1, -1, 70.9152542373
1993
+ 9788787724.jpg, -1, -1, 28.2027027027
1994
+ 97984449.jpg, -1, -1, 47.0336134454
1995
+ 9829643193.jpg, -1, -1, 69.1554054054
1996
+ 9834223025.jpg, -1, -1, 57.0405405405
1997
+ 9859562873.jpg, -1, -1, 36.2834821429
1998
+ 9866692876.jpg, -1, -1, 68.1092436975
1999
+ 9877078626.jpg, -1, -1, 75.8846153846
2000
+ 9884648494.jpg, -1, -1, 71.8940677966
2001
+ 9895608016.jpg, -1, -1, 52.0197368421
2002
+ 9906667675.jpg, -1, -1, 35.468220339
2003
+ 9910043795.jpg, -1, -1, 72.8407079646
2004
+ 9910216783.jpg, -1, -1, 78.4623893805
2005
+ 9913105155.jpg, -1, -1, 72.452173913
2006
+ 9915601385.jpg, -1, -1, 65.6602564103
2007
+ 9916890605.jpg, -1, -1, 56.1285046729
2008
+ 9923882853.jpg, -1, -1, 54.6949152542
2009
+ 9925803176.jpg, -1, -1, 46.5197368421
2010
+ 9935149655.jpg, -1, -1, 53.6730769231
2011
+ 9936084895.jpg, -1, -1, 61.6486486486
2012
+ 9977056676.jpg, -1, -1, 46.3013392857
2013
+ 9977191265.jpg, -1, -1, 74.4716981132
2014
+ 9984535544.jpg, -1, -1, 70.0200892857
2015
+ 9991999836.jpg, -1, -1, 78.0924369748
examplar_data_labels/KoNiQ10k/training_labels.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/KoNiQ10k/validation_labels.txt ADDED
@@ -0,0 +1,1000 @@
1
+ 10043785683.jpg, -1, -1, 71.2043269231
2
+ 10059344614.jpg, -1, -1, 68.6651376147
3
+ 10061596254.jpg, -1, -1, 59.6418918919
4
+ 10067457634.jpg, -1, -1, 78.4390756303
5
+ 10082923485.jpg, -1, -1, 53.2260869565
6
+ 10089177336.jpg, -1, -1, 72.9459459459
7
+ 10140071304.jpg, -1, -1, 62.5576923077
8
+ 10164842933.jpg, -1, -1, 76.69375
9
+ 10241895785.jpg, -1, -1, 67.0736607143
10
+ 10244314384.jpg, -1, -1, 47.5481651376
11
+ 10246311914.jpg, -1, -1, 75.4579831933
12
+ 10264804094.jpg, -1, -1, 76.6495535714
13
+ 10264984793.jpg, -1, -1, 66.1
14
+ 10344523015.jpg, -1, -1, 52.5263157895
15
+ 10371125786.jpg, -1, -1, 38.2451456311
16
+ 10409059825.jpg, -1, -1, 73.1513157895
17
+ 10414716033.jpg, -1, -1, 71.0878378378
18
+ 10415631353.jpg, -1, -1, 43.0819327731
19
+ 10453950204.jpg, -1, -1, 58.975
20
+ 10458591.jpg, -1, -1, 51.4871794872
21
+ 10463545414.jpg, -1, -1, 55.0444915254
22
+ 10465121613.jpg, -1, -1, 61.5723684211
23
+ 10466350116.jpg, -1, -1, 77.2243589744
24
+ 10492897986.jpg, -1, -1, 44.6283783784
25
+ 10513907744.jpg, -1, -1, 59.4311926606
26
+ 10544833625.jpg, -1, -1, 50.5756880734
27
+ 10567188645.jpg, -1, -1, 64.1575630252
28
+ 10605846204.jpg, -1, -1, 70.4620535714
29
+ 10619909515.jpg, -1, -1, 60.7775423729
30
+ 10628497685.jpg, -1, -1, 54.947826087
31
+ 10680502673.jpg, -1, -1, 78.75625
32
+ 10686332213.jpg, -1, -1, 57.4216101695
33
+ 10689538413.jpg, -1, -1, 73.66875
34
+ 10692654005.jpg, -1, -1, 68.2
35
+ 10692667386.jpg, -1, -1, 71.9033018868
36
+ 10694350076.jpg, -1, -1, 73.1285714286
37
+ 10706770686.jpg, -1, -1, 76.8519417476
38
+ 10728525673.jpg, -1, -1, 68.3026315789
39
+ 107505789.jpg, -1, -1, 54.35
40
+ 10770026303.jpg, -1, -1, 72.0825892857
41
+ 10784159775.jpg, -1, -1, 74.8849557522
42
+ 1085590803.jpg, -1, -1, 24.0558035714
43
+ 10856757.jpg, -1, -1, 32.2954545455
44
+ 10883456586.jpg, -1, -1, 28.8793103448
45
+ 10905114716.jpg, -1, -1, 50.0695652174
46
+ 10923819485.jpg, -1, -1, 40.8275862069
47
+ 10947519133.jpg, -1, -1, 74.609223301
48
+ 10990887046.jpg, -1, -1, 58.0765306122
49
+ 11062902804.jpg, -1, -1, 69.9117647059
50
+ 11068654734.jpg, -1, -1, 68.0694444444
51
+ 11148529964.jpg, -1, -1, 58.2
52
+ 11170786154.jpg, -1, -1, 67.6932773109
53
+ 11172371243.jpg, -1, -1, 75.8785714286
54
+ 11229777185.jpg, -1, -1, 66.0747663551
55
+ 11256485296.jpg, -1, -1, 61.9044117647
56
+ 11262156493.jpg, -1, -1, 79.9271653543
57
+ 11268202995.jpg, -1, -1, 41.734375
58
+ 11273127766.jpg, -1, -1, 79.5575221239
59
+ 11291450085.jpg, -1, -1, 56.7394957983
60
+ 1130601148.jpg, -1, -1, 72.3804347826
61
+ 11333447674.jpg, -1, -1, 76.9978813559
62
+ 11333782783.jpg, -1, -1, 70.3584070796
63
+ 11338371864.jpg, -1, -1, 68.8245192308
64
+ 11341117543.jpg, -1, -1, 61.6850961538
65
+ 11369797883.jpg, -1, -1, 74.0277777778
66
+ 11372421485.jpg, -1, -1, 65.685840708
67
+ 11396447303.jpg, -1, -1, 48.0107758621
68
+ 11414474354.jpg, -1, -1, 18.5514705882
69
+ 11426976205.jpg, -1, -1, 35.188559322
70
+ 11433593823.jpg, -1, -1, 62.9486607143
71
+ 11436378704.jpg, -1, -1, 79.3397435897
72
+ 11474376103.jpg, -1, -1, 53.25
73
+ 11484412296.jpg, -1, -1, 63.2641509434
74
+ 11518631385.jpg, -1, -1, 32.2668067227
75
+ 11533404563.jpg, -1, -1, 65.4668141593
76
+ 11541375264.jpg, -1, -1, 50.3513513514
77
+ 11555549865.jpg, -1, -1, 60.9402654867
78
+ 117172917.jpg, -1, -1, 62.9789915966
79
+ 122553631.jpg, -1, -1, 37.85
80
+ 128781220.jpg, -1, -1, 59.7205882353
81
+ 1297819816.jpg, -1, -1, 74.2794117647
82
+ 1316239068.jpg, -1, -1, 61.6165254237
83
+ 1396109073.jpg, -1, -1, 63.4343220339
84
+ 1402675396.jpg, -1, -1, 29.9102564103
85
+ 142117255.jpg, -1, -1, 67.9625
86
+ 1434258458.jpg, -1, -1, 67.8608695652
87
+ 145215893.jpg, -1, -1, 10.14375
88
+ 147639760.jpg, -1, -1, 47.40625
89
+ 149836125.jpg, -1, -1, 46.756302521
90
+ 1540229828.jpg, -1, -1, 72.9125
91
+ 1543929747.jpg, -1, -1, 33.7203389831
92
+ 1561250178.jpg, -1, -1, 61.4191176471
93
+ 160999577.jpg, -1, -1, 30.5909090909
94
+ 1626230781.jpg, -1, -1, 15.2293577982
95
+ 1799451312.jpg, -1, -1, 62.0945945946
96
+ 183156510.jpg, -1, -1, 48.8361344538
97
+ 186520031.jpg, -1, -1, 60.8298319328
98
+ 1947195937.jpg, -1, -1, 41.0818584071
99
+ 197324620.jpg, -1, -1, 62.5630252101
100
+ 1995190133.jpg, -1, -1, 77.369266055
101
+ 208949930.jpg, -1, -1, 72.6848739496
102
+ 2115915075.jpg, -1, -1, 55.8086956522
103
+ 2149719899.jpg, -1, -1, 38.1621621622
104
+ 2172409998.jpg, -1, -1, 56.0
105
+ 2190310.jpg, -1, -1, 58.3269230769
106
+ 2197636094.jpg, -1, -1, 72.286407767
107
+ 2199106763.jpg, -1, -1, 50.775
108
+ 2203797715.jpg, -1, -1, 76.3096330275
109
+ 2204368170.jpg, -1, -1, 44.2775423729
110
+ 2217543976.jpg, -1, -1, 76.1339285714
111
+ 2221745311.jpg, -1, -1, 39.110619469
112
+ 2223255997.jpg, -1, -1, 46.825
113
+ 2225683996.jpg, -1, -1, 73.5168067227
114
+ 2227011292.jpg, -1, -1, 70.6580188679
115
+ 2237630871.jpg, -1, -1, 78.55
116
+ 2239403255.jpg, -1, -1, 77.4711538462
117
+ 2255689742.jpg, -1, -1, 58.75
118
+ 2260527943.jpg, -1, -1, 58.4472477064
119
+ 2263434837.jpg, -1, -1, 77.525862069
120
+ 2280432289.jpg, -1, -1, 43.4836448598
121
+ 2293775114.jpg, -1, -1, 43.6483050847
122
+ 22947511.jpg, -1, -1, 13.9743589744
123
+ 2305236722.jpg, -1, -1, 25.4605263158
124
+ 2306462251.jpg, -1, -1, 35.0405405405
125
+ 2308986836.jpg, -1, -1, 64.6869158879
126
+ 2314346258.jpg, -1, -1, 54.2754237288
127
+ 2322053874.jpg, -1, -1, 80.0336134454
128
+ 232327907.jpg, -1, -1, 57.0861344538
129
+ 2325353113.jpg, -1, -1, 14.5882352941
130
+ 2344211137.jpg, -1, -1, 68.6351351351
131
+ 2355891811.jpg, -1, -1, 34.9082568807
132
+ 2363717244.jpg, -1, -1, 75.5296610169
133
+ 2367261033.jpg, -1, -1, 18.3175675676
134
+ 2367765365.jpg, -1, -1, 44.3486842105
135
+ 2405333928.jpg, -1, -1, 51.0593220339
136
+ 2415583702.jpg, -1, -1, 29.3156779661
137
+ 2416231204.jpg, -1, -1, 57.5921052632
138
+ 2433641656.jpg, -1, -1, 55.2455752212
139
+ 2437860365.jpg, -1, -1, 71.725
140
+ 2447797924.jpg, -1, -1, 41.65
141
+ 2460847286.jpg, -1, -1, 36.3349056604
142
+ 2476813465.jpg, -1, -1, 54.5114678899
143
+ 2500618710.jpg, -1, -1, 71.0891304348
144
+ 2503869220.jpg, -1, -1, 42.0186915888
145
+ 2519479147.jpg, -1, -1, 66.5657894737
146
+ 2538038277.jpg, -1, -1, 68.8025210084
147
+ 2542492471.jpg, -1, -1, 40.8869565217
148
+ 255857546.jpg, -1, -1, 75.1806722689
149
+ 2569803000.jpg, -1, -1, 57.8815789474
150
+ 2636589178.jpg, -1, -1, 54.6973684211
151
+ 2654797164.jpg, -1, -1, 71.6357142857
152
+ 2664892520.jpg, -1, -1, 54.5939849624
153
+ 26847054.jpg, -1, -1, 58.6794871795
154
+ 2688019250.jpg, -1, -1, 52.8571428571
155
+ 2703740653.jpg, -1, -1, 61.2642857143
156
+ 2726774995.jpg, -1, -1, 27.55
157
+ 2741661469.jpg, -1, -1, 69.0046728972
158
+ 274223521.jpg, -1, -1, 32.9976635514
159
+ 2743703757.jpg, -1, -1, 52.75
160
+ 2748301393.jpg, -1, -1, 40.825
161
+ 278636331.jpg, -1, -1, 58.0870535714
162
+ 2807676440.jpg, -1, -1, 63.175
163
+ 2924727768.jpg, -1, -1, 67.2920353982
164
+ 2938430307.jpg, -1, -1, 72.5982142857
165
+ 2941933060.jpg, -1, -1, 79.4789915966
166
+ 2946661592.jpg, -1, -1, 71.0210084034
167
+ 2952066894.jpg, -1, -1, 70.2702702703
168
+ 2986131608.jpg, -1, -1, 50.5723684211
169
+ 30054528.jpg, -1, -1, 69.3072033898
170
+ 3006180799.jpg, -1, -1, 55.1824324324
171
+ 3007216990.jpg, -1, -1, 39.1302521008
172
+ 3048643650.jpg, -1, -1, 53.2283464567
173
+ 3049913075.jpg, -1, -1, 71.2103448276
174
+ 3069379747.jpg, -1, -1, 63.1553398058
175
+ 3091624267.jpg, -1, -1, 4.94871794872
176
+ 3092999912.jpg, -1, -1, 33.55
177
+ 3115828038.jpg, -1, -1, 24.4928571429
178
+ 3125804438.jpg, -1, -1, 56.4125
179
+ 3128928410.jpg, -1, -1, 72.5252293578
180
+ 3130276162.jpg, -1, -1, 41.63125
181
+ 3132104449.jpg, -1, -1, 74.8571428571
182
+ 3146365512.jpg, -1, -1, 79.2654867257
183
+ 3170684406.jpg, -1, -1, 69.8424369748
184
+ 3193120688.jpg, -1, -1, 61.6334951456
185
+ 3195516092.jpg, -1, -1, 69.4092920354
186
+ 3201560837.jpg, -1, -1, 41.734375
187
+ 3228957282.jpg, -1, -1, 69.5282258065
188
+ 324220404.jpg, -1, -1, 62.6100917431
189
+ 3259283855.jpg, -1, -1, 45.4623893805
190
+ 3281066235.jpg, -1, -1, 73.0911214953
191
+ 3284203868.jpg, -1, -1, 70.0
192
+ 3304676774.jpg, -1, -1, 63.4642857143
193
+ 3323937813.jpg, -1, -1, 58.75
194
+ 3347228837.jpg, -1, -1, 70.7714285714
195
+ 3351610329.jpg, -1, -1, 65.8950892857
196
+ 3355714233.jpg, -1, -1, 58.8898305085
197
+ 336052627.jpg, -1, -1, 42.6216216216
198
+ 3364868711.jpg, -1, -1, 26.5675675676
199
+ 3367803108.jpg, -1, -1, 66.9221698113
200
+ 3379476157.jpg, -1, -1, 62.4555084746
201
+ 338924292.jpg, -1, -1, 32.317961165
202
+ 3405525198.jpg, -1, -1, 34.2115384615
203
+ 3411974393.jpg, -1, -1, 50.5
204
+ 3412359483.jpg, -1, -1, 67.9545454545
205
+ 3420154563.jpg, -1, -1, 52.7372881356
206
+ 3421464878.jpg, -1, -1, 63.5940366972
207
+ 3437594883.jpg, -1, -1, 34.7690677966
208
+ 3450351361.jpg, -1, -1, 82.3318584071
209
+ 3468424403.jpg, -1, -1, 54.3453389831
210
+ 3485554199.jpg, -1, -1, 50.3457943925
211
+ 3490456545.jpg, -1, -1, 38.9639830508
212
+ 3494678423.jpg, -1, -1, 48.2214285714
213
+ 3502153355.jpg, -1, -1, 63.860619469
214
+ 35311267.jpg, -1, -1, 26.9978991597
215
+ 3532282772.jpg, -1, -1, 60.3857758621
216
+ 3547062481.jpg, -1, -1, 64.0535714286
217
+ 3549424661.jpg, -1, -1, 41.0125
218
+ 3566079728.jpg, -1, -1, 63.1351351351
219
+ 3575258454.jpg, -1, -1, 64.0142857143
220
+ 3576771116.jpg, -1, -1, 52.997706422
221
+ 3577464388.jpg, -1, -1, 67.6629464286
222
+ 3601352242.jpg, -1, -1, 66.9294871795
223
+ 3603046487.jpg, -1, -1, 49.3717948718
224
+ 3604508101.jpg, -1, -1, 63.55
225
+ 3620678223.jpg, -1, -1, 59.2041284404
226
+ 364777572.jpg, -1, -1, 63.2433035714
227
+ 3658535721.jpg, -1, -1, 78.0
228
+ 3658667754.jpg, -1, -1, 69.8448275862
229
+ 3662491369.jpg, -1, -1, 48.2121848739
230
+ 3662558151.jpg, -1, -1, 45.8073394495
231
+ 3663369776.jpg, -1, -1, 74.6260504202
232
+ 3670445078.jpg, -1, -1, 31.7397260274
233
+ 3689872234.jpg, -1, -1, 40.875
234
+ 3697493807.jpg, -1, -1, 82.2909482759
235
+ 3697530291.jpg, -1, -1, 82.0064655172
236
+ 3707022590.jpg, -1, -1, 81.5256410256
237
+ 3715780832.jpg, -1, -1, 61.8347826087
238
+ 3736336896.jpg, -1, -1, 49.8310810811
239
+ 3741259519.jpg, -1, -1, 66.342920354
240
+ 374612387.jpg, -1, -1, 64.1334745763
241
+ 375446021.jpg, -1, -1, 48.2165178571
242
+ 3775591333.jpg, -1, -1, 43.6378504673
243
+ 3805107589.jpg, -1, -1, 59.4612068966
244
+ 3824105113.jpg, -1, -1, 55.8834745763
245
+ 3824485636.jpg, -1, -1, 48.1625
246
+ 3827911032.jpg, -1, -1, 72.3971962617
247
+ 3843859642.jpg, -1, -1, 65.8214285714
248
+ 3846071699.jpg, -1, -1, 66.3503937008
249
+ 3847022878.jpg, -1, -1, 66.475
250
+ 3876523676.jpg, -1, -1, 72.2105263158
251
+ 3891717415.jpg, -1, -1, 71.4196428571
252
+ 3895663827.jpg, -1, -1, 46.6805555556
253
+ 3904942577.jpg, -1, -1, 18.2173913043
254
+ 3918062562.jpg, -1, -1, 76.7435344828
255
+ 392179845.jpg, -1, -1, 16.2482758621
256
+ 3932630945.jpg, -1, -1, 61.6227678571
257
+ 3946405001.jpg, -1, -1, 71.6228448276
258
+ 3948277105.jpg, -1, -1, 57.9789719626
259
+ 3952840501.jpg, -1, -1, 76.0
260
+ 3955560376.jpg, -1, -1, 63.0847457627
261
+ 3960128594.jpg, -1, -1, 53.5555555556
262
+ 3966829877.jpg, -1, -1, 68.0760869565
263
+ 3976883946.jpg, -1, -1, 56.5265957447
264
+ 3980401611.jpg, -1, -1, 45.4782608696
265
+ 4000012271.jpg, -1, -1, 77.45
266
+ 4005014529.jpg, -1, -1, 56.1067961165
267
+ 4008219206.jpg, -1, -1, 57.6041666667
268
+ 4016398417.jpg, -1, -1, 61.2544642857
269
+ 4016639569.jpg, -1, -1, 66.0508849558
270
+ 4023245742.jpg, -1, -1, 53.5504201681
271
+ 4023598688.jpg, -1, -1, 53.0733944954
272
+ 4029731030.jpg, -1, -1, 50.95
273
+ 4037555676.jpg, -1, -1, 24.1419491525
274
+ 404033442.jpg, -1, -1, 62.8050847458
275
+ 4046716515.jpg, -1, -1, 41.0534351145
276
+ 405908679.jpg, -1, -1, 65.2281879195
277
+ 4067961238.jpg, -1, -1, 40.0879310345
278
+ 4068780416.jpg, -1, -1, 43.9868421053
279
+ 4078925567.jpg, -1, -1, 44.6539735099
280
+ 4086204414.jpg, -1, -1, 27.625
281
+ 409233389.jpg, -1, -1, 46.0576923077
282
+ 4095383312.jpg, -1, -1, 39.4495412844
283
+ 4099198318.jpg, -1, -1, 49.7431192661
284
+ 4100631766.jpg, -1, -1, 42.0986238532
285
+ 4106517753.jpg, -1, -1, 75.108974359
286
+ 4110400471.jpg, -1, -1, 59.7543478261
287
+ 411787882.jpg, -1, -1, 75.4652173913
288
+ 4127387822.jpg, -1, -1, 75.5344827586
289
+ 4132558989.jpg, -1, -1, 75.8340707965
290
+ 4138608778.jpg, -1, -1, 65.7415254237
291
+ 415283727.jpg, -1, -1, 49.9889380531
292
+ 4162338388.jpg, -1, -1, 66.5373831776
293
+ 418145002.jpg, -1, -1, 58.6714285714
294
+ 4193122125.jpg, -1, -1, 59.725
295
+ 4193696727.jpg, -1, -1, 56.1631355932
296
+ 4194035905.jpg, -1, -1, 70.6418918919
297
+ 4214550696.jpg, -1, -1, 56.8130434783
298
+ 4217243298.jpg, -1, -1, 55.7566371681
299
+ 4237536969.jpg, -1, -1, 75.4206896552
300
+ 4259140344.jpg, -1, -1, 54.6635514019
301
+ 4270574025.jpg, -1, -1, 39.175
302
+ 4299890430.jpg, -1, -1, 62.6148648649
303
+ 430051470.jpg, -1, -1, 62.2857142857
304
+ 4304984630.jpg, -1, -1, 63.7995689655
305
+ 4305975001.jpg, -1, -1, 73.9977678571
306
+ 4311062470.jpg, -1, -1, 64.5394736842
307
+ 4320399633.jpg, -1, -1, 61.7075471698
308
+ 4336460350.jpg, -1, -1, 75.3236607143
309
+ 4338156096.jpg, -1, -1, 64.421875
310
+ 434605634.jpg, -1, -1, 70.0581896552
311
+ 4347675881.jpg, -1, -1, 50.0769230769
312
+ 4365387751.jpg, -1, -1, 67.2946428571
313
+ 4370988573.jpg, -1, -1, 39.0609243697
314
+ 4388983100.jpg, -1, -1, 32.5878378378
315
+ 4408351965.jpg, -1, -1, 61.1681034483
316
+ 441241455.jpg, -1, -1, 45.0
317
+ 4423525827.jpg, -1, -1, 47.7242990654
318
+ 4430414110.jpg, -1, -1, 79.3403361345
319
+ 4432502474.jpg, -1, -1, 59.5068807339
320
+ 4435718106.jpg, -1, -1, 55.3529411765
321
+ 4436002363.jpg, -1, -1, 51.2208737864
322
+ 4448002516.jpg, -1, -1, 71.2364864865
323
+ 4463467980.jpg, -1, -1, 80.81875
324
+ 4471526544.jpg, -1, -1, 61.8262711864
325
+ 4480082097.jpg, -1, -1, 76.3782051282
326
+ 450063799.jpg, -1, -1, 61.3472222222
327
+ 450338979.jpg, -1, -1, 7.27876106195
328
+ 4512123861.jpg, -1, -1, 74.2423664122
329
+ 4512474932.jpg, -1, -1, 74.8680555556
330
+ 4525459727.jpg, -1, -1, 76.6
331
+ 4530384020.jpg, -1, -1, 63.6214285714
332
+ 4545054058.jpg, -1, -1, 24.7012711864
333
+ 4550104555.jpg, -1, -1, 69.4264705882
334
+ 4562682674.jpg, -1, -1, 39.8928571429
335
+ 4565683087.jpg, -1, -1, 80.3680555556
336
+ 4571847379.jpg, -1, -1, 53.1517857143
337
+ 4571937568.jpg, -1, -1, 56.2894736842
338
+ 4585233162.jpg, -1, -1, 43.9717391304
339
+ 4586793310.jpg, -1, -1, 50.5693277311
340
+ 4602769821.jpg, -1, -1, 75.3886554622
341
+ 4617163865.jpg, -1, -1, 28.9044117647
342
+ 4631640521.jpg, -1, -1, 73.7168141593
343
+ 463426919.jpg, -1, -1, 35.2478991597
344
+ 4638196012.jpg, -1, -1, 73.7898230088
345
+ 4669754441.jpg, -1, -1, 73.6717391304
346
+ 4683892759.jpg, -1, -1, 52.4275700935
347
+ 4688672987.jpg, -1, -1, 61.2943925234
348
+ 4691973869.jpg, -1, -1, 34.0
349
+ 4698449914.jpg, -1, -1, 43.7891791045
350
+ 4698931028.jpg, -1, -1, 65.5192307692
351
+ 4706726398.jpg, -1, -1, 40.935840708
352
+ 47095522.jpg, -1, -1, 67.4194915254
353
+ 4710464067.jpg, -1, -1, 73.3918918919
354
+ 4714778162.jpg, -1, -1, 65.6642857143
355
+ 4714961173.jpg, -1, -1, 66.2860576923
356
+ 4718032191.jpg, -1, -1, 58.8205128205
357
+ 4728497082.jpg, -1, -1, 70.8453389831
358
+ 4733042196.jpg, -1, -1, 73.6282051282
359
+ 4735563555.jpg, -1, -1, 68.3623853211
360
+ 4750416133.jpg, -1, -1, 65.6016949153
361
+ 4761802851.jpg, -1, -1, 50.1972477064
362
+ 4781552174.jpg, -1, -1, 68.2110091743
363
+ 4782546865.jpg, -1, -1, 67.0693277311
364
+ 4782987041.jpg, -1, -1, 79.03125
365
+ 4786690796.jpg, -1, -1, 36.5651260504
366
+ 4787504766.jpg, -1, -1, 62.1513157895
367
+ 4791175187.jpg, -1, -1, 61.1764705882
368
+ 4804146129.jpg, -1, -1, 39.8819444444
369
+ 4819833644.jpg, -1, -1, 23.6008403361
370
+ 4840518597.jpg, -1, -1, 56.1766055046
371
+ 4846003639.jpg, -1, -1, 57.9385245902
372
+ 4847954914.jpg, -1, -1, 54.49609375
373
+ 486170909.jpg, -1, -1, 56.5401785714
374
+ 4877994171.jpg, -1, -1, 54.2905405405
375
+ 4883254364.jpg, -1, -1, 68.3258928571
376
+ 4886277173.jpg, -1, -1, 30.025
377
+ 4886518096.jpg, -1, -1, 50.9230769231
378
+ 4887754576.jpg, -1, -1, 51.0110619469
379
+ 4890086996.jpg, -1, -1, 59.9911504425
380
+ 4900565313.jpg, -1, -1, 49.4485294118
381
+ 4908270930.jpg, -1, -1, 46.3371559633
382
+ 4912317009.jpg, -1, -1, 76.2617924528
383
+ 4918979865.jpg, -1, -1, 68.0760869565
384
+ 4921474940.jpg, -1, -1, 63.0306603774
385
+ 4922576334.jpg, -1, -1, 68.4601769912
386
+ 4924398774.jpg, -1, -1, 51.4575892857
387
+ 4926447658.jpg, -1, -1, 59.9610091743
388
+ 4936086328.jpg, -1, -1, 73.1875
389
+ 494015270.jpg, -1, -1, 50.9541284404
390
+ 4950276392.jpg, -1, -1, 39.4768907563
391
+ 4952399474.jpg, -1, -1, 59.8260869565
392
+ 4953755434.jpg, -1, -1, 58.45
393
+ 4975468754.jpg, -1, -1, 63.745412844
394
+ 4976142964.jpg, -1, -1, 52.6554054054
395
+ 4980059014.jpg, -1, -1, 60.2867647059
396
+ 498265046.jpg, -1, -1, 36.8875
397
+ 499095228.jpg, -1, -1, 81.9862385321
398
+ 4992884384.jpg, -1, -1, 70.3846153846
399
+ 4996581735.jpg, -1, -1, 78.6470588235
400
+ 5001719293.jpg, -1, -1, 36.936440678
401
+ 5003482891.jpg, -1, -1, 67.4194915254
402
+ 5005406987.jpg, -1, -1, 67.6991525424
403
+ 5008940426.jpg, -1, -1, 59.8782051282
404
+ 5012894248.jpg, -1, -1, 65.9239130435
405
+ 5016823142.jpg, -1, -1, 49.2184466019
406
+ 5024762120.jpg, -1, -1, 76.2162162162
407
+ 5026427468.jpg, -1, -1, 64.0586956522
408
+ 5032946224.jpg, -1, -1, 40.9915254237
409
+ 5040853192.jpg, -1, -1, 23.7896039604
410
+ 5043851130.jpg, -1, -1, 38.5216346154
411
+ 5062026548.jpg, -1, -1, 38.5592105263
412
+ 5065354809.jpg, -1, -1, 67.7795275591
413
+ 5078922181.jpg, -1, -1, 56.171875
414
+ 5081637571.jpg, -1, -1, 50.9852941176
415
+ 5083763203.jpg, -1, -1, 42.7610619469
416
+ 5091924028.jpg, -1, -1, 25.8308823529
417
+ 5092945872.jpg, -1, -1, 28.775
418
+ 5097621606.jpg, -1, -1, 62.2316513761
419
+ 5105331099.jpg, -1, -1, 15.8058035714
420
+ 5107763844.jpg, -1, -1, 65.3823529412
421
+ 5117589590.jpg, -1, -1, 69.5108695652
422
+ 5118376207.jpg, -1, -1, 65.05
423
+ 5124446081.jpg, -1, -1, 64.0882352941
424
+ 5130720224.jpg, -1, -1, 73.6458333333
425
+ 5132091056.jpg, -1, -1, 41.0088495575
426
+ 513723164.jpg, -1, -1, 56.1216814159
427
+ 5165838001.jpg, -1, -1, 71.7243589744
428
+ 5170858607.jpg, -1, -1, 65.3794642857
429
+ 517369320.jpg, -1, -1, 60.0
430
+ 5175429795.jpg, -1, -1, 74.7958715596
431
+ 5184701000.jpg, -1, -1, 24.5304347826
432
+ 5197308883.jpg, -1, -1, 69.3072033898
433
+ 5211888253.jpg, -1, -1, 28.7775229358
434
+ 5212639990.jpg, -1, -1, 60.4908256881
435
+ 5224316825.jpg, -1, -1, 30.9158878505
436
+ 5235352004.jpg, -1, -1, 42.6880530973
437
+ 5240740919.jpg, -1, -1, 61.2544642857
438
+ 5243003019.jpg, -1, -1, 43.8580508475
439
+ 52568944.jpg, -1, -1, 29.0215517241
440
+ 5261084727.jpg, -1, -1, 68.2411504425
441
+ 5266123407.jpg, -1, -1, 74.0409482759
442
+ 5270171351.jpg, -1, -1, 41.1041666667
443
+ 5275524338.jpg, -1, -1, 74.8680555556
444
+ 5286915372.jpg, -1, -1, 70.4137931034
445
+ 5288290650.jpg, -1, -1, 74.0818584071
446
+ 5292219744.jpg, -1, -1, 29.9115044248
447
+ 5298734322.jpg, -1, -1, 64.9779411765
448
+ 5300293470.jpg, -1, -1, 57.0848623853
449
+ 5306806108.jpg, -1, -1, 69.7731092437
450
+ 5309326426.jpg, -1, -1, 71.0884955752
451
+ 5314378959.jpg, -1, -1, 23.8684210526
452
+ 5314923965.jpg, -1, -1, 49.7289719626
453
+ 5320165275.jpg, -1, -1, 70.796460177
454
+ 5323106663.jpg, -1, -1, 54.4456521739
455
+ 5337507620.jpg, -1, -1, 63.2433035714
456
+ 5338193826.jpg, -1, -1, 54.1418918919
457
+ 5344616817.jpg, -1, -1, 58.5384615385
458
+ 5346690387.jpg, -1, -1, 51.9864864865
459
+ 5354068609.jpg, -1, -1, 62.9142857143
460
+ 5367389511.jpg, -1, -1, 58.8973214286
461
+ 5378235552.jpg, -1, -1, 67.8461538462
462
+ 5378725814.jpg, -1, -1, 57.3628318584
463
+ 5394502414.jpg, -1, -1, 46.3349514563
464
+ 5399180378.jpg, -1, -1, 69.8589108911
465
+ 5411857475.jpg, -1, -1, 58.0428571429
466
+ 5417875149.jpg, -1, -1, 39.0540540541
467
+ 5423246371.jpg, -1, -1, 68.3107476636
468
+ 5433512452.jpg, -1, -1, 61.5231092437
469
+ 5438123525.jpg, -1, -1, 41.2543103448
470
+ 5443343893.jpg, -1, -1, 59.265625
471
+ 5444850191.jpg, -1, -1, 67.233490566
472
+ 5448376373.jpg, -1, -1, 82.9551886792
473
+ 5449529114.jpg, -1, -1, 72.3804347826
474
+ 5455599246.jpg, -1, -1, 58.9813084112
475
+ 5462654627.jpg, -1, -1, 77.0434782609
476
+ 5467862729.jpg, -1, -1, 74.9579646018
477
+ 5474201544.jpg, -1, -1, 65.0630434783
478
+ 5476657075.jpg, -1, -1, 66.1674311927
479
+ 5480545191.jpg, -1, -1, 31.3205128205
480
+ 5480778835.jpg, -1, -1, 56.4621848739
481
+ 5481249377.jpg, -1, -1, 36.2878151261
482
+ 5483224233.jpg, -1, -1, 62.354368932
483
+ 5483514140.jpg, -1, -1, 74.7605932203
484
+ 5500570132.jpg, -1, -1, 63.9855769231
485
+ 5505340753.jpg, -1, -1, 46.4789915966
486
+ 5506716444.jpg, -1, -1, 61.775
487
+ 5507113852.jpg, -1, -1, 66.5147058824
488
+ 5525979481.jpg, -1, -1, 46.375
489
+ 5527745877.jpg, -1, -1, 84.4166666667
490
+ 5540702638.jpg, -1, -1, 72.7056074766
491
+ 5541438392.jpg, -1, -1, 45.7457627119
492
+ 5542509126.jpg, -1, -1, 73.9977678571
493
+ 5550230387.jpg, -1, -1, 62.0051020408
494
+ 5554551412.jpg, -1, -1, 32.1747787611
495
+ 5561903765.jpg, -1, -1, 56.141025641
496
+ 5562990409.jpg, -1, -1, 72.2369565217
497
+ 5565514955.jpg, -1, -1, 45.9587155963
498
+ 5570837854.jpg, -1, -1, 55.8296460177
499
+ 5571768533.jpg, -1, -1, 54.6961206897
500
+ 5583866722.jpg, -1, -1, 40.875
501
+ 5592031827.jpg, -1, -1, 55.3058252427
502
+ 5592471041.jpg, -1, -1, 66.2567567568
503
+ 5594332636.jpg, -1, -1, 69.6490825688
504
+ 5596136215.jpg, -1, -1, 57.3869565217
505
+ 5596270310.jpg, -1, -1, 60.8714285714
506
+ 5597328203.jpg, -1, -1, 48.5287610619
507
+ 5606263347.jpg, -1, -1, 21.1586956522
508
+ 5610005411.jpg, -1, -1, 69.0798319328
509
+ 5617865491.jpg, -1, -1, 53.9110576923
510
+ 5631740117.jpg, -1, -1, 77.2212389381
511
+ 563494328.jpg, -1, -1, 25.68125
512
+ 5643887824.jpg, -1, -1, 69.3040540541
513
+ 5644717504.jpg, -1, -1, 46.5575221239
514
+ 5651460875.jpg, -1, -1, 69.6517857143
515
+ 5656752407.jpg, -1, -1, 77.1869565217
516
+ 5657622409.jpg, -1, -1, 68.2452830189
517
+ 5660186741.jpg, -1, -1, 37.3970588235
518
+ 5669528587.jpg, -1, -1, 63.5428571429
519
+ 5679431462.jpg, -1, -1, 72.593220339
520
+ 5680314396.jpg, -1, -1, 48.4025423729
521
+ 5681801478.jpg, -1, -1, 46.6351351351
522
+ 5682574149.jpg, -1, -1, 51.3402777778
523
+ 5705716016.jpg, -1, -1, 38.8241525424
524
+ 5712901839.jpg, -1, -1, 50.1972477064
525
+ 5724255636.jpg, -1, -1, 44.6071428571
526
+ 5730689688.jpg, -1, -1, 72.268907563
527
+ 5739537678.jpg, -1, -1, 46.1282051282
528
+ 5740000928.jpg, -1, -1, 69.0105042017
529
+ 5765859594.jpg, -1, -1, 74.102173913
530
+ 5766806996.jpg, -1, -1, 23.61875
531
+ 5770080490.jpg, -1, -1, 65.6134453782
532
+ 5807318968.jpg, -1, -1, 51.5487288136
533
+ 5812840397.jpg, -1, -1, 70.9487179487
534
+ 5825508500.jpg, -1, -1, 21.8541666667
535
+ 5825939601.jpg, -1, -1, 78.7120535714
536
+ 5832751066.jpg, -1, -1, 73.7941176471
537
+ 5846011911.jpg, -1, -1, 36.8986486486
538
+ 5848411050.jpg, -1, -1, 79.4121621622
539
+ 5849249346.jpg, -1, -1, 77.4210526316
540
+ 5850096725.jpg, -1, -1, 36.7
541
+ 5866448510.jpg, -1, -1, 69.0810810811
542
+ 5869591890.jpg, -1, -1, 70.2860169492
543
+ 5870845920.jpg, -1, -1, 72.4742990654
544
+ 5876597412.jpg, -1, -1, 57.9676724138
545
+ 5880149356.jpg, -1, -1, 53.0972222222
546
+ 5889219072.jpg, -1, -1, 68.3871681416
547
+ 5890838382.jpg, -1, -1, 61.375
548
+ 5891304676.jpg, -1, -1, 53.2266949153
549
+ 5901783769.jpg, -1, -1, 63.7717391304
550
+ 5906582267.jpg, -1, -1, 70.975
551
+ 5915008700.jpg, -1, -1, 51.7635135135
552
+ 5919706356.jpg, -1, -1, 47.2876106195
553
+ 5920883206.jpg, -1, -1, 61.7433628319
554
+ 5922588235.jpg, -1, -1, 72.0657894737
555
+ 5923067948.jpg, -1, -1, 72.2629310345
556
+ 5932454750.jpg, -1, -1, 64.675
557
+ 5936466945.jpg, -1, -1, 52.1351351351
558
+ 5946379420.jpg, -1, -1, 54.4247572816
559
+ 5950051723.jpg, -1, -1, 75.4117647059
560
+ 5958950790.jpg, -1, -1, 33.3067226891
561
+ 5959668491.jpg, -1, -1, 72.4051724138
562
+ 5970029392.jpg, -1, -1, 64.1575630252
563
+ 5973304617.jpg, -1, -1, 65.9956521739
564
+ 5990072932.jpg, -1, -1, 61.7820512821
565
+ 5998921543.jpg, -1, -1, 31.5107758621
566
+ 6008267730.jpg, -1, -1, 31.25
567
+ 6026326925.jpg, -1, -1, 62.6323529412
568
+ 6031843630.jpg, -1, -1, 73.3781512605
569
+ 6035849303.jpg, -1, -1, 64.391025641
570
+ 6046652289.jpg, -1, -1, 73.0911214953
571
+ 6059676315.jpg, -1, -1, 69.972972973
572
+ 6066179576.jpg, -1, -1, 54.1907894737
573
+ 6067656779.jpg, -1, -1, 73.781779661
574
+ 6070993297.jpg, -1, -1, 57.3095238095
575
+ 6071930227.jpg, -1, -1, 21.14375
576
+ 6080237953.jpg, -1, -1, 59.5068807339
577
+ 6096713881.jpg, -1, -1, 40.4077669903
578
+ 6098070477.jpg, -1, -1, 64.9932432432
579
+ 6100280225.jpg, -1, -1, 55.2683486239
580
+ 6106078585.jpg, -1, -1, 65.0168269231
581
+ 6107409429.jpg, -1, -1, 53.1794871795
582
+ 6109808754.jpg, -1, -1, 75.8093220339
583
+ 6112607748.jpg, -1, -1, 72.1467889908
584
+ 6113677746.jpg, -1, -1, 75.5767326733
585
+ 6127342377.jpg, -1, -1, 35.147826087
586
+ 6143020134.jpg, -1, -1, 68.5277777778
587
+ 6162845121.jpg, -1, -1, 71.0871559633
588
+ 6163563949.jpg, -1, -1, 72.025
589
+ 6163576407.jpg, -1, -1, 63.5067567568
590
+ 6164643779.jpg, -1, -1, 68.5782608696
591
+ 6167276187.jpg, -1, -1, 16.2307692308
592
+ 6169751513.jpg, -1, -1, 68.125
593
+ 6172349199.jpg, -1, -1, 71.9915966387
594
+ 6175048748.jpg, -1, -1, 70.9040178571
595
+ 6175479501.jpg, -1, -1, 59.6752336449
596
+ 6184879643.jpg, -1, -1, 62.3175675676
597
+ 6190472459.jpg, -1, -1, 64.5608695652
598
+ 6201268474.jpg, -1, -1, 65.7889908257
599
+ 6201290598.jpg, -1, -1, 65.6621621622
600
+ 6205974624.jpg, -1, -1, 51.9558823529
601
+ 6222011944.jpg, -1, -1, 63.1305309735
602
+ 6222668667.jpg, -1, -1, 70.0
603
+ 6227646726.jpg, -1, -1, 64.027027027
604
+ 6228278450.jpg, -1, -1, 62.6239130435
605
+ 6230011878.jpg, -1, -1, 77.4797297297
606
+ 6238988012.jpg, -1, -1, 52.4913793103
607
+ 6244943274.jpg, -1, -1, 39.2629310345
608
+ 6252195920.jpg, -1, -1, 71.7542372881
609
+ 6252913864.jpg, -1, -1, 48.2760869565
610
+ 6255855320.jpg, -1, -1, 54.1330275229
611
+ 6256521905.jpg, -1, -1, 72.6816037736
612
+ 6256829619.jpg, -1, -1, 78.6507633588
613
+ 6258120264.jpg, -1, -1, 71.5995575221
614
+ 6272061393.jpg, -1, -1, 73.6891891892
615
+ 6274128274.jpg, -1, -1, 61.1173913043
616
+ 6286044979.jpg, -1, -1, 66.3060747664
617
+ 6309109435.jpg, -1, -1, 33.3944954128
618
+ 6321717266.jpg, -1, -1, 54.9806034483
619
+ 6337822588.jpg, -1, -1, 47.9105839416
620
+ 6351773844.jpg, -1, -1, 44.8151260504
621
+ 6354614287.jpg, -1, -1, 49.3318584071
622
+ 6366256547.jpg, -1, -1, 71.3075221239
623
+ 6379014005.jpg, -1, -1, 44.7544642857
624
+ 6379443681.jpg, -1, -1, 47.921875
625
+ 6389008737.jpg, -1, -1, 69.6462264151
626
+ 6439499095.jpg, -1, -1, 67.525
627
+ 6459542105.jpg, -1, -1, 55.75
628
+ 6468297341.jpg, -1, -1, 56.7923728814
629
+ 6469192803.jpg, -1, -1, 32.0848214286
630
+ 6486375983.jpg, -1, -1, 20.6612149533
631
+ 6504283859.jpg, -1, -1, 49.8944954128
632
+ 6534779805.jpg, -1, -1, 34.3101503759
633
+ 6534801331.jpg, -1, -1, 49.8653846154
634
+ 6537543865.jpg, -1, -1, 32.5398230088
635
+ 6551619395.jpg, -1, -1, 69.0442477876
636
+ 6558436183.jpg, -1, -1, 73.8119266055
637
+ 656718644.jpg, -1, -1, 27.9711538462
638
+ 6576363795.jpg, -1, -1, 31.7640186916
639
+ 6598327095.jpg, -1, -1, 62.0215517241
640
+ 6605703733.jpg, -1, -1, 71.3676470588
641
+ 6621080809.jpg, -1, -1, 47.9615384615
642
+ 665094052.jpg, -1, -1, 29.8026315789
643
+ 6656519345.jpg, -1, -1, 26.3392857143
644
+ 6686438027.jpg, -1, -1, 69.175
645
+ 6699302891.jpg, -1, -1, 81.7162162162
646
+ 6703759327.jpg, -1, -1, 66.7642857143
647
+ 6711319143.jpg, -1, -1, 74.5625
648
+ 6749061363.jpg, -1, -1, 64.6201923077
649
+ 6790082633.jpg, -1, -1, 56.222972973
650
+ 6795087227.jpg, -1, -1, 29.39375
651
+ 6803864259.jpg, -1, -1, 62.6
652
+ 6810784384.jpg, -1, -1, 47.1554054054
653
+ 6812098169.jpg, -1, -1, 75.628440367
654
+ 6831984052.jpg, -1, -1, 76.3014705882
655
+ 6835820588.jpg, -1, -1, 59.4673913043
656
+ 6838263860.jpg, -1, -1, 39.6896551724
657
+ 6840130023.jpg, -1, -1, 71.3043478261
658
+ 6849285524.jpg, -1, -1, 27.275210084
659
+ 6849557935.jpg, -1, -1, 23.0723684211
660
+ 6850513360.jpg, -1, -1, 78.8913793103
661
+ 6854146276.jpg, -1, -1, 73.1081730769
662
+ 6865288108.jpg, -1, -1, 69.6776315789
663
+ 6873503299.jpg, -1, -1, 44.7179487179
664
+ 6875112664.jpg, -1, -1, 62.7073170732
665
+ 6896225625.jpg, -1, -1, 62.6323529412
666
+ 6897675527.jpg, -1, -1, 56.9935483871
667
+ 6898490953.jpg, -1, -1, 80.1554054054
668
+ 6904500540.jpg, -1, -1, 74.8057692308
669
+ 6905574433.jpg, -1, -1, 44.2775423729
670
+ 6907499876.jpg, -1, -1, 70.3883928571
671
+ 6908276949.jpg, -1, -1, 66.7226890756
672
+ 6922435153.jpg, -1, -1, 66.2142857143
673
+ 6935219850.jpg, -1, -1, 45.186440678
674
+ 6935652834.jpg, -1, -1, 35.2375
675
+ 6943168420.jpg, -1, -1, 61.6964285714
676
+ 6953798231.jpg, -1, -1, 48.3411214953
677
+ 6959482720.jpg, -1, -1, 75.8739495798
678
+ 6961812492.jpg, -1, -1, 63.5263157895
679
+ 6979435146.jpg, -1, -1, 81.1637168142
680
+ 6980277214.jpg, -1, -1, 61.3990825688
681
+ 6989772902.jpg, -1, -1, 12.7457627119
682
+ 6993179262.jpg, -1, -1, 35.2635135135
683
+ 7002011797.jpg, -1, -1, 70.2839805825
684
+ 7004025487.jpg, -1, -1, 24.375
685
+ 7004286982.jpg, -1, -1, 77.7116935484
686
+ 7006133071.jpg, -1, -1, 70.5940594059
687
+ 7017996705.jpg, -1, -1, 59.0913793103
688
+ 7019640975.jpg, -1, -1, 76.2205882353
689
+ 7035194341.jpg, -1, -1, 76.8851351351
690
+ 7076980511.jpg, -1, -1, 62.3550420168
691
+ 7077356639.jpg, -1, -1, 71.0884955752
692
+ 7084961757.jpg, -1, -1, 64.202173913
693
+ 7094182327.jpg, -1, -1, 51.6092436975
694
+ 7100857351.jpg, -1, -1, 77.195754717
695
+ 7102405013.jpg, -1, -1, 64.1776315789
696
+ 7106381255.jpg, -1, -1, 6.61853448276
697
+ 7108939139.jpg, -1, -1, 81.1228813559
698
+ 7143312581.jpg, -1, -1, 56.8577981651
699
+ 7155826261.jpg, -1, -1, 69.2834821429
700
+ 7161322897.jpg, -1, -1, 66.6144859813
701
+ 7165207702.jpg, -1, -1, 67.5945945946
702
+ 7167608916.jpg, -1, -1, 66.4054054054
703
+ 7169475537.jpg, -1, -1, 82.6189320388
704
+ 7173633933.jpg, -1, -1, 34.5689655172
705
+ 7194820358.jpg, -1, -1, 80.8716814159
706
+ 7200434768.jpg, -1, -1, 64.9530075188
707
+ 7203007382.jpg, -1, -1, 38.0526315789
708
+ 7214629206.jpg, -1, -1, 68.6176470588
709
+ 7219282312.jpg, -1, -1, 55.4639830508
710
+ 7238585598.jpg, -1, -1, 66.4889380531
711
+ 7247521810.jpg, -1, -1, 34.34375
712
+ 7250719968.jpg, -1, -1, 76.4514563107
713
+ 7255289130.jpg, -1, -1, 74.5986842105
714
+ 7260432838.jpg, -1, -1, 69.963592233
715
+ 7263783306.jpg, -1, -1, 40.0401785714
716
+ 7264968696.jpg, -1, -1, 70.8136792453
717
+ 7289957138.jpg, -1, -1, 56.6875
718
+ 7304709380.jpg, -1, -1, 77.3514150943
719
+ 7321293264.jpg, -1, -1, 49.0775862069
720
+ 7339222496.jpg, -1, -1, 65.125
721
+ 7348931184.jpg, -1, -1, 58.9333333333
722
+ 7358753494.jpg, -1, -1, 54.1504424779
723
+ 7361922724.jpg, -1, -1, 78.4390756303
724
+ 7406520610.jpg, -1, -1, 81.2847222222
725
+ 7411918198.jpg, -1, -1, 49.311440678
726
+ 7418763178.jpg, -1, -1, 55.0571428571
727
+ 7433488844.jpg, -1, -1, 48.6666666667
728
+ 7442324444.jpg, -1, -1, 59.7162162162
729
+ 7459539092.jpg, -1, -1, 64.9736842105
730
+ 7459547170.jpg, -1, -1, 65.1584821429
731
+ 7470463166.jpg, -1, -1, 73.1485849057
732
+ 7478826230.jpg, -1, -1, 67.66
733
+ 7502944264.jpg, -1, -1, 72.01875
734
+ 7512526314.jpg, -1, -1, 62.0084033613
735
+ 7526349354.jpg, -1, -1, 36.875
736
+ 7555167080.jpg, -1, -1, 64.2268907563
737
+ 7570426860.jpg, -1, -1, 73.0127118644
738
+ 7597470314.jpg, -1, -1, 52.9308035714
739
+ 7627158600.jpg, -1, -1, 61.4485981308
740
+ 7628564828.jpg, -1, -1, 67.2894736842
741
+ 7628769918.jpg, -1, -1, 76.2430555556
742
+ 7629156184.jpg, -1, -1, 70.375
743
+ 7640299878.jpg, -1, -1, 68.5608108108
744
+ 7641363622.jpg, -1, -1, 66.0833333333
745
+ 764877350.jpg, -1, -1, 52.975
746
+ 7651161432.jpg, -1, -1, 80.2146017699
747
+ 7664231886.jpg, -1, -1, 67.5202702703
748
+ 7668828488.jpg, -1, -1, 70.675
749
+ 7678821440.jpg, -1, -1, 73.3088235294
750
+ 767916820.jpg, -1, -1, 71.2867647059
751
+ 7685277932.jpg, -1, -1, 62.5
752
+ 7714653298.jpg, -1, -1, 46.0066964286
753
+ 772323238.jpg, -1, -1, 47.0183486239
754
+ 7739550292.jpg, -1, -1, 56.275
755
+ 7755789428.jpg, -1, -1, 63.5754716981
756
+ 7786382886.jpg, -1, -1, 59.6752336449
757
+ 7795515978.jpg, -1, -1, 55.0833333333
758
+ 7811282766.jpg, -1, -1, 71.2364864865
759
+ 7820783548.jpg, -1, -1, 42.3256880734
760
+ 7822391242.jpg, -1, -1, 60.0441176471
761
+ 7823939538.jpg, -1, -1, 74.8913043478
762
+ 7850123296.jpg, -1, -1, 54.4322429907
763
+ 7856451374.jpg, -1, -1, 68.875
764
+ 7859713146.jpg, -1, -1, 77.5572033898
765
+ 7866480612.jpg, -1, -1, 72.4481132075
766
+ 7871237210.jpg, -1, -1, 73.8152173913
767
+ 7883125264.jpg, -1, -1, 45.4410377358
768
+ 7904631250.jpg, -1, -1, 76.8012820513
769
+ 7914158128.jpg, -1, -1, 65.0460526316
770
+ 7920824878.jpg, -1, -1, 62.5
771
+ 7926649668.jpg, -1, -1, 47.5649038462
772
+ 7935628474.jpg, -1, -1, 77.1377118644
773
+ 7940282852.jpg, -1, -1, 43.4285714286
774
+ 7946535104.jpg, -1, -1, 63.4642857143
775
+ 7954181598.jpg, -1, -1, 64.8638392857
776
+ 7957627098.jpg, -1, -1, 38.4745762712
777
+ 7975750167.jpg, -1, -1, 71.640625
778
+ 7983388187.jpg, -1, -1, 74.8340336134
779
+ 7989520325.jpg, -1, -1, 50.0105932203
780
+ 7994018610.jpg, -1, -1, 72.2436440678
781
+ 8002007313.jpg, -1, -1, 75.1714285714
782
+ 8006234274.jpg, -1, -1, 76.3451327434
783
+ 8010069396.jpg, -1, -1, 73.8275862069
784
+ 8012731043.jpg, -1, -1, 60.0783898305
785
+ 8034708472.jpg, -1, -1, 62.8177083333
786
+ 8044706176.jpg, -1, -1, 39.0769230769
787
+ 8060595540.jpg, -1, -1, 65.1584821429
788
+ 8077634378.jpg, -1, -1, 68.44375
789
+ 8081563599.jpg, -1, -1, 35.8857142857
790
+ 8103481984.jpg, -1, -1, 47.8
791
+ 8103867208.jpg, -1, -1, 70.0200892857
792
+ 8104089685.jpg, -1, -1, 60.2364864865
793
+ 8109380358.jpg, -1, -1, 47.5487804878
794
+ 8113245382.jpg, -1, -1, 79.03125
795
+ 8127846229.jpg, -1, -1, 46.1540178571
796
+ 8130745750.jpg, -1, -1, 57.3221153846
797
+ 8132171428.jpg, -1, -1, 67.4459459459
798
+ 8135725807.jpg, -1, -1, 71.125
799
+ 8137767984.jpg, -1, -1, 14.261682243
800
+ 8142555462.jpg, -1, -1, 47.6736111111
801
+ 8152535252.jpg, -1, -1, 17.8027522936
802
+ 8157884969.jpg, -1, -1, 45.328358209
803
+ 8167934912.jpg, -1, -1, 74.6153846154
804
+ 8175674967.jpg, -1, -1, 69.9413043478
805
+ 8182062575.jpg, -1, -1, 43.1108695652
806
+ 8199832249.jpg, -1, -1, 61.988317757
807
+ 8228733135.jpg, -1, -1, 59.9181415929
808
+ 8229993789.jpg, -1, -1, 63.8040540541
809
+ 8236704145.jpg, -1, -1, 72.6793478261
810
+ 8240021404.jpg, -1, -1, 58.9690265487
811
+ 8240220210.jpg, -1, -1, 45.231092437
812
+ 8242599767.jpg, -1, -1, 72.0089285714
813
+ 8246007228.jpg, -1, -1, 51.4252336449
814
+ 8247844062.jpg, -1, -1, 66.6886792453
815
+ 8257636356.jpg, -1, -1, 71.9345794393
816
+ 8259557757.jpg, -1, -1, 63.3509615385
817
+ 8262794558.jpg, -1, -1, 44.5334821429
818
+ 8278414113.jpg, -1, -1, 58.31
819
+ 8279922752.jpg, -1, -1, 73.975
820
+ 8293206684.jpg, -1, -1, 66.1674311927
821
+ 8293797976.jpg, -1, -1, 58.2740384615
822
+ 8301114122.jpg, -1, -1, 53.8673469388
823
+ 8305612260.jpg, -1, -1, 60.3066037736
824
+ 8331607175.jpg, -1, -1, 70.7754237288
825
+ 8333619480.jpg, -1, -1, 52.4369565217
826
+ 8339157161.jpg, -1, -1, 65.9778761062
827
+ 8345197886.jpg, -1, -1, 64.1776315789
828
+ 8345496530.jpg, -1, -1, 72.8716216216
829
+ 8350531104.jpg, -1, -1, 70.6
830
+ 8353492181.jpg, -1, -1, 29.9449152542
831
+ 8364603294.jpg, -1, -1, 80.0565217391
832
+ 8381849363.jpg, -1, -1, 72.8525641026
833
+ 8385499383.jpg, -1, -1, 69.5631067961
834
+ 8385552455.jpg, -1, -1, 77.0915178571
835
+ 8391472456.jpg, -1, -1, 66.5619469027
836
+ 8406395791.jpg, -1, -1, 69.0442477876
837
+ 8410894233.jpg, -1, -1, 47.9926470588
838
+ 8412239545.jpg, -1, -1, 85.5275423729
839
+ 8412460969.jpg, -1, -1, 38.3421052632
840
+ 8417110649.jpg, -1, -1, 48.017699115
841
+ 8421578109.jpg, -1, -1, 61.4766949153
842
+ 8428610140.jpg, -1, -1, 68.1
843
+ 8430005179.jpg, -1, -1, 71.3948598131
844
+ 8430084372.jpg, -1, -1, 50.0995145631
845
+ 8431773301.jpg, -1, -1, 66.9263392857
846
+ 8439178228.jpg, -1, -1, 46.7569444444
847
+ 8441623412.jpg, -1, -1, 75.8289473684
848
+ 8443583255.jpg, -1, -1, 72.8108695652
849
+ 8451346661.jpg, -1, -1, 76.7881355932
850
+ 8458755761.jpg, -1, -1, 70.349009901
851
+ 8460289501.jpg, -1, -1, 65.0287610619
852
+ 8461148661.jpg, -1, -1, 47.0183486239
853
+ 8466537488.jpg, -1, -1, 74.5199115044
854
+ 8468743988.jpg, -1, -1, 77.9256756757
855
+ 8469388516.jpg, -1, -1, 58.6039823009
856
+ 8481265200.jpg, -1, -1, 70.3970588235
857
+ 8481724236.jpg, -1, -1, 42.0384615385
858
+ 8484786490.jpg, -1, -1, 67.4729299363
859
+ 8488084256.jpg, -1, -1, 68.44375
860
+ 8502941912.jpg, -1, -1, 53.8154205607
861
+ 8506941154.jpg, -1, -1, 58.3881578947
862
+ 8522094125.jpg, -1, -1, 65.8676470588
863
+ 8532549490.jpg, -1, -1, 66.9306722689
864
+ 8539118741.jpg, -1, -1, 52.9264705882
865
+ 8544556866.jpg, -1, -1, 65.453125
866
+ 8546536299.jpg, -1, -1, 70.575
867
+ 8551620615.jpg, -1, -1, 70.9040178571
868
+ 8552101917.jpg, -1, -1, 32.5486111111
869
+ 8553130419.jpg, -1, -1, 62.8378378378
870
+ 8553449461.jpg, -1, -1, 71.2307692308
871
+ 8554330315.jpg, -1, -1, 74.3738938053
872
+ 8556290824.jpg, -1, -1, 67.5606796117
873
+ 8560249157.jpg, -1, -1, 33.93125
874
+ 8572309728.jpg, -1, -1, 74.34765625
875
+ 8573294301.jpg, -1, -1, 33.6764705882
876
+ 8585320404.jpg, -1, -1, 66.8539823009
877
+ 8595112756.jpg, -1, -1, 77.5862831858
878
+ 8595845896.jpg, -1, -1, 57.4864864865
879
+ 8596148572.jpg, -1, -1, 61.75
880
+ 8596349698.jpg, -1, -1, 78.25
881
+ 8599488070.jpg, -1, -1, 55.4797297297
882
+ 8599891034.jpg, -1, -1, 62.7302631579
883
+ 8608196738.jpg, -1, -1, 65.6514423077
884
+ 8608242752.jpg, -1, -1, 75.0957943925
885
+ 8613815314.jpg, -1, -1, 63.0847457627
886
+ 8616708613.jpg, -1, -1, 32.4618644068
887
+ 8617343673.jpg, -1, -1, 76.4285714286
888
+ 8617452211.jpg, -1, -1, 62.9975247525
889
+ 86243803.jpg, -1, -1, 39.01875
890
+ 8626945283.jpg, -1, -1, 56.9407894737
891
+ 8637618858.jpg, -1, -1, 73.2071428571
892
+ 8639628841.jpg, -1, -1, 71.1949152542
893
+ 8642423630.jpg, -1, -1, 66.4210526316
894
+ 8647880189.jpg, -1, -1, 69.0986842105
895
+ 8652814108.jpg, -1, -1, 40.9
896
+ 8653149698.jpg, -1, -1, 62.53125
897
+ 8654833683.jpg, -1, -1, 62.8371559633
898
+ 8665123695.jpg, -1, -1, 64.9724576271
899
+ 8671346109.jpg, -1, -1, 59.4866071429
900
+ 8688672632.jpg, -1, -1, 77.1377118644
901
+ 8703113665.jpg, -1, -1, 46.3026315789
902
+ 8718552383.jpg, -1, -1, 73.8504464286
903
+ 8728473663.jpg, -1, -1, 66.8526785714
904
+ 8731143356.jpg, -1, -1, 65.4285714286
905
+ 8768474112.jpg, -1, -1, 27.2435344828
906
+ 8771756230.jpg, -1, -1, 22.1725663717
907
+ 8774523686.jpg, -1, -1, 72.5233050847
908
+ 8779946210.jpg, -1, -1, 76.6880733945
909
+ 8816701721.jpg, -1, -1, 49.7176724138
910
+ 8836006786.jpg, -1, -1, 73.785046729
911
+ 8841877925.jpg, -1, -1, 74.7521551724
912
+ 8853329035.jpg, -1, -1, 66.2826086957
913
+ 8871477031.jpg, -1, -1, 15.8220338983
914
+ 8877350801.jpg, -1, -1, 44.369266055
915
+ 8892359151.jpg, -1, -1, 55.1824324324
916
+ 8897373737.jpg, -1, -1, 72.0338983051
917
+ 8905578199.jpg, -1, -1, 77.1482300885
918
+ 8911411937.jpg, -1, -1, 75.4879807692
919
+ 8916833328.jpg, -1, -1, 56.5452586207
920
+ 8924220861.jpg, -1, -1, 74.6810344828
921
+ 8925875609.jpg, -1, -1, 76.8066037736
922
+ 8929337311.jpg, -1, -1, 59.6582568807
923
+ 8939732555.jpg, -1, -1, 17.7152173913
924
+ 8942422332.jpg, -1, -1, 72.6009174312
925
+ 8944011983.jpg, -1, -1, 29.4344660194
926
+ 8971376273.jpg, -1, -1, 79.0405405405
927
+ 8972898508.jpg, -1, -1, 28.01875
928
+ 8973350530.jpg, -1, -1, 72.4
929
+ 898061275.jpg, -1, -1, 75.4347014925
930
+ 9016868192.jpg, -1, -1, 68.1785714286
931
+ 9018209858.jpg, -1, -1, 43.4487179487
932
+ 9037452749.jpg, -1, -1, 34.0723684211
933
+ 9053743376.jpg, -1, -1, 68.1785714286
934
+ 9054377711.jpg, -1, -1, 72.2146226415
935
+ 9061831747.jpg, -1, -1, 38.5490654206
936
+ 9063333592.jpg, -1, -1, 44.3125
937
+ 9111487897.jpg, -1, -1, 75.1782608696
938
+ 9125517985.jpg, -1, -1, 47.0558252427
939
+ 9130908149.jpg, -1, -1, 63.5336134454
940
+ 9132059359.jpg, -1, -1, 55.2142857143
941
+ 9136502604.jpg, -1, -1, 73.031512605
942
+ 9142684025.jpg, -1, -1, 73.5861344538
943
+ 9146239672.jpg, -1, -1, 65.00625
944
+ 9166911645.jpg, -1, -1, 61.4294871795
945
+ 9201324227.jpg, -1, -1, 67.9575892857
946
+ 9211639075.jpg, -1, -1, 79.8581081081
947
+ 9215122110.jpg, -1, -1, 44.35
948
+ 9258012910.jpg, -1, -1, 76.2987288136
949
+ 9290273110.jpg, -1, -1, 47.921875
950
+ 9306814074.jpg, -1, -1, 74.9004237288
951
+ 9336450850.jpg, -1, -1, 79.5172413793
952
+ 9339437714.jpg, -1, -1, 64.9375
953
+ 9346802331.jpg, -1, -1, 43.41875
954
+ 935105924.jpg, -1, -1, 50.2441860465
955
+ 9369983914.jpg, -1, -1, 70.4551282051
956
+ 9380189805.jpg, -1, -1, 70.3785714286
957
+ 9407500995.jpg, -1, -1, 75.8792372881
958
+ 9418862819.jpg, -1, -1, 32.8813559322
959
+ 9419143176.jpg, -1, -1, 48.25
960
+ 9428417271.jpg, -1, -1, 69.325
961
+ 9445529272.jpg, -1, -1, 44.2388392857
962
+ 9461924966.jpg, -1, -1, 59.265625
963
+ 9463299362.jpg, -1, -1, 61.6486486486
964
+ 9480350471.jpg, -1, -1, 59.9399038462
965
+ 9482188665.jpg, -1, -1, 69.0263157895
966
+ 9482512663.jpg, -1, -1, 80.4146341463
967
+ 9484990711.jpg, -1, -1, 30.7
968
+ 9485553260.jpg, -1, -1, 77.8990825688
969
+ 95019293.jpg, -1, -1, 37.3970588235
970
+ 9513920516.jpg, -1, -1, 55.3241525424
971
+ 9526812010.jpg, -1, -1, 71.8963414634
972
+ 9527733918.jpg, -1, -1, 67.8175675676
973
+ 9557331185.jpg, -1, -1, 53.0553097345
974
+ 9560250736.jpg, -1, -1, 43.8717948718
975
+ 9569803128.jpg, -1, -1, 69.8473451327
976
+ 9583278733.jpg, -1, -1, 74.60546875
977
+ 9583424619.jpg, -1, -1, 62.8378378378
978
+ 9605307671.jpg, -1, -1, 68.6205357143
979
+ 9619949433.jpg, -1, -1, 47.6736111111
980
+ 9623701556.jpg, -1, -1, 72.3296460177
981
+ 9629668489.jpg, -1, -1, 74.4097222222
982
+ 9646156379.jpg, -1, -1, 71.8440366972
983
+ 9654367147.jpg, -1, -1, 70.8823529412
984
+ 9677256773.jpg, -1, -1, 30.6415929204
985
+ 9678381010.jpg, -1, -1, 51.237804878
986
+ 9686337427.jpg, -1, -1, 29.9142857143
987
+ 9699196303.jpg, -1, -1, 72.1043165468
988
+ 9723836931.jpg, -1, -1, 79.1747572816
989
+ 9768810935.jpg, -1, -1, 55.2567567568
990
+ 9784161203.jpg, -1, -1, 20.25
991
+ 9795395745.jpg, -1, -1, 60.1378504673
992
+ 9823108104.jpg, -1, -1, 69.9027777778
993
+ 9826425224.jpg, -1, -1, 64.1526548673
994
+ 9831633846.jpg, -1, -1, 65.1584821429
995
+ 9849484955.jpg, -1, -1, 45.4910714286
996
+ 9895678845.jpg, -1, -1, 57.7476635514
997
+ 9941134286.jpg, -1, -1, 65.9048672566
998
+ 9945099433.jpg, -1, -1, 50.4329268293
999
+ 9954707815.jpg, -1, -1, 56.1631355932
1000
+ 9960023395.jpg, -1, -1, 65.474789916
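Note on the label format: every label file added in this commit uses the same comma-separated layout: a media path, two intermediate fields (written as -1, -1 where unused; in some files, e.g. LIVE_VQC, they appear to carry per-video metadata such as duration and frame rate), and a subjective quality score. The sketch below shows one way such a file could be read; the helper name load_label_file and the (name, score) return structure are illustrative assumptions, not code taken from this repository.

import csv

def load_label_file(path):
    """Return (file_name, score) pairs from one examplar_data_labels file."""
    entries = []
    with open(path, newline="") as f:
        for row in csv.reader(f):
            if len(row) < 4:
                continue  # skip empty or malformed lines
            name = row[0].strip()
            score = float(row[3])  # last field is the subjective quality score
            entries.append((name, score))
    return entries

# Hypothetical usage:
# labels = load_label_file("examplar_data_labels/KoNiQ10k/validation_labels.txt")
# print(len(labels), labels[0])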
examplar_data_labels/LIVE_Qualcomm/labels.txt ADDED
@@ -0,0 +1,208 @@
1
+ Focus/1024_TableTennisAtBalboa_HTCOneVx_VIDEO0138.yuv, -1, -1, 41.64888182462034
2
+ Focus/1019_SkateBoardingSkills_LGG2_CAM00855.yuv, -1, -1, 54.094978137969605
3
+ Focus/1019_SkateBoardingSkills_OppoFind7_VID20150507134738.yuv, -1, -1, 51.02291495174518
4
+ Exposure/0911panGarden_Note4_20150911_152420.yuv, -1, -1, 44.69628201977274
5
+ Artifacts/0923_ChattingOverLunch_G2_CAM00813.yuv, -1, -1, 45.67855937078179
6
+ Artifacts/0911nightScene_note4_20150911_193809.yuv, -1, -1, 36.546723521311996
7
+ Exposure/0912_Burgers_Nokia1020_20130306_17_07_38_Pro.yuv, -1, -1, 64.9396364629356
8
+ Color/1006_EmergencyScene_GS6_20151006_160038.yuv, -1, -1, 58.33545178737855
9
+ Stabilization/0923_WalkingToLunch2_G2_CAM00812.yuv, -1, -1, 48.7769838458931
10
+ Focus/1006_ComplexTrain_HTCOneVX_VIDEO0087.yuv, -1, -1, 22.46553753382192
11
+ Color/0913playSand_GS5_20150913_183702.yuv, -1, -1, 58.51617627453426
12
+ Stabilization/1019_ConstructionBuggy2_GS6_20151020_133229.yuv, -1, -1, 61.262155162968625
13
+ Sharpness/0913taiZi_iPhone5s_IMG_0250.yuv, -1, -1, 29.54546786777324
14
+ Artifacts/1006_FollowTheTrain_OppoFind7_VID20150424111801.yuv, -1, -1, 48.7312983952301
15
+ Stabilization/1019_ConstructionBuggy2_LGG2_CAM00867.yuv, -1, -1, 42.73349382920229
16
+ Artifacts/0918_AxelFetchesBall_GS6_20150919_090332.yuv, -1, -1, 58.23142265391711
17
+ Exposure/1019_ViewFromMetro2_OppoFind7_VID20150508085916.yuv, -1, -1, 36.18976114116138
18
+ Exposure/0923_FollowTheBee_HTCOneVx_VIDEO0072.yuv, -1, -1, 33.04439956164501
19
+ Artifacts/1019_ColorfulKites2_LGG2_CAM00872.yuv, -1, -1, 63.17108813219617
20
+ Exposure/0923_FollowTheBee_GS6_20150923_131355.yuv, -1, -1, 51.45085729576881
21
+ Color/1024_BadmintonAtBalboa_GS6_20151024_104414.yuv, -1, -1, 56.589364068655456
22
+ Exposure/1022_QCOMSoccer2_HTCOneVx_VIDEO0137.yuv, -1, -1, 29.11684686808589
23
+ Sharpness/0912_football_Nokia1020_20130306_17_51_35_Pro.yuv, -1, -1, 57.57359450935232
24
+ Artifacts/0723_ManUnderTree_GS6_03_20150723_130100.yuv, -1, -1, 57.28880925319935
25
+ Exposure/1019_ViewFromMetro2_GS6_20151020_133722.yuv, -1, -1, 39.88244836115969
26
+ Exposure/1019_TrafficFromAfar_OppoFind7_VID20150505125608.yuv, -1, -1, 43.74671562206279
27
+ Artifacts/0918_AxelFetchesBall_HTCOneVX_VIDEO0056.yuv, -1, -1, 42.628796648669706
28
+ Stabilization/0923_ChasingDucksInWater_iphone5_IMG_0274.yuv, -1, -1, 51.11518283501527
29
+ Focus/1006_ComplexTrain_GS6_20151006_155711.yuv, -1, -1, 44.35577617160543
30
+ Artifacts/1006_DucksAndFish2_HTCOneVX_VIDEO0093.yuv, -1, -1, 31.435259054533738
31
+ Color/1024_BadmintonAtBalboa_HTCOneVx_VIDEO0139.yuv, -1, -1, 38.4966864084717
32
+ Artifacts/1006_FollowTheTrain_GS6_20151006_155539.yuv, -1, -1, 47.30872740564676
33
+ Sharpness/0912_football_GS5_20150912_153941.yuv, -1, -1, 53.23486148188893
34
+ Exposure/0911_DogBall_OppoFind7_VID20150330141759.yuv, -1, -1, 66.94118272054322
35
+ Focus/0923_BirdsFlyOnPier_iphone5_IMG_0277.yuv, -1, -1, 24.644612569329546
36
+ Stabilization/1019_MetroArrives_OppoFind7_VID20150508085607.yuv, -1, -1, 55.86662414448337
37
+ Exposure/0911panGarden_Lumia1020_WindowsPhone_20130330_23_52_26_Pro.yuv, -1, -1, 61.25422142212514
38
+ Sharpness/1006_ParkBus_OppoFind7_VID20150424122747.yuv, -1, -1, 55.317108648036935
39
+ Stabilization/1019_ConstructionBuggy2_HTCOneVX_VIDEO0119.yuv, -1, -1, 37.69768941262607
40
+ Sharpness/0918_GuysPlayVolleyball_GS6_20150919_093108.yuv, -1, -1, 65.25654758810917
41
+ Sharpness/1006_ParkBus_GS6_20151006_170532.yuv, -1, -1, 61.532520666710234
42
+ Artifacts/0918_AxelFetchesBall_LGG2_CAM00802.yuv, -1, -1, 52.891660679374006
43
+ Focus/1019_FlyingKite_LGG2_CAM00875.yuv, -1, -1, 42.076217605884914
44
+ Color/1006_FamilyScooter_HTCOneVX_VIDEO0101.yuv, -1, -1, 38.8106247696991
45
+ Color/1006_EmergencyScene_OppoFind7_VID20150424112300.yuv, -1, -1, 62.01071121403491
46
+ Artifacts/0911nightScene_gs5_20150911_194737.yuv, -1, -1, 32.27546017222677
47
+ Exposure/0911panGarden_iPhone5s_IMG_0132.yuv, -1, -1, 56.68920900925609
48
+ Sharpness/0911duckSwim_iphone5s_IMG_0136.yuv, -1, -1, 69.15445557891265
49
+ Stabilization/0923_SwingingArm_Note4_20150925_153938.yuv, -1, -1, 48.1232752921398
50
+ Sharpness/0913taiZi_GS5_20150913_183616.yuv, -1, -1, 32.896692696712435
51
+ Exposure/0911_DogBall_LGG2_CAM00791.yuv, -1, -1, 46.17375037863661
52
+ Exposure/1022_QCOMSoccer2_LGG2_CAM00886.yuv, -1, -1, 26.395621305018185
53
+ Focus/0913boyWalkBy_GS5_20150913_182938.yuv, -1, -1, 51.591805400221055
54
+ Sharpness/0918_GuysPlayVolleyball_HTCOneVX_VIDEO0058.yuv, -1, -1, 53.13309561506308
55
+ Sharpness/0923_VerticalAndRollerCoaster_iphone5_IMG_0258.yuv, -1, -1, 48.66419200608179
56
+ Stabilization/1019_BollywoodDanceTraining_GS6_20151017_175412.yuv, -1, -1, 53.770511195731956
57
+ Exposure/0817_WalkIntoBarn_iPhone5_IMG_0410.yuv, -1, -1, 39.516765825233485
58
+ Color/1006_EmergencyScene_HTCOneVX_VIDEO0090.yuv, -1, -1, 42.55847413392969
59
+ Artifacts/0923_ChattingOverLunch_GS6_20150923_122025.yuv, -1, -1, 60.43118323177181
60
+ Color/0918_CyclistsNear1_OppoFind7_VID20150407033615.yuv, -1, -1, 50.22621242945224
61
+ Stabilization/1019_SkateBoarderStepsOut_OppoFind7_VID20150507134909.yuv, -1, -1, 40.40425472582464
62
+ Stabilization/0923_FollowPigeons_iphone5_IMG_0279.yuv, -1, -1, 47.7605951367109
63
+ Sharpness/0826_TrainPullsAway_OppoFind7_VID20150727213123.yuv, -1, -1, 58.976466922446484
64
+ Artifacts/0723_ManUnderTree_GS5_03_20150723_130016.yuv, -1, -1, 52.56657483287041
65
+ Artifacts/0918_DogsOnBeach_HTCOneVX_VIDEO0055.yuv, -1, -1, 50.378497196338884
66
+ Artifacts/0723_ManUnderTree_HTCOneVx_03_VIDEO0006.yuv, -1, -1, 36.709288593516966
67
+ Color/0912_CheerLeader1_Nokia1080_20130306_17_46_21_Pro.yuv, -1, -1, 41.529339991183875
68
+ Sharpness/0923_VerticalAndRollerCoaster_Note4_20150925_151648.yuv, -1, -1, 55.628924969845
69
+ Color/0923_RiverBed_GS5_20150925_165808.yuv, -1, -1, 61.69082350125231
70
+ Focus/1024_TableTennisAtBalboa_OppoFind7_VID20150512060301.yuv, -1, -1, 50.92808485111922
71
+ Stabilization/0923_FollowPigeons_Nokia1020_WP_20130305_20_16_29_Pro.yuv, -1, -1, 52.181316514970064
72
+ Color/1019_FlyingKite3_LGG2_CAM00877.yuv, -1, -1, 55.74782132350435
73
+ Color/0918_CyclistsNear1_HTCOneVX_VIDEO0050.yuv, -1, -1, 43.90725048088645
74
+ Color/1006_FamilyScooter_OppoFind7_VID20150424122855.yuv, -1, -1, 51.17307547834518
75
+ Artifacts/0911nightScene_1020_WindowsPhone_20130305_21_59_44_Pro.yuv, -1, -1, 21.187371167047402
76
+ Sharpness/0826_TrainPullsAway_iPhone5s_IMG_0423.yuv, -1, -1, 60.880133455554756
77
+ Stabilization/0923_CrazySubmarine_Note4_20150925_160005.yuv, -1, -1, 56.08844663824663
78
+ Focus/0923_BirdsFlyOnPier_Note4_20150925_164318.yuv, -1, -1, 34.77241986411562
79
+ Artifacts/1006_DucksAndFish2_GS6_20151006_161642.yuv, -1, -1, 51.0393821878597
80
+ Stabilization/1019_SkateBoarderStepsOut_LGG2_CAM00857.yuv, -1, -1, 48.672533412525446
81
+ Exposure/0911_DogBall_HTCOneVx_VIDEO0045.yuv, -1, -1, 49.7123541769036
82
+ Color/1019_FlyingKite3_HTCOneVX_VIDEO0129.yuv, -1, -1, 49.33731661081409
83
+ Focus/1006_ComplexTrain_OppoFind7_VID20150424111934.yuv, -1, -1, 31.508494371587865
84
+ Focus/0723_OldTownShop_GS6_12_20150723_133507.yuv, -1, -1, 64.9917581345629
85
+ Stabilization/0923_ChasingDucksInWater_GS5_20150925_163521.yuv, -1, -1, 48.91083829259606
86
+ Focus/0723_OldTownShop_Nokia1020_12_WP_20130426_22_46_05_Pro.yuv, -1, -1, 52.969252792370625
87
+ Focus/1019_FlyingKite_HTCOneVX_VIDEO0127.yuv, -1, -1, 20.802340977962967
88
+ Color/1006_FamilyScooter_LGG2_CAM00849.yuv, -1, -1, 49.68892718051139
89
+ Stabilization/0923_ChasingDucksInWater_Nokia1020_WP_20130305_19_56_39_Pro.yuv, -1, -1, 40.700157633712266
90
+ Sharpness/0827_OldTownFountain_GS5_20150827_180240.yuv, -1, -1, 66.22997014329692
91
+ Exposure/0912_Burgers_GS5_20150912_145544.yuv, -1, -1, 63.475870057217286
92
+ Color/1006_FamilyScooter_GS6_20151006_170632.yuv, -1, -1, 60.28464598447542
93
+ Color/1024_BadmintonAtBalboa_LGG2_CAM00888.yuv, -1, -1, 52.3694844910836
94
+ Stabilization/1019_BollywoodDanceTraining_OppoFind7_VID20150505131616.yuv, -1, -1, 49.18933619585497
95
+ Exposure/0912_Burgers_Note4_20150912_144558.yuv, -1, -1, 61.98320494013754
96
+ Stabilization/0923_WalkingToLunch2_GS6_20150923_120934.yuv, -1, -1, 46.50669762802673
97
+ Stabilization/1019_MetroArrives_LGG2_CAM00868.yuv, -1, -1, 59.05090948407636
98
+ Artifacts/1019_ColorfulKites2_GS6_20151020_140327.yuv, -1, -1, 69.34239333030179
99
+ Focus/0923_BirdsFlyOnPier_GS5_20150925_165328.yuv, -1, -1, 46.858952126850156
100
+ Focus/0923_BirdsFlyOnPier_Nokia1020_WP_20130305_20_14_47_Pro.yuv, -1, -1, 49.44336625534995
101
+ Color/0923_RiverBed_iphone5_IMG_0280.yuv, -1, -1, 63.16961373461168
102
+ Sharpness/0911duckSwim_gs5_20150911_154802.yuv, -1, -1, 59.46925058712509
103
+ Color/0918_DelMarView_HTCOneVX_VIDEO0062.yuv, -1, -1, 45.79645003864048
104
+ Color/0918_CyclistsNear1_LGG2_CAM00795.yuv, -1, -1, 46.753110388198024
105
+ Exposure/0817_WalkIntoBarn_OppoFind7_VID20150718144333.yuv, -1, -1, 49.02405995062579
106
+ Color/1019_FlyingKite3_GS6_20151020_141052.yuv, -1, -1, 73.64276415152705
107
+ Artifacts/0918_DogsOnBeach_GS6_20150919_085853.yuv, -1, -1, 65.94233767581703
108
+ Stabilization/0923_CrazySubmarine_Nokia1020_WP_20130305_19_31_30_Pro.yuv, -1, -1, 59.038670696087216
109
+ Focus/0913boyWalkBy_iPhone5s_IMG_0245.yuv, -1, -1, 62.812549213807614
110
+ Stabilization/1019_MetroArrives_GS6_20151020_133409.yuv, -1, -1, 65.82583000957678
111
+ Sharpness/0911duckSwim_notes4_20150911_153835.yuv, -1, -1, 71.74875099099955
112
+ Sharpness/1006_ParkBus_HTCOneVX_VIDEO0100.yuv, -1, -1, 31.50387615900128
113
+ Focus/1019_SkateBoardingSkills_GS6_20151019_182541.yuv, -1, -1, 59.007584920405066
114
+ Color/0918_DelMarView_LGG2_CAM00809.yuv, -1, -1, 48.7408776505235
115
+ Color/0912_Cheerleader1_Note4_20150912_152444.yuv, -1, -1, 46.853275685229434
116
+ Focus/1019_FlyingKite_OppoFind7_VID20150508093000.yuv, -1, -1, 54.075938062059876
117
+ Exposure/1019_TrafficFromAfar_HTCOneVX_VIDEO0102.yuv, -1, -1, 38.676262337846296
118
+ Focus/0911driving_5s_IMG_0145.yuv, -1, -1, 44.9586564442459
119
+ Exposure/1019_ViewFromMetro2_LGG2_CAM00870.yuv, -1, -1, 37.58488437787315
120
+ Focus/0723_OldTownShop_HTCOneVx_12_VIDEO0019.yuv, -1, -1, 30.53679255118817
121
+ Stabilization/0923_ChasingDucksInWater_Note4_20150925_162515.yuv, -1, -1, 48.75250168989043
122
+ Sharpness/0826_TrainPullsAway_Note4_20150826_175232.yuv, -1, -1, 57.182744558097575
123
+ Focus/1019_FlyingKite_GS6_20151020_140804.yuv, -1, -1, 23.50289297893303
124
+ Color/0918_CyclistsNear1_GS6_20150919_081325.yuv, -1, -1, 52.81500748332389
125
+ Artifacts/1006_PlaneLandsAfar2_OppoFind7_VID20150424122651.yuv, -1, -1, 57.22288718627244
126
+ Exposure/0912_Burgers_iphone5s_IMG_0167.yuv, -1, -1, 64.38345299460406
127
+ Sharpness/0923_VerticalAndRollerCoaster_GS5_20150925_152655.yuv, -1, -1, 55.74790371519573
128
+ Stabilization/0923_FollowPigeons_GS5_20150925_165510.yuv, -1, -1, 55.46436837100166
129
+ Exposure/0817_WalkIntoBarn_LGG2_CAM00718.yuv, -1, -1, 51.146870112855
130
+ Color/0913playSand_iPhone5s_IMG_0251.yuv, -1, -1, 72.96615929361013
131
+ Stabilization/0923_CrazySubmarine_GS5_20150925_161013.yuv, -1, -1, 62.68393406357753
132
+ Sharpness/0912_football_Note4_20150912_152954.yuv, -1, -1, 55.629277646191774
133
+ Color/1006_EmergencyScene_LGG2_CAM00838.yuv, -1, -1, 66.8679745839632
134
+ Stabilization/0923_CrazySubmarine_iphone5_IMG_0268.yuv, -1, -1, 62.539356287114956
135
+ Color/0912_Cheerleader1_iphone_IMG_0184.yuv, -1, -1, 59.46973486351669
136
+ Exposure/1019_TrafficFromAfar_GS6_20151017_173401.yuv, -1, -1, 60.08891382298475
137
+ Color/0913playSand_Note4_20150913_182713.yuv, -1, -1, 54.20213781634974
138
+ Focus/0911driving_gs5_20150911_170903.yuv, -1, -1, 52.638967951340504
139
+ Artifacts/1019_ColorfulKites2_OppoFind7_VID20150508092524.yuv, -1, -1, 50.97310511796482
140
+ Color/0918_DelMarView_OppoFind7_VID20150409024137.yuv, -1, -1, 60.963433745979756
141
+ Color/0918_DelMarView_GS6_20150921_071851.yuv, -1, -1, 57.249620671155895
142
+ Exposure/0911panGarden_Gs5_20150911_153348.yuv, -1, -1, 30.910356850986815
143
+ Sharpness/0827_OldTownFountain_Note4_20150827_175314.yuv, -1, -1, 66.98799834034504
144
+ Stabilization/0923_FollowPigeons_Note4_20150925_164500.yuv, -1, -1, 52.37350976713892
145
+ Exposure/0911_DogBall_GS6_20150911_185459.yuv, -1, -1, 65.10542900249484
146
+ Focus/1019_HelicopterInSky_OppoFind7_VID20150508093547.yuv, -1, -1, 53.701231222442544
147
+ Artifacts/1006_DucksAndFish2_OppoFind7_VID20150424113906.yuv, -1, -1, 21.403852037578215
148
+ Exposure/1022_QCOMSoccer2_GS6_20151022_174002.yuv, -1, -1, 44.361660121662815
149
+ Color/0912_Cheerleader1_GS5_20150912_153429.yuv, -1, -1, 51.08416769793256
150
+ Stabilization/0923_WalkingToLunch2_HTCOneVx_VIDEO0065.yuv, -1, -1, 22.111602493222765
151
+ Sharpness/0918_GuysPlayVolleyball_LGG2_CAM00804.yuv, -1, -1, 59.486900886871
152
+ Artifacts/1006_FollowTheTrain_LGG2_CAM00834.yuv, -1, -1, 33.939419249644835
153
+ Color/1024_BadmintonAtBalboa_OppoFined7_VID20150512060558.yuv, -1, -1, 56.55278041432343
154
+ Color/0923_RiverBed_Nokia1020_WP_20130305_20_19_29_Pro.yuv, -1, -1, 66.39664147071237
155
+ Stabilization/1019_BollywoodDanceTraining_HTCOneVX_VIDEO0103.yuv, -1, -1, 46.903863263746985
156
+ Focus/1019_SkateBoardingSkills_HTCOneVX_VIDEO0106.yuv, -1, -1, 31.781376267831334
157
+ Sharpness/0826_TrainPullsAway_LG_G2_CAM00726.yuv, -1, -1, 49.77539197067738
158
+ Artifacts/0918_AxelFetchesBall_OppoFind7_VID20150407042625.yuv, -1, -1, 56.47030298915485
159
+ Stabilization/1019_ConstructionBuggy2_OppoFind7_VID20150508085425.yuv, -1, -1, 43.721940313265264
160
+ Artifacts/0918_DogsOnBeach_LGG2_CAM00801.yuv, -1, -1, 60.0145230621533
161
+ Artifacts/0923_ChattingOverLunch_Oppo_VID20150411074312.yuv, -1, -1, 52.436942132562734
162
+ Artifacts/0723_ManUnderTree_Nokia1020_03_WP_20130426_22_12_02_Pro.yuv, -1, -1, 45.67874272310085
163
+ Exposure/0817_WalkIntoBarn_Note4_20150817_110508.yuv, -1, -1, 58.079428695078406
164
+ Sharpness/0911duckSwim_lumia1020_WindowsPhone_20130331_00_06_41_Pro.yuv, -1, -1, 71.3530679089854
165
+ Exposure/1022_QCOMSoccer2_OppoFind7_VID20150510130154.yuv, -1, -1, 45.88425102889353
166
+ Artifacts/1006_PlaneLandsAfar2_GS6_20151006_170428.yuv, -1, -1, 54.47172841710812
167
+ Focus/1019_HelicopterInSky_LGG2_CAM00880.yuv, -1, -1, 49.06343607078643
168
+ Focus/0911driving_note4_20150911_165933.yuv, -1, -1, 66.5912517517975
169
+ Exposure/0911skateBoard_note4_20150911_173356.yuv, -1, -1, 61.56026393111605
170
+ Focus/0913boyWalkBy_Note4_20150913_181950.yuv, -1, -1, 53.97028926187148
171
+ Artifacts/1006_PlaneLandsAfar2_HTCOneVX_VIDEO0099.yuv, -1, -1, 43.13950764717603
172
+ Artifacts/0911nightScene_5s_IMG_0160.yuv, -1, -1, 35.72522155502472
173
+ Stabilization/0923_WalkingToLunch2_Oppo_VID20150411073225.yuv, -1, -1, 34.807565694465076
174
+ Stabilization/0923_SwingingArm_iphone5_IMG_0266.yuv, -1, -1, 59.04852227983286
175
+ Focus/1024_TableTennisAtBalboa_GS6_20151024_104121.yuv, -1, -1, 48.88189507385214
176
+ Artifacts/1019_ColorfulKites2_HTCOneVX_VIDEO0124.yuv, -1, -1, 45.61583176940698
177
+ Sharpness/0923_VerticalAndRollerCoaster_Nokia1020_WP_20130319_18_36_46_Pro.yuv, -1, -1, 63.52586365094227
178
+ Sharpness/0918_GuysPlayVolleyball_OppoFind7_VID20150407045402.yuv, -1, -1, 51.96402419847002
179
+ Artifacts/0918_DogsOnBeach_OppoFind7_VID20150407042141.yuv, -1, -1, 61.24971954276459
180
+ Artifacts/1006_DucksAndFish2_LGG2_CAM00841.yuv, -1, -1, 61.63231290029228
181
+ Exposure/1019_ViewFromMetro2_HTCOneVX_VIDEO0122.yuv, -1, -1, 31.44885665910503
182
+ Sharpness/1006_ParkBus_LGG2_CAM00848.yuv, -1, -1, 49.148682603066575
183
+ Color/1019_FlyingKite3_OppoFind7_VID20150508093248.yuv, -1, -1, 56.46153730303658
184
+ Stabilization/1019_SkateBoarderStepsOut_HTCOneVX.yuv, -1, -1, 36.92437220840707
185
+ Exposure/1019_TrafficFromAfar_LGG2_CAM00850.yuv, -1, -1, 43.920755609079976
186
+ Stabilization/0923_SwingingArm_GS5_20150925_154945.yuv, -1, -1, 59.37209120186363
187
+ Stabilization/1019_SkateBoarderStepsOut_GS6_20151019_182712.yuv, -1, -1, 61.247990668124054
188
+ Focus/1019_HelicopterInSky_HTCOneVX_VIDEO0132.yuv, -1, -1, 16.562095145958942
189
+ Stabilization/1019_BollywoodDanceTraining_LGG2_CAM00851.yuv, -1, -1, 54.376932661964815
190
+ Exposure/0911skateBoard_gs5_20150911_174325.yuv, -1, -1, 49.66995169846537
191
+ Focus/1024_TableTennisAtBalboa_LGG2_CAM00887.yuv, -1, -1, 47.09583764080335
192
+ Sharpness/0923_DucksInWater_Note4_20150925_162109.yuv, -1, -1, 40.52447294642663
193
+ Artifacts/1006_FollowTheTrain_HTCOneVX_VIDEO0086.yuv, -1, -1, 27.486434026248876
194
+ Artifacts/0923_ChattingOverLunch_HTCOneVx_VIDEO0066.yuv, -1, -1, 31.49041491004223
195
+ Sharpness/0913taiZi_Note4_20150913_182629.yuv, -1, -1, 32.936571931353
196
+ Sharpness/0827_OldTownFountain_GS6_20150827_180303.yuv, -1, -1, 66.09356090065711
197
+ Exposure/0911skateBoard_1020_WindowsPhone_20130305_19_55_21_Pro.yuv, -1, -1, 55.13130426481119
198
+ Artifacts/1006_PlaneLandsAfar2_LGG2_CAM00847.yuv, -1, -1, 43.7872800043708
199
+ Sharpness/0923_DucksInWater_GS5_20150925_163115.yuv, -1, -1, 50.962588180142056
200
+ Sharpness/0827_OldTownFountain_iphone5s_IMG_0461.yuv, -1, -1, 63.719020683993385
201
+ Focus/1019_HelicopterInSky_GS6_20151020_141352.yuv, -1, -1, 22.843160682622553
202
+ Exposure/0911skateBoard_5s_IMG_0151.yuv, -1, -1, 66.17622463832596
203
+ Color/0923_RiverBed_Note4_20150925_164759.yuv, -1, -1, 64.23939021565612
204
+ Focus/0723_OldTownShop_GS5_12_20150723_133424.yuv, -1, -1, 51.65560419378202
205
+ Sharpness/0923_DucksInWater_Nokia1020_WP_20130305_19_54_00_Pro.yuv, -1, -1, 54.16160278100095
206
+ Sharpness/0923_DucksInWater_iphone5_IMG_0272.yuv, -1, -1, 49.06737404472014
207
+ Focus/1006_ComplexTrain_LGG2_CAM00835.yuv, -1, -1, 27.479486381827094
208
+ Stabilization/1019_MetroArrives_HTCOneVX_VIDEO0120.yuv, -1, -1, 45.266375356943335
examplar_data_labels/LIVE_Qualcomm/mp4labels.txt ADDED
@@ -0,0 +1,208 @@
1
+ Focus/1024_TableTennisAtBalboa_HTCOneVx_VIDEO0138.mp4, -1, -1, 41.64888182462034
2
+ Focus/1019_SkateBoardingSkills_LGG2_CAM00855.mp4, -1, -1, 54.094978137969605
3
+ Focus/1019_SkateBoardingSkills_OppoFind7_VID20150507134738.mp4, -1, -1, 51.02291495174518
4
+ Exposure/0911panGarden_Note4_20150911_152420.mp4, -1, -1, 44.69628201977274
5
+ Artifacts/0923_ChattingOverLunch_G2_CAM00813.mp4, -1, -1, 45.67855937078179
6
+ Artifacts/0911nightScene_note4_20150911_193809.mp4, -1, -1, 36.546723521311996
7
+ Exposure/0912_Burgers_Nokia1020_20130306_17_07_38_Pro.mp4, -1, -1, 64.9396364629356
8
+ Color/1006_EmergencyScene_GS6_20151006_160038.mp4, -1, -1, 58.33545178737855
9
+ Stabilization/0923_WalkingToLunch2_G2_CAM00812.mp4, -1, -1, 48.7769838458931
10
+ Focus/1006_ComplexTrain_HTCOneVX_VIDEO0087.mp4, -1, -1, 22.46553753382192
11
+ Color/0913playSand_GS5_20150913_183702.mp4, -1, -1, 58.51617627453426
12
+ Stabilization/1019_ConstructionBuggy2_GS6_20151020_133229.mp4, -1, -1, 61.262155162968625
13
+ Sharpness/0913taiZi_iPhone5s_IMG_0250.mp4, -1, -1, 29.54546786777324
14
+ Artifacts/1006_FollowTheTrain_OppoFind7_VID20150424111801.mp4, -1, -1, 48.7312983952301
15
+ Stabilization/1019_ConstructionBuggy2_LGG2_CAM00867.mp4, -1, -1, 42.73349382920229
16
+ Artifacts/0918_AxelFetchesBall_GS6_20150919_090332.mp4, -1, -1, 58.23142265391711
17
+ Exposure/1019_ViewFromMetro2_OppoFind7_VID20150508085916.mp4, -1, -1, 36.18976114116138
18
+ Exposure/0923_FollowTheBee_HTCOneVx_VIDEO0072.mp4, -1, -1, 33.04439956164501
19
+ Artifacts/1019_ColorfulKites2_LGG2_CAM00872.mp4, -1, -1, 63.17108813219617
20
+ Exposure/0923_FollowTheBee_GS6_20150923_131355.mp4, -1, -1, 51.45085729576881
21
+ Color/1024_BadmintonAtBalboa_GS6_20151024_104414.mp4, -1, -1, 56.589364068655456
22
+ Exposure/1022_QCOMSoccer2_HTCOneVx_VIDEO0137.mp4, -1, -1, 29.11684686808589
23
+ Sharpness/0912_football_Nokia1020_20130306_17_51_35_Pro.mp4, -1, -1, 57.57359450935232
24
+ Artifacts/0723_ManUnderTree_GS6_03_20150723_130100.mp4, -1, -1, 57.28880925319935
25
+ Exposure/1019_ViewFromMetro2_GS6_20151020_133722.mp4, -1, -1, 39.88244836115969
26
+ Exposure/1019_TrafficFromAfar_OppoFind7_VID20150505125608.mp4, -1, -1, 43.74671562206279
27
+ Artifacts/0918_AxelFetchesBall_HTCOneVX_VIDEO0056.mp4, -1, -1, 42.628796648669706
28
+ Stabilization/0923_ChasingDucksInWater_iphone5_IMG_0274.mp4, -1, -1, 51.11518283501527
29
+ Focus/1006_ComplexTrain_GS6_20151006_155711.mp4, -1, -1, 44.35577617160543
30
+ Artifacts/1006_DucksAndFish2_HTCOneVX_VIDEO0093.mp4, -1, -1, 31.435259054533738
31
+ Color/1024_BadmintonAtBalboa_HTCOneVx_VIDEO0139.mp4, -1, -1, 38.4966864084717
32
+ Artifacts/1006_FollowTheTrain_GS6_20151006_155539.mp4, -1, -1, 47.30872740564676
33
+ Sharpness/0912_football_GS5_20150912_153941.mp4, -1, -1, 53.23486148188893
34
+ Exposure/0911_DogBall_OppoFind7_VID20150330141759.mp4, -1, -1, 66.94118272054322
35
+ Focus/0923_BirdsFlyOnPier_iphone5_IMG_0277.mp4, -1, -1, 24.644612569329546
36
+ Stabilization/1019_MetroArrives_OppoFind7_VID20150508085607.mp4, -1, -1, 55.86662414448337
37
+ Exposure/0911panGarden_Lumia1020_WindowsPhone_20130330_23_52_26_Pro.mp4, -1, -1, 61.25422142212514
38
+ Sharpness/1006_ParkBus_OppoFind7_VID20150424122747.mp4, -1, -1, 55.317108648036935
39
+ Stabilization/1019_ConstructionBuggy2_HTCOneVX_VIDEO0119.mp4, -1, -1, 37.69768941262607
40
+ Sharpness/0918_GuysPlayVolleyball_GS6_20150919_093108.mp4, -1, -1, 65.25654758810917
41
+ Sharpness/1006_ParkBus_GS6_20151006_170532.mp4, -1, -1, 61.532520666710234
42
+ Artifacts/0918_AxelFetchesBall_LGG2_CAM00802.mp4, -1, -1, 52.891660679374006
43
+ Focus/1019_FlyingKite_LGG2_CAM00875.mp4, -1, -1, 42.076217605884914
44
+ Color/1006_FamilyScooter_HTCOneVX_VIDEO0101.mp4, -1, -1, 38.8106247696991
45
+ Color/1006_EmergencyScene_OppoFind7_VID20150424112300.mp4, -1, -1, 62.01071121403491
46
+ Artifacts/0911nightScene_gs5_20150911_194737.mp4, -1, -1, 32.27546017222677
47
+ Exposure/0911panGarden_iPhone5s_IMG_0132.mp4, -1, -1, 56.68920900925609
48
+ Sharpness/0911duckSwim_iphone5s_IMG_0136.mp4, -1, -1, 69.15445557891265
49
+ Stabilization/0923_SwingingArm_Note4_20150925_153938.mp4, -1, -1, 48.1232752921398
50
+ Sharpness/0913taiZi_GS5_20150913_183616.mp4, -1, -1, 32.896692696712435
51
+ Exposure/0911_DogBall_LGG2_CAM00791.mp4, -1, -1, 46.17375037863661
52
+ Exposure/1022_QCOMSoccer2_LGG2_CAM00886.mp4, -1, -1, 26.395621305018185
53
+ Focus/0913boyWalkBy_GS5_20150913_182938.mp4, -1, -1, 51.591805400221055
54
+ Sharpness/0918_GuysPlayVolleyball_HTCOneVX_VIDEO0058.mp4, -1, -1, 53.13309561506308
55
+ Sharpness/0923_VerticalAndRollerCoaster_iphone5_IMG_0258.mp4, -1, -1, 48.66419200608179
56
+ Stabilization/1019_BollywoodDanceTraining_GS6_20151017_175412.mp4, -1, -1, 53.770511195731956
57
+ Exposure/0817_WalkIntoBarn_iPhone5_IMG_0410.mp4, -1, -1, 39.516765825233485
58
+ Color/1006_EmergencyScene_HTCOneVX_VIDEO0090.mp4, -1, -1, 42.55847413392969
59
+ Artifacts/0923_ChattingOverLunch_GS6_20150923_122025.mp4, -1, -1, 60.43118323177181
60
+ Color/0918_CyclistsNear1_OppoFind7_VID20150407033615.mp4, -1, -1, 50.22621242945224
61
+ Stabilization/1019_SkateBoarderStepsOut_OppoFind7_VID20150507134909.mp4, -1, -1, 40.40425472582464
62
+ Stabilization/0923_FollowPigeons_iphone5_IMG_0279.mp4, -1, -1, 47.7605951367109
63
+ Sharpness/0826_TrainPullsAway_OppoFind7_VID20150727213123.mp4, -1, -1, 58.976466922446484
64
+ Artifacts/0723_ManUnderTree_GS5_03_20150723_130016.mp4, -1, -1, 52.56657483287041
65
+ Artifacts/0918_DogsOnBeach_HTCOneVX_VIDEO0055.mp4, -1, -1, 50.378497196338884
66
+ Artifacts/0723_ManUnderTree_HTCOneVx_03_VIDEO0006.mp4, -1, -1, 36.709288593516966
67
+ Color/0912_CheerLeader1_Nokia1080_20130306_17_46_21_Pro.mp4, -1, -1, 41.529339991183875
68
+ Sharpness/0923_VerticalAndRollerCoaster_Note4_20150925_151648.mp4, -1, -1, 55.628924969845
69
+ Color/0923_RiverBed_GS5_20150925_165808.mp4, -1, -1, 61.69082350125231
70
+ Focus/1024_TableTennisAtBalboa_OppoFind7_VID20150512060301.mp4, -1, -1, 50.92808485111922
71
+ Stabilization/0923_FollowPigeons_Nokia1020_WP_20130305_20_16_29_Pro.mp4, -1, -1, 52.181316514970064
72
+ Color/1019_FlyingKite3_LGG2_CAM00877.mp4, -1, -1, 55.74782132350435
73
+ Color/0918_CyclistsNear1_HTCOneVX_VIDEO0050.mp4, -1, -1, 43.90725048088645
74
+ Color/1006_FamilyScooter_OppoFind7_VID20150424122855.mp4, -1, -1, 51.17307547834518
75
+ Artifacts/0911nightScene_1020_WindowsPhone_20130305_21_59_44_Pro.mp4, -1, -1, 21.187371167047402
76
+ Sharpness/0826_TrainPullsAway_iPhone5s_IMG_0423.mp4, -1, -1, 60.880133455554756
77
+ Stabilization/0923_CrazySubmarine_Note4_20150925_160005.mp4, -1, -1, 56.08844663824663
78
+ Focus/0923_BirdsFlyOnPier_Note4_20150925_164318.mp4, -1, -1, 34.77241986411562
79
+ Artifacts/1006_DucksAndFish2_GS6_20151006_161642.mp4, -1, -1, 51.0393821878597
80
+ Stabilization/1019_SkateBoarderStepsOut_LGG2_CAM00857.mp4, -1, -1, 48.672533412525446
81
+ Exposure/0911_DogBall_HTCOneVx_VIDEO0045.mp4, -1, -1, 49.7123541769036
82
+ Color/1019_FlyingKite3_HTCOneVX_VIDEO0129.mp4, -1, -1, 49.33731661081409
83
+ Focus/1006_ComplexTrain_OppoFind7_VID20150424111934.mp4, -1, -1, 31.508494371587865
84
+ Focus/0723_OldTownShop_GS6_12_20150723_133507.mp4, -1, -1, 64.9917581345629
85
+ Stabilization/0923_ChasingDucksInWater_GS5_20150925_163521.mp4, -1, -1, 48.91083829259606
86
+ Focus/0723_OldTownShop_Nokia1020_12_WP_20130426_22_46_05_Pro.mp4, -1, -1, 52.969252792370625
87
+ Focus/1019_FlyingKite_HTCOneVX_VIDEO0127.mp4, -1, -1, 20.802340977962967
88
+ Color/1006_FamilyScooter_LGG2_CAM00849.mp4, -1, -1, 49.68892718051139
89
+ Stabilization/0923_ChasingDucksInWater_Nokia1020_WP_20130305_19_56_39_Pro.mp4, -1, -1, 40.700157633712266
90
+ Sharpness/0827_OldTownFountain_GS5_20150827_180240.mp4, -1, -1, 66.22997014329692
91
+ Exposure/0912_Burgers_GS5_20150912_145544.mp4, -1, -1, 63.475870057217286
92
+ Color/1006_FamilyScooter_GS6_20151006_170632.mp4, -1, -1, 60.28464598447542
93
+ Color/1024_BadmintonAtBalboa_LGG2_CAM00888.mp4, -1, -1, 52.3694844910836
94
+ Stabilization/1019_BollywoodDanceTraining_OppoFind7_VID20150505131616.mp4, -1, -1, 49.18933619585497
95
+ Exposure/0912_Burgers_Note4_20150912_144558.mp4, -1, -1, 61.98320494013754
96
+ Stabilization/0923_WalkingToLunch2_GS6_20150923_120934.mp4, -1, -1, 46.50669762802673
97
+ Stabilization/1019_MetroArrives_LGG2_CAM00868.mp4, -1, -1, 59.05090948407636
98
+ Artifacts/1019_ColorfulKites2_GS6_20151020_140327.mp4, -1, -1, 69.34239333030179
99
+ Focus/0923_BirdsFlyOnPier_GS5_20150925_165328.mp4, -1, -1, 46.858952126850156
100
+ Focus/0923_BirdsFlyOnPier_Nokia1020_WP_20130305_20_14_47_Pro.mp4, -1, -1, 49.44336625534995
101
+ Color/0923_RiverBed_iphone5_IMG_0280.mp4, -1, -1, 63.16961373461168
102
+ Sharpness/0911duckSwim_gs5_20150911_154802.mp4, -1, -1, 59.46925058712509
103
+ Color/0918_DelMarView_HTCOneVX_VIDEO0062.mp4, -1, -1, 45.79645003864048
104
+ Color/0918_CyclistsNear1_LGG2_CAM00795.mp4, -1, -1, 46.753110388198024
105
+ Exposure/0817_WalkIntoBarn_OppoFind7_VID20150718144333.mp4, -1, -1, 49.02405995062579
106
+ Color/1019_FlyingKite3_GS6_20151020_141052.mp4, -1, -1, 73.64276415152705
107
+ Artifacts/0918_DogsOnBeach_GS6_20150919_085853.mp4, -1, -1, 65.94233767581703
108
+ Stabilization/0923_CrazySubmarine_Nokia1020_WP_20130305_19_31_30_Pro.mp4, -1, -1, 59.038670696087216
109
+ Focus/0913boyWalkBy_iPhone5s_IMG_0245.mp4, -1, -1, 62.812549213807614
110
+ Stabilization/1019_MetroArrives_GS6_20151020_133409.mp4, -1, -1, 65.82583000957678
111
+ Sharpness/0911duckSwim_notes4_20150911_153835.mp4, -1, -1, 71.74875099099955
112
+ Sharpness/1006_ParkBus_HTCOneVX_VIDEO0100.mp4, -1, -1, 31.50387615900128
113
+ Focus/1019_SkateBoardingSkills_GS6_20151019_182541.mp4, -1, -1, 59.007584920405066
114
+ Color/0918_DelMarView_LGG2_CAM00809.mp4, -1, -1, 48.7408776505235
115
+ Color/0912_Cheerleader1_Note4_20150912_152444.mp4, -1, -1, 46.853275685229434
116
+ Focus/1019_FlyingKite_OppoFind7_VID20150508093000.mp4, -1, -1, 54.075938062059876
117
+ Exposure/1019_TrafficFromAfar_HTCOneVX_VIDEO0102.mp4, -1, -1, 38.676262337846296
118
+ Focus/0911driving_5s_IMG_0145.mp4, -1, -1, 44.9586564442459
119
+ Exposure/1019_ViewFromMetro2_LGG2_CAM00870.mp4, -1, -1, 37.58488437787315
120
+ Focus/0723_OldTownShop_HTCOneVx_12_VIDEO0019.mp4, -1, -1, 30.53679255118817
121
+ Stabilization/0923_ChasingDucksInWater_Note4_20150925_162515.mp4, -1, -1, 48.75250168989043
122
+ Sharpness/0826_TrainPullsAway_Note4_20150826_175232.mp4, -1, -1, 57.182744558097575
123
+ Focus/1019_FlyingKite_GS6_20151020_140804.mp4, -1, -1, 23.50289297893303
124
+ Color/0918_CyclistsNear1_GS6_20150919_081325.mp4, -1, -1, 52.81500748332389
125
+ Artifacts/1006_PlaneLandsAfar2_OppoFind7_VID20150424122651.mp4, -1, -1, 57.22288718627244
126
+ Exposure/0912_Burgers_iphone5s_IMG_0167.mp4, -1, -1, 64.38345299460406
127
+ Sharpness/0923_VerticalAndRollerCoaster_GS5_20150925_152655.mp4, -1, -1, 55.74790371519573
128
+ Stabilization/0923_FollowPigeons_GS5_20150925_165510.mp4, -1, -1, 55.46436837100166
129
+ Exposure/0817_WalkIntoBarn_LGG2_CAM00718.mp4, -1, -1, 51.146870112855
130
+ Color/0913playSand_iPhone5s_IMG_0251.mp4, -1, -1, 72.96615929361013
131
+ Stabilization/0923_CrazySubmarine_GS5_20150925_161013.mp4, -1, -1, 62.68393406357753
132
+ Sharpness/0912_football_Note4_20150912_152954.mp4, -1, -1, 55.629277646191774
133
+ Color/1006_EmergencyScene_LGG2_CAM00838.mp4, -1, -1, 66.8679745839632
134
+ Stabilization/0923_CrazySubmarine_iphone5_IMG_0268.mp4, -1, -1, 62.539356287114956
135
+ Color/0912_Cheerleader1_iphone_IMG_0184.mp4, -1, -1, 59.46973486351669
136
+ Exposure/1019_TrafficFromAfar_GS6_20151017_173401.mp4, -1, -1, 60.08891382298475
137
+ Color/0913playSand_Note4_20150913_182713.mp4, -1, -1, 54.20213781634974
138
+ Focus/0911driving_gs5_20150911_170903.mp4, -1, -1, 52.638967951340504
139
+ Artifacts/1019_ColorfulKites2_OppoFind7_VID20150508092524.mp4, -1, -1, 50.97310511796482
140
+ Color/0918_DelMarView_OppoFind7_VID20150409024137.mp4, -1, -1, 60.963433745979756
141
+ Color/0918_DelMarView_GS6_20150921_071851.mp4, -1, -1, 57.249620671155895
142
+ Exposure/0911panGarden_Gs5_20150911_153348.mp4, -1, -1, 30.910356850986815
143
+ Sharpness/0827_OldTownFountain_Note4_20150827_175314.mp4, -1, -1, 66.98799834034504
144
+ Stabilization/0923_FollowPigeons_Note4_20150925_164500.mp4, -1, -1, 52.37350976713892
145
+ Exposure/0911_DogBall_GS6_20150911_185459.mp4, -1, -1, 65.10542900249484
146
+ Focus/1019_HelicopterInSky_OppoFind7_VID20150508093547.mp4, -1, -1, 53.701231222442544
147
+ Artifacts/1006_DucksAndFish2_OppoFind7_VID20150424113906.mp4, -1, -1, 21.403852037578215
148
+ Exposure/1022_QCOMSoccer2_GS6_20151022_174002.mp4, -1, -1, 44.361660121662815
149
+ Color/0912_Cheerleader1_GS5_20150912_153429.mp4, -1, -1, 51.08416769793256
150
+ Stabilization/0923_WalkingToLunch2_HTCOneVx_VIDEO0065.mp4, -1, -1, 22.111602493222765
151
+ Sharpness/0918_GuysPlayVolleyball_LGG2_CAM00804.mp4, -1, -1, 59.486900886871
152
+ Artifacts/1006_FollowTheTrain_LGG2_CAM00834.mp4, -1, -1, 33.939419249644835
153
+ Color/1024_BadmintonAtBalboa_OppoFined7_VID20150512060558.mp4, -1, -1, 56.55278041432343
154
+ Color/0923_RiverBed_Nokia1020_WP_20130305_20_19_29_Pro.mp4, -1, -1, 66.39664147071237
155
+ Stabilization/1019_BollywoodDanceTraining_HTCOneVX_VIDEO0103.mp4, -1, -1, 46.903863263746985
156
+ Focus/1019_SkateBoardingSkills_HTCOneVX_VIDEO0106.mp4, -1, -1, 31.781376267831334
157
+ Sharpness/0826_TrainPullsAway_LG_G2_CAM00726.mp4, -1, -1, 49.77539197067738
158
+ Artifacts/0918_AxelFetchesBall_OppoFind7_VID20150407042625.mp4, -1, -1, 56.47030298915485
159
+ Stabilization/1019_ConstructionBuggy2_OppoFind7_VID20150508085425.mp4, -1, -1, 43.721940313265264
160
+ Artifacts/0918_DogsOnBeach_LGG2_CAM00801.mp4, -1, -1, 60.0145230621533
161
+ Artifacts/0923_ChattingOverLunch_Oppo_VID20150411074312.mp4, -1, -1, 52.436942132562734
162
+ Artifacts/0723_ManUnderTree_Nokia1020_03_WP_20130426_22_12_02_Pro.mp4, -1, -1, 45.67874272310085
163
+ Exposure/0817_WalkIntoBarn_Note4_20150817_110508.mp4, -1, -1, 58.079428695078406
164
+ Sharpness/0911duckSwim_lumia1020_WindowsPhone_20130331_00_06_41_Pro.mp4, -1, -1, 71.3530679089854
165
+ Exposure/1022_QCOMSoccer2_OppoFind7_VID20150510130154.mp4, -1, -1, 45.88425102889353
166
+ Artifacts/1006_PlaneLandsAfar2_GS6_20151006_170428.mp4, -1, -1, 54.47172841710812
167
+ Focus/1019_HelicopterInSky_LGG2_CAM00880.mp4, -1, -1, 49.06343607078643
168
+ Focus/0911driving_note4_20150911_165933.mp4, -1, -1, 66.5912517517975
169
+ Exposure/0911skateBoard_note4_20150911_173356.mp4, -1, -1, 61.56026393111605
170
+ Focus/0913boyWalkBy_Note4_20150913_181950.mp4, -1, -1, 53.97028926187148
171
+ Artifacts/1006_PlaneLandsAfar2_HTCOneVX_VIDEO0099.mp4, -1, -1, 43.13950764717603
172
+ Artifacts/0911nightScene_5s_IMG_0160.mp4, -1, -1, 35.72522155502472
173
+ Stabilization/0923_WalkingToLunch2_Oppo_VID20150411073225.mp4, -1, -1, 34.807565694465076
174
+ Stabilization/0923_SwingingArm_iphone5_IMG_0266.mp4, -1, -1, 59.04852227983286
175
+ Focus/1024_TableTennisAtBalboa_GS6_20151024_104121.mp4, -1, -1, 48.88189507385214
176
+ Artifacts/1019_ColorfulKites2_HTCOneVX_VIDEO0124.mp4, -1, -1, 45.61583176940698
177
+ Sharpness/0923_VerticalAndRollerCoaster_Nokia1020_WP_20130319_18_36_46_Pro.mp4, -1, -1, 63.52586365094227
178
+ Sharpness/0918_GuysPlayVolleyball_OppoFind7_VID20150407045402.mp4, -1, -1, 51.96402419847002
179
+ Artifacts/0918_DogsOnBeach_OppoFind7_VID20150407042141.mp4, -1, -1, 61.24971954276459
180
+ Artifacts/1006_DucksAndFish2_LGG2_CAM00841.mp4, -1, -1, 61.63231290029228
181
+ Exposure/1019_ViewFromMetro2_HTCOneVX_VIDEO0122.mp4, -1, -1, 31.44885665910503
182
+ Sharpness/1006_ParkBus_LGG2_CAM00848.mp4, -1, -1, 49.148682603066575
183
+ Color/1019_FlyingKite3_OppoFind7_VID20150508093248.mp4, -1, -1, 56.46153730303658
184
+ Stabilization/1019_SkateBoarderStepsOut_HTCOneVX.mp4, -1, -1, 36.92437220840707
185
+ Exposure/1019_TrafficFromAfar_LGG2_CAM00850.mp4, -1, -1, 43.920755609079976
186
+ Stabilization/0923_SwingingArm_GS5_20150925_154945.mp4, -1, -1, 59.37209120186363
187
+ Stabilization/1019_SkateBoarderStepsOut_GS6_20151019_182712.mp4, -1, -1, 61.247990668124054
188
+ Focus/1019_HelicopterInSky_HTCOneVX_VIDEO0132.mp4, -1, -1, 16.562095145958942
189
+ Stabilization/1019_BollywoodDanceTraining_LGG2_CAM00851.mp4, -1, -1, 54.376932661964815
190
+ Exposure/0911skateBoard_gs5_20150911_174325.mp4, -1, -1, 49.66995169846537
191
+ Focus/1024_TableTennisAtBalboa_LGG2_CAM00887.mp4, -1, -1, 47.09583764080335
192
+ Sharpness/0923_DucksInWater_Note4_20150925_162109.mp4, -1, -1, 40.52447294642663
193
+ Artifacts/1006_FollowTheTrain_HTCOneVX_VIDEO0086.mp4, -1, -1, 27.486434026248876
194
+ Artifacts/0923_ChattingOverLunch_HTCOneVx_VIDEO0066.mp4, -1, -1, 31.49041491004223
195
+ Sharpness/0913taiZi_Note4_20150913_182629.mp4, -1, -1, 32.936571931353
196
+ Sharpness/0827_OldTownFountain_GS6_20150827_180303.mp4, -1, -1, 66.09356090065711
197
+ Exposure/0911skateBoard_1020_WindowsPhone_20130305_19_55_21_Pro.mp4, -1, -1, 55.13130426481119
198
+ Artifacts/1006_PlaneLandsAfar2_LGG2_CAM00847.mp4, -1, -1, 43.7872800043708
199
+ Sharpness/0923_DucksInWater_GS5_20150925_163115.mp4, -1, -1, 50.962588180142056
200
+ Sharpness/0827_OldTownFountain_iphone5s_IMG_0461.mp4, -1, -1, 63.719020683993385
201
+ Focus/1019_HelicopterInSky_GS6_20151020_141352.mp4, -1, -1, 22.843160682622553
202
+ Exposure/0911skateBoard_5s_IMG_0151.mp4, -1, -1, 66.17622463832596
203
+ Color/0923_RiverBed_Note4_20150925_164759.mp4, -1, -1, 64.23939021565612
204
+ Focus/0723_OldTownShop_GS5_12_20150723_133424.mp4, -1, -1, 51.65560419378202
205
+ Sharpness/0923_DucksInWater_Nokia1020_WP_20130305_19_54_00_Pro.mp4, -1, -1, 54.16160278100095
206
+ Sharpness/0923_DucksInWater_iphone5_IMG_0272.mp4, -1, -1, 49.06737404472014
207
+ Focus/1006_ComplexTrain_LGG2_CAM00835.mp4, -1, -1, 27.479486381827094
208
+ Stabilization/1019_MetroArrives_HTCOneVX_VIDEO0120.mp4, -1, -1, 45.266375356943335
examplar_data_labels/LIVE_VQA/labels.txt ADDED
@@ -0,0 +1,148 @@
1
+ pa/pa2_25fps.yuv, -1, -1, 44.5104
2
+ pa/pa3_25fps.yuv, -1, -1, 70.1054
3
+ pa/pa4_25fps.yuv, -1, -1, 66.4280
4
+ pa/pa5_25fps.yuv, -1, -1, 75.1225
5
+ pa/pa6_25fps.yuv, -1, -1, 73.8803
6
+ pa/pa7_25fps.yuv, -1, -1, 63.2564
7
+ pa/pa8_25fps.yuv, -1, -1, 61.2726
8
+ pa/pa9_25fps.yuv, -1, -1, 40.5551
9
+ pa/pa10_25fps.yuv, -1, -1, 52.6111
10
+ pa/pa11_25fps.yuv, -1, -1, 60.2534
11
+ pa/pa12_25fps.yuv, -1, -1, 68.7186
12
+ pa/pa13_25fps.yuv, -1, -1, 42.9784
13
+ pa/pa14_25fps.yuv, -1, -1, 51.0530
14
+ pa/pa15_25fps.yuv, -1, -1, 55.7020
15
+ pa/pa16_25fps.yuv, -1, -1, 65.6457
16
+ rb/rb2_25fps.yuv, -1, -1, 64.9369
17
+ rb/rb3_25fps.yuv, -1, -1, 46.2446
18
+ rb/rb4_25fps.yuv, -1, -1, 54.3732
19
+ rb/rb5_25fps.yuv, -1, -1, 46.4907
20
+ rb/rb6_25fps.yuv, -1, -1, 68.1064
21
+ rb/rb7_25fps.yuv, -1, -1, 54.8101
22
+ rb/rb8_25fps.yuv, -1, -1, 54.6555
23
+ rb/rb9_25fps.yuv, -1, -1, 39.1978
24
+ rb/rb10_25fps.yuv, -1, -1, 43.6833
25
+ rb/rb11_25fps.yuv, -1, -1, 55.8563
26
+ rb/rb12_25fps.yuv, -1, -1, 63.5809
27
+ rb/rb13_25fps.yuv, -1, -1, 38.8828
28
+ rb/rb14_25fps.yuv, -1, -1, 45.6069
29
+ rb/rb15_25fps.yuv, -1, -1, 48.0089
30
+ rb/rb16_25fps.yuv, -1, -1, 47.5270
31
+ rh/rh2_25fps.yuv, -1, -1, 68.1431
32
+ rh/rh3_25fps.yuv, -1, -1, 63.5698
33
+ rh/rh4_25fps.yuv, -1, -1, 48.0196
34
+ rh/rh5_25fps.yuv, -1, -1, 51.4980
35
+ rh/rh6_25fps.yuv, -1, -1, 55.2291
36
+ rh/rh7_25fps.yuv, -1, -1, 62.3778
37
+ rh/rh8_25fps.yuv, -1, -1, 42.6909
38
+ rh/rh9_25fps.yuv, -1, -1, 37.8713
39
+ rh/rh10_25fps.yuv, -1, -1, 45.4363
40
+ rh/rh11_25fps.yuv, -1, -1, 53.6343
41
+ rh/rh14_25fps.yuv, -1, -1, 42.8568
42
+ rh/rh15_25fps.yuv, -1, -1, 52.0988
43
+ rh/rh16_25fps.yuv, -1, -1, 62.2062
44
+ tr/tr2_25fps.yuv, -1, -1, 71.2731
45
+ tr/tr3_25fps.yuv, -1, -1, 72.1356
46
+ tr/tr4_25fps.yuv, -1, -1, 64.6561
47
+ tr/tr5_25fps.yuv, -1, -1, 53.1125
48
+ tr/tr6_25fps.yuv, -1, -1, 73.4730
49
+ tr/tr7_25fps.yuv, -1, -1, 55.3531
50
+ tr/tr8_25fps.yuv, -1, -1, 52.4524
51
+ tr/tr9_25fps.yuv, -1, -1, 38.6726
52
+ tr/tr10_25fps.yuv, -1, -1, 47.7716
53
+ tr/tr11_25fps.yuv, -1, -1, 56.9119
54
+ tr/tr12_25fps.yuv, -1, -1, 63.7984
55
+ tr/tr13_25fps.yuv, -1, -1, 33.4734
56
+ tr/tr14_25fps.yuv, -1, -1, 42.5381
57
+ tr/tr15_25fps.yuv, -1, -1, 56.1328
58
+ tr/tr16_25fps.yuv, -1, -1, 65.7102
59
+ st/st2_25fps.yuv, -1, -1, 65.6522
60
+ st/st3_25fps.yuv, -1, -1, 61.3221
61
+ st/st4_25fps.yuv, -1, -1, 44.0305
62
+ st/st5_25fps.yuv, -1, -1, 41.4157
63
+ st/st6_25fps.yuv, -1, -1, 58.4534
64
+ st/st7_25fps.yuv, -1, -1, 44.2762
65
+ st/st8_25fps.yuv, -1, -1, 48.3834
66
+ st/st9_25fps.yuv, -1, -1, 40.7745
67
+ st/st10_25fps.yuv, -1, -1, 46.5633
68
+ st/st11_25fps.yuv, -1, -1, 52.3269
69
+ st/st12_25fps.yuv, -1, -1, 56.0811
70
+ st/st13_25fps.yuv, -1, -1, 36.5136
71
+ st/st14_25fps.yuv, -1, -1, 42.9632
72
+ st/st15_25fps.yuv, -1, -1, 49.1987
73
+ st/st16_25fps.yuv, -1, -1, 57.4200
74
+ sf/sf2_25fps.yuv, -1, -1, 54.9213
75
+ sf/sf3_25fps.yuv, -1, -1, 63.2756
76
+ sf/sf4_25fps.yuv, -1, -1, 56.8614
77
+ sf/sf5_25fps.yuv, -1, -1, 49.2987
78
+ sf/sf6_25fps.yuv, -1, -1, 59.3959
79
+ sf/sf7_25fps.yuv, -1, -1, 44.8094
80
+ sf/sf8_25fps.yuv, -1, -1, 39.1088
81
+ sf/sf9_25fps.yuv, -1, -1, 32.6002
82
+ sf/sf10_25fps.yuv, -1, -1, 44.0164
83
+ sf/sf11_25fps.yuv, -1, -1, 54.9423
84
+ sf/sf12_25fps.yuv, -1, -1, 57.1497
85
+ sf/sf13_25fps.yuv, -1, -1, 40.9999
86
+ sf/sf14_25fps.yuv, -1, -1, 44.6477
87
+ sf/sf15_25fps.yuv, -1, -1, 49.2215
88
+ sf/sf16_25fps.yuv, -1, -1, 53.7003
89
+ bs/bs2_25fps.yuv, -1, -1, 68.9412
90
+ bs/bs3_25fps.yuv, -1, -1, 52.9363
91
+ bs/bs4_25fps.yuv, -1, -1, 51.0109
92
+ bs/bs5_25fps.yuv, -1, -1, 55.9066
93
+ bs/bs6_25fps.yuv, -1, -1, 61.7965
94
+ bs/bs7_25fps.yuv, -1, -1, 45.9273
95
+ bs/bs8_25fps.yuv, -1, -1, 40.9576
96
+ bs/bs9_25fps.yuv, -1, -1, 31.9421
97
+ bs/bs10_25fps.yuv, -1, -1, 36.6396
98
+ bs/bs11_25fps.yuv, -1, -1, 38.6448
99
+ bs/bs12_25fps.yuv, -1, -1, 52.1844
100
+ bs/bs13_25fps.yuv, -1, -1, 32.7252
101
+ bs/bs14_25fps.yuv, -1, -1, 43.9984
102
+ bs/bs15_25fps.yuv, -1, -1, 50.5090
103
+ bs/bs16_25fps.yuv, -1, -1, 53.4364
104
+ sh/sh2_50fps.yuv, -1, -1, 81.1601
105
+ sh/sh3_50fps.yuv, -1, -1, 70.5494
106
+ sh/sh4_50fps.yuv, -1, -1, 54.9174
107
+ sh/sh5_50fps.yuv, -1, -1, 49.6350
108
+ sh/sh6_50fps.yuv, -1, -1, 55.5307
109
+ sh/sh7_50fps.yuv, -1, -1, 61.2837
110
+ sh/sh8_50fps.yuv, -1, -1, 46.2254
111
+ sh/sh9_50fps.yuv, -1, -1, 36.2440
112
+ sh/sh10_50fps.yuv, -1, -1, 40.8004
113
+ sh/sh11_50fps.yuv, -1, -1, 51.6153
114
+ sh/sh12_50fps.yuv, -1, -1, 66.3166
115
+ sh/sh13_50fps.yuv, -1, -1, 37.0212
116
+ sh/sh14_50fps.yuv, -1, -1, 44.0813
117
+ sh/sh15_50fps.yuv, -1, -1, 57.5757
118
+ sh/sh16_50fps.yuv, -1, -1, 62.0745
119
+ mc/mc2_50fps.yuv, -1, -1, 78.3431
120
+ mc/mc3_50fps.yuv, -1, -1, 69.2258
121
+ mc/mc4_50fps.yuv, -1, -1, 59.5299
122
+ mc/mc5_50fps.yuv, -1, -1, 57.8482
123
+ mc/mc6_50fps.yuv, -1, -1, 73.3075
124
+ mc/mc7_50fps.yuv, -1, -1, 58.5392
125
+ mc/mc8_50fps.yuv, -1, -1, 54.0963
126
+ mc/mc9_50fps.yuv, -1, -1, 47.3711
127
+ mc/mc10_50fps.yuv, -1, -1, 48.7705
128
+ mc/mc11_50fps.yuv, -1, -1, 57.6788
129
+ mc/mc12_50fps.yuv, -1, -1, 67.8232
130
+ mc/mc13_50fps.yuv, -1, -1, 30.9426
131
+ mc/mc14_50fps.yuv, -1, -1, 40.5326
132
+ mc/mc15_50fps.yuv, -1, -1, 52.5435
133
+ mc/mc16_50fps.yuv, -1, -1, 64.8173
134
+ pr/pr2_50fps.yuv, -1, -1, 61.3882
135
+ pr/pr3_50fps.yuv, -1, -1, 66.3322
136
+ pr/pr4_50fps.yuv, -1, -1, 45.4702
137
+ pr/pr5_50fps.yuv, -1, -1, 45.3150
138
+ pr/pr6_50fps.yuv, -1, -1, 55.3240
139
+ pr/pr7_50fps.yuv, -1, -1, 56.1730
140
+ pr/pr8_50fps.yuv, -1, -1, 44.6086
141
+ pr/pr9_50fps.yuv, -1, -1, 39.8067
142
+ pr/pr10_50fps.yuv, -1, -1, 53.7598
143
+ pr/pr11_50fps.yuv, -1, -1, 59.8921
144
+ pr/pr12_50fps.yuv, -1, -1, 77.2518
145
+ pr/pr13_50fps.yuv, -1, -1, 39.7105
146
+ pr/pr14_50fps.yuv, -1, -1, 46.8271
147
+ pr/pr15_50fps.yuv, -1, -1, 54.4239
148
+ pr/pr16_50fps.yuv, -1, -1, 61.8235
examplar_data_labels/LIVE_VQA/names.txt ADDED
@@ -0,0 +1,150 @@
1
+ pa2_25fps.yuv
2
+ pa3_25fps.yuv
3
+ pa4_25fps.yuv
4
+ pa5_25fps.yuv
5
+ pa6_25fps.yuv
6
+ pa7_25fps.yuv
7
+ pa8_25fps.yuv
8
+ pa9_25fps.yuv
9
+ pa10_25fps.yuv
10
+ pa11_25fps.yuv
11
+ pa12_25fps.yuv
12
+ pa13_25fps.yuv
13
+ pa14_25fps.yuv
14
+ pa15_25fps.yuv
15
+ pa16_25fps.yuv
16
+ rb2_25fps.yuv
17
+ rb3_25fps.yuv
18
+ rb4_25fps.yuv
19
+ rb5_25fps.yuv
20
+ rb6_25fps.yuv
21
+ rb7_25fps.yuv
22
+ rb8_25fps.yuv
23
+ rb9_25fps.yuv
24
+ rb10_25fps.yuv
25
+ rb11_25fps.yuv
26
+ rb12_25fps.yuv
27
+ rb13_25fps.yuv
28
+ rb14_25fps.yuv
29
+ rb15_25fps.yuv
30
+ rb16_25fps.yuv
31
+ rh2_25fps.yuv
32
+ rh3_25fps.yuv
33
+ rh4_25fps.yuv
34
+ rh5_25fps.yuv
35
+ rh6_25fps.yuv
36
+ rh7_25fps.yuv
37
+ rh8_25fps.yuv
38
+ rh9_25fps.yuv
39
+ rh10_25fps.yuv
40
+ rh11_25fps.yuv
41
+ rh12_25fps.yuv
42
+ rh13_25fps.yuv
43
+ rh14_25fps.yuv
44
+ rh15_25fps.yuv
45
+ rh16_25fps.yuv
46
+ tr2_25fps.yuv
47
+ tr3_25fps.yuv
48
+ tr4_25fps.yuv
49
+ tr5_25fps.yuv
50
+ tr6_25fps.yuv
51
+ tr7_25fps.yuv
52
+ tr8_25fps.yuv
53
+ tr9_25fps.yuv
54
+ tr10_25fps.yuv
55
+ tr11_25fps.yuv
56
+ tr12_25fps.yuv
57
+ tr13_25fps.yuv
58
+ tr14_25fps.yuv
59
+ tr15_25fps.yuv
60
+ tr16_25fps.yuv
61
+ st2_25fps.yuv
62
+ st3_25fps.yuv
63
+ st4_25fps.yuv
64
+ st5_25fps.yuv
65
+ st6_25fps.yuv
66
+ st7_25fps.yuv
67
+ st8_25fps.yuv
68
+ st9_25fps.yuv
69
+ st10_25fps.yuv
70
+ st11_25fps.yuv
71
+ st12_25fps.yuv
72
+ st13_25fps.yuv
73
+ st14_25fps.yuv
74
+ st15_25fps.yuv
75
+ st16_25fps.yuv
76
+ sf2_25fps.yuv
77
+ sf3_25fps.yuv
78
+ sf4_25fps.yuv
79
+ sf5_25fps.yuv
80
+ sf6_25fps.yuv
81
+ sf7_25fps.yuv
82
+ sf8_25fps.yuv
83
+ sf9_25fps.yuv
84
+ sf10_25fps.yuv
85
+ sf11_25fps.yuv
86
+ sf12_25fps.yuv
87
+ sf13_25fps.yuv
88
+ sf14_25fps.yuv
89
+ sf15_25fps.yuv
90
+ sf16_25fps.yuv
91
+ bs2_25fps.yuv
92
+ bs3_25fps.yuv
93
+ bs4_25fps.yuv
94
+ bs5_25fps.yuv
95
+ bs6_25fps.yuv
96
+ bs7_25fps.yuv
97
+ bs8_25fps.yuv
98
+ bs9_25fps.yuv
99
+ bs10_25fps.yuv
100
+ bs11_25fps.yuv
101
+ bs12_25fps.yuv
102
+ bs13_25fps.yuv
103
+ bs14_25fps.yuv
104
+ bs15_25fps.yuv
105
+ bs16_25fps.yuv
106
+ sh2_50fps.yuv
107
+ sh3_50fps.yuv
108
+ sh4_50fps.yuv
109
+ sh5_50fps.yuv
110
+ sh6_50fps.yuv
111
+ sh7_50fps.yuv
112
+ sh8_50fps.yuv
113
+ sh9_50fps.yuv
114
+ sh10_50fps.yuv
115
+ sh11_50fps.yuv
116
+ sh12_50fps.yuv
117
+ sh13_50fps.yuv
118
+ sh14_50fps.yuv
119
+ sh15_50fps.yuv
120
+ sh16_50fps.yuv
121
+ mc2_50fps.yuv
122
+ mc3_50fps.yuv
123
+ mc4_50fps.yuv
124
+ mc5_50fps.yuv
125
+ mc6_50fps.yuv
126
+ mc7_50fps.yuv
127
+ mc8_50fps.yuv
128
+ mc9_50fps.yuv
129
+ mc10_50fps.yuv
130
+ mc11_50fps.yuv
131
+ mc12_50fps.yuv
132
+ mc13_50fps.yuv
133
+ mc14_50fps.yuv
134
+ mc15_50fps.yuv
135
+ mc16_50fps.yuv
136
+ pr2_50fps.yuv
137
+ pr3_50fps.yuv
138
+ pr4_50fps.yuv
139
+ pr5_50fps.yuv
140
+ pr6_50fps.yuv
141
+ pr7_50fps.yuv
142
+ pr8_50fps.yuv
143
+ pr9_50fps.yuv
144
+ pr10_50fps.yuv
145
+ pr11_50fps.yuv
146
+ pr12_50fps.yuv
147
+ pr13_50fps.yuv
148
+ pr14_50fps.yuv
149
+ pr15_50fps.yuv
150
+ pr16_50fps.yuv
examplar_data_labels/LIVE_VQA/scores.txt ADDED
@@ -0,0 +1,150 @@
1
+ 44.5104 12.2909
2
+ 70.1054 8.4630
3
+ 66.4280 10.9220
4
+ 75.1225 8.7056
5
+ 73.8803 5.7825
6
+ 63.2564 8.8315
7
+ 61.2726 10.6827
8
+ 40.5551 8.4040
9
+ 52.6111 9.8646
10
+ 60.2534 9.0097
11
+ 68.7186 9.3995
12
+ 42.9784 8.5050
13
+ 51.0530 8.0119
14
+ 55.7020 9.3731
15
+ 65.6457 10.8023
16
+ 64.9369 12.4744
17
+ 46.2446 9.8897
18
+ 54.3732 12.0351
19
+ 46.4907 10.9136
20
+ 68.1064 10.4983
21
+ 54.8101 13.2412
22
+ 54.6555 12.2369
23
+ 39.1978 11.7595
24
+ 43.6833 12.6685
25
+ 55.8563 15.2382
26
+ 63.5809 12.0636
27
+ 38.8828 11.0500
28
+ 45.6069 14.4528
29
+ 48.0089 13.7996
30
+ 47.5270 11.8475
31
+ 68.1431 12.0123
32
+ 63.5698 12.6835
33
+ 48.0196 11.2378
34
+ 51.4980 13.1559
35
+ 55.2291 11.2665
36
+ 62.3778 12.1601
37
+ 42.6909 9.5547
38
+ 37.8713 9.9518
39
+ 45.4363 11.9058
40
+ 53.6343 13.7169
41
+ 62.9934 10.0094
42
+ 31.4716 8.0896
43
+ 42.8568 11.4820
44
+ 52.0988 8.0925
45
+ 62.2062 10.8021
46
+ 71.2731 7.3171
47
+ 72.1356 8.2769
48
+ 64.6561 8.7193
49
+ 53.1125 10.2891
50
+ 73.4730 11.2189
51
+ 55.3531 10.7032
52
+ 52.4524 9.9872
53
+ 38.6726 8.7816
54
+ 47.7716 8.6263
55
+ 56.9119 9.3595
56
+ 63.7984 7.4827
57
+ 33.4734 8.8625
58
+ 42.5381 12.2394
59
+ 56.1328 10.0524
60
+ 65.7102 10.8513
61
+ 65.6522 11.8297
62
+ 61.3221 11.2218
63
+ 44.0305 12.3100
64
+ 41.4157 10.1887
65
+ 58.4534 10.2342
66
+ 44.2762 10.2308
67
+ 48.3834 10.8759
68
+ 40.7745 10.9440
69
+ 46.5633 9.3641
70
+ 52.3269 11.2327
71
+ 56.0811 9.9024
72
+ 36.5136 10.6661
73
+ 42.9632 9.5615
74
+ 49.1987 12.5682
75
+ 57.4200 10.8714
76
+ 54.9213 9.9593
77
+ 63.2756 7.0135
78
+ 56.8614 10.3063
79
+ 49.2987 7.9941
80
+ 59.3959 8.3076
81
+ 44.8094 11.1511
82
+ 39.1088 8.8315
83
+ 32.6002 7.5710
84
+ 44.0164 9.5158
85
+ 54.9423 8.8703
86
+ 57.1497 10.3586
87
+ 40.9999 10.2129
88
+ 44.6477 9.6876
89
+ 49.2215 8.2303
90
+ 53.7003 8.3839
91
+ 68.9412 13.2694
92
+ 52.9363 10.9429
93
+ 51.0109 11.6969
94
+ 55.9066 12.9653
95
+ 61.7965 8.9395
96
+ 45.9273 12.2075
97
+ 40.9576 10.0565
98
+ 31.9421 10.0953
99
+ 36.6396 10.2083
100
+ 38.6448 9.1071
101
+ 52.1844 10.8366
102
+ 32.7252 11.6010
103
+ 43.9984 9.6540
104
+ 50.5090 8.9686
105
+ 53.4364 11.3882
106
+ 81.1601 8.8839
107
+ 70.5494 7.0154
108
+ 54.9174 10.3442
109
+ 49.6350 9.8661
110
+ 55.5307 8.4316
111
+ 61.2837 9.8106
112
+ 46.2254 8.0034
113
+ 36.2440 8.7969
114
+ 40.8004 8.8023
115
+ 51.6153 10.4552
116
+ 66.3166 10.0913
117
+ 37.0212 7.4451
118
+ 44.0813 9.4971
119
+ 57.5757 6.4381
120
+ 62.0745 6.2390
121
+ 78.3431 9.9876
122
+ 69.2258 8.0969
123
+ 59.5299 9.8755
124
+ 57.8482 10.2606
125
+ 73.3075 9.0790
126
+ 58.5392 11.3208
127
+ 54.0963 10.0428
128
+ 47.3711 10.8012
129
+ 48.7705 7.7892
130
+ 57.6788 9.7494
131
+ 67.8232 7.4454
132
+ 30.9426 8.0339
133
+ 40.5326 9.8009
134
+ 52.5435 9.9240
135
+ 64.8173 9.5076
136
+ 61.3882 10.2155
137
+ 66.3322 11.1123
138
+ 45.4702 7.6892
139
+ 45.3150 8.6377
140
+ 55.3240 6.1770
141
+ 56.1730 8.7040
142
+ 44.6086 10.3585
143
+ 39.8067 8.2885
144
+ 53.7598 9.0671
145
+ 59.8921 10.6386
146
+ 77.2518 8.7931
147
+ 39.7105 9.5447
148
+ 46.8271 10.3513
149
+ 54.4239 11.2077
150
+ 61.8235 11.1164
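Note on the LIVE_VQA annotation layout: names.txt lists one distorted-sequence filename per line (the *_50fps.yuv entries above), and scores.txt carries one matching line per sequence with two space-separated numbers, read here as a subjective score and its spread. The pairing of the two files by line order and the column meanings are assumptions inferred from the layout above, not taken from repository documentation; a minimal parsing sketch under those assumptions:

# Minimal sketch (assumed format): pair LIVE_VQA names.txt with scores.txt by line order.
# Each scores.txt line is taken here as "score spread"; paths are illustrative.
def load_live_vqa_labels(names_path="examplar_data_labels/LIVE_VQA/names.txt",
                         scores_path="examplar_data_labels/LIVE_VQA/scores.txt"):
    with open(names_path) as f:
        names = [line.strip() for line in f if line.strip()]
    with open(scores_path) as f:
        scores = [line.split() for line in f if line.strip()]
    labels = []
    for name, (score, spread) in zip(names, scores):
        labels.append({"name": name, "score": float(score), "spread": float(spread)})
    return labels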
examplar_data_labels/LIVE_VQC/labels.txt ADDED
@@ -0,0 +1,585 @@
1
+ Video/A001.mp4, 10.002367, 30.0, 80.232
2
+ Video/A002.mp4, 10.04, 29.97002997002997, 57.3005
3
+ Video/A003.mp4, 10.008333, 29.97002997002997, 78.1558
4
+ Video/A004.mp4, 9.966821999999999, 30.0, 59.3179
5
+ Video/A005.mp4, 10.033332999999999, 30.0, 47.1778
6
+ Video/A006.mp4, 10.030721999999999, 30.0, 84.6517
7
+ Video/A007.mp4, 10.008333, 29.97002997002997, 50.8675
8
+ Video/A008.mp4, 10.006667, 29.97002997002997, 46.8902
9
+ Video/A009.mp4, 10.006667, 29.97002997002997, 80.7925
10
+ Video/A010.mp4, 10.0306, 30.0, 31.2785
11
+ Video/A011.mp4, 10.006611, 30.0, 87.2287
12
+ Video/A012.mp4, 10.033389, 29.916666666666668, 47.7826
13
+ Video/A013.mp4, 10.04, 29.97002997002997, 83.4083
14
+ Video/A014.mp4, 10.006667, 29.97002997002997, 69.9652
15
+ Video/A015.mp4, 10.003910999999999, 30.0, 78.3957
16
+ Video/A016.mp4, 10.000855999999999, 29.916666666666668, 37.8421
17
+ Video/A017.mp4, 10.030332999999999, 30.0, 65.4778
18
+ Video/A018.mp4, 10.006667, 29.97002997002997, 76.0205
19
+ Video/A019.mp4, 10.04, 29.97002997002997, 70.843
20
+ Video/A020.mp4, 10.005455999999999, 30.0, 77.7258
21
+ Video/A021.mp4, 10.016733, 29.850746268656717, 37.6011
22
+ Video/A022.mp4, 10.031666999999999, 30.0, 38.674
23
+ Video/A023.mp4, 10.006667, 29.97002997002997, 69.0588
24
+ Video/A024.mp4, 10.006667, 29.97002997002997, 71.962
25
+ Video/A025.mp4, 10.024867, 30.0, 42.0856
26
+ Video/A026.mp4, 10.008066999999999, 30.0, 74.5397
27
+ Video/A027.mp4, 10.006667, 29.97002997002997, 56.3388
28
+ Video/A028.mp4, 10.023678, 29.916666666666668, 41.5304
29
+ Video/A029.mp4, 10.004999999999999, 29.97002997002997, 75.3861
30
+ Video/A030.mp4, 10.031478, 30.0, 81.5278
31
+ Video/A031.mp4, 10.023522, 30.0, 62.1042
32
+ Video/A032.mp4, 10.04, 29.97002997002997, 74.3189
33
+ Video/A033.mp4, 10.017267, 30.0, 71.7097
34
+ Video/A034.mp4, 10.04, 29.97002997002997, 80.4271
35
+ Video/A035.mp4, 10.04, 29.97002997002997, 85.0968
36
+ Video/A036.mp4, 10.028767, 30.0, 46.6608
37
+ Video/A037.mp4, 10.04, 29.97002997002997, 77.5858
38
+ Video/A038.mp4, 10.025063, 29.925, 51.9195
39
+ Video/A039.mp4, 10.025599999999999, 29.916666666666668, 53.006
40
+ Video/A040.mp4, 10.032022, 30.0, 50.3763
41
+ Video/A041.mp4, 10.004622, 30.0, 35.1412
42
+ Video/A042.mp4, 10.030278, 30.0, 76.8312
43
+ Video/A043.mp4, 10.031666999999999, 30.0, 38.1554
44
+ Video/A044.mp4, 10.033332999999999, 30.0, 53.4333
45
+ Video/A045.mp4, 10.001510999999999, 30.0, 81.9773
46
+ Video/A046.mp4, 10.030344, 30.0, 69.1902
47
+ Video/A047.mp4, 10.030356, 30.0, 58.4895
48
+ Video/A048.mp4, 10.006667, 29.97002997002997, 72.3462
49
+ Video/A049.mp4, 10.006667, 29.97002997002997, 72.2176
50
+ Video/A050.mp4, 10.002177999999999, 30.0, 48.0519
51
+ Video/A051.mp4, 10.008333, 29.97002997002997, 80.2807
52
+ Video/A052.mp4, 10.006667, 29.97002997002997, 44.8256
53
+ Video/A053.mp4, 10.030433, 30.0, 67.0884
54
+ Video/A054.mp4, 10.027743, 29.917, 76.1667
55
+ Video/A055.mp4, 10.034666999999999, 30.0, 53.8092
56
+ Video/A056.mp4, 10.0, 30.0, 52.3594
57
+ Video/A057.mp4, 10.004999999999999, 29.97002997002997, 66.4892
58
+ Video/A058.mp4, 10.000656, 29.850746268656717, 57.0
59
+ Video/A059.mp4, 10.011099999999999, 30.0, 74.2873
60
+ Video/A060.mp4, 10.008578, 30.0, 63.1839
61
+ Video/A061.mp4, 10.002078, 30.0, 56.4521
62
+ Video/A062.mp4, 10.026188999999999, 30.0, 47.5187
63
+ Video/A063.mp4, 10.031378, 30.0, 44.8971
64
+ Video/A064.mp4, 10.012844, 30.0, 49.8497
65
+ Video/A065.mp4, 10.030622, 30.0, 81.8222
66
+ Video/A066.mp4, 10.006667, 29.97002997002997, 74.7647
67
+ Video/A067.mp4, 10.04, 29.97002997002997, 77.4551
68
+ Video/A068.mp4, 10.005644, 30.0, 39.123
69
+ Video/A069.mp4, 10.0, 30.0, 49.4444
70
+ Video/A070.mp4, 10.006667, 29.97002997002997, 65.4608
71
+ Video/A071.mp4, 10.006667, 29.97002997002997, 74.1071
72
+ Video/A072.mp4, 10.08, 25.0, 20.4406
73
+ Video/A073.mp4, 10.04, 29.97002997002997, 91.1313
74
+ Video/A074.mp4, 10.004999999999999, 29.97002997002997, 57.3204
75
+ Video/A075.mp4, 10.030356, 30.0, 58.1867
76
+ Video/A076.mp4, 10.030332999999999, 30.0, 64.7835
77
+ Video/A077.mp4, 10.04, 29.97002997002997, 74.4483
78
+ Video/A078.mp4, 9.971478, 30.0, 38.8777
79
+ Video/A079.mp4, 9.999977999999999, 30.0, 81.0943
80
+ Video/A080.mp4, 10.015233, 30.0, 39.2086
81
+ Video/A081.mp4, 10.006667, 29.97002997002997, 75.7143
82
+ Video/A082.mp4, 10.008333, 29.97002997002997, 87.0859
83
+ Video/A083.mp4, 10.023052999999999, 29.931, 46.3077
84
+ Video/A084.mp4, 10.003357, 29.79, 55.3835
85
+ Video/A085.mp4, 10.030367, 30.0, 70.8486
86
+ Video/A086.mp4, 10.006667, 29.97002997002997, 35.2749
87
+ Video/A087.mp4, 10.019810999999999, 29.916666666666668, 56.6415
88
+ Video/A088.mp4, 10.027033, 29.416666666666668, 43.4892
89
+ Video/A089.mp4, 10.019771, 29.841, 46.9444
90
+ Video/A090.mp4, 10.006667, 29.97002997002997, 86.1709
91
+ Video/A091.mp4, 10.021115, 29.837, 46.2238
92
+ Video/A092.mp4, 10.026022, 29.833333333333332, 71.095
93
+ Video/A093.mp4, 10.005956, 30.0, 66.9632
94
+ Video/A094.mp4, 10.031467, 30.0, 52.3441
95
+ Video/A095.mp4, 10.006667, 29.97002997002997, 71.5491
96
+ Video/A096.mp4, 10.008333, 29.97002997002997, 66.5815
97
+ Video/A097.mp4, 10.041667, 29.97002997002997, 66.4593
98
+ Video/A098.mp4, 10.030356, 30.0, 85.8091
99
+ Video/A099.mp4, 10.030510999999999, 30.0, 46.1517
100
+ Video/A100.mp4, 10.024967, 30.0, 46.6273
101
+ Video/A101.mp4, 10.008333, 29.97002997002997, 69.3657
102
+ Video/A102.mp4, 10.026043999999999, 29.850746268656717, 62.987
103
+ Video/A103.mp4, 10.016499999999999, 29.850746268656717, 71.6203
104
+ Video/A104.mp4, 10.021355999999999, 30.0, 50.385
105
+ Video/A105.mp4, 10.006667, 29.97002997002997, 37.8824
106
+ Video/A106.mp4, 9.984587999999999, 29.833333333333332, 72.642
107
+ Video/A107.mp4, 10.006667, 29.97002997002997, 81.6325
108
+ Video/A108.mp4, 10.005556, 120.0, 35.8462
109
+ Video/A109.mp4, 10.003021, 29.791, 36.5086
110
+ Video/A110.mp4, 10.04, 29.97002997002997, 87.8585
111
+ Video/B001.mp4, 10.019677999999999, 11.083333333333334, 27.2485
112
+ Video/B002.mp4, 10.031467, 30.0, 73.4245
113
+ Video/B003.mp4, 10.031367, 30.0, 49.1111
114
+ Video/B004.mp4, 10.006321999999999, 30.0, 60.6719
115
+ Video/B005.mp4, 10.038333, 29.97002997002997, 47.2959
116
+ Video/B006.mp4, 9.998232999999999, 30.0, 44.4845
117
+ Video/B007.mp4, 9.998267, 30.0, 37.858
118
+ Video/B008.mp4, 10.030766999999999, 30.0, 75.0204
119
+ Video/B009.mp4, 9.998066999999999, 30.0, 39.4971
120
+ Video/B010.mp4, 10.002689, 29.916666666666668, 33.9518
121
+ Video/B011.mp4, 10.031644, 29.916666666666668, 36.0333
122
+ Video/B012.mp4, 10.024211, 29.916666666666668, 33.9801
123
+ Video/B013.mp4, 10.025466999999999, 29.916666666666668, 58.0795
124
+ Video/B014.mp4, 10.030267, 30.0, 62.3243
125
+ Video/B015.mp4, 10.033332999999999, 30.0, 56.9636
126
+ Video/B016.mp4, 10.04, 29.97002997002997, 52.1065
127
+ Video/B017.mp4, 10.01, 29.97002997002997, 65.3693
128
+ Video/B018.mp4, 10.001944, 30.0, 54.024
129
+ Video/B019.mp4, 10.021422, 30.0, 66.1658
130
+ Video/B020.mp4, 10.033332999999999, 30.0, 69.2781
131
+ Video/B021.mp4, 10.038333, 29.97002997002997, 84.9903
132
+ Video/B022.mp4, 10.038333, 29.97002997002997, 84.532
133
+ Video/B023.mp4, 10.038333, 29.97002997002997, 70.8629
134
+ Video/B024.mp4, 10.038333, 29.97002997002997, 67.6078
135
+ Video/B025.mp4, 10.038333, 29.97002997002997, 86.53
136
+ Video/B026.mp4, 10.04, 29.97002997002997, 72.0794
137
+ Video/B027.mp4, 10.04, 29.97002997002997, 89.2689
138
+ Video/B028.mp4, 10.04, 29.97002997002997, 78.8442
139
+ Video/B029.mp4, 10.04, 29.97002997002997, 90.8788
140
+ Video/B030.mp4, 10.04, 29.97002997002997, 23.7704
141
+ Video/B031.mp4, 10.04, 29.97002997002997, 69.9155
142
+ Video/B032.mp4, 10.04, 29.97002997002997, 62.1639
143
+ Video/B033.mp4, 10.04, 29.97002997002997, 67.869
144
+ Video/B034.mp4, 10.04, 29.97002997002997, 71.4773
145
+ Video/B035.mp4, 10.04, 29.97002997002997, 43.9005
146
+ Video/B036.mp4, 10.04, 29.97002997002997, 82.8776
147
+ Video/B037.mp4, 10.04, 29.97002997002997, 47.7402
148
+ Video/B038.mp4, 10.04, 29.97002997002997, 67.4641
149
+ Video/B039.mp4, 10.04, 29.97002997002997, 61.2217
150
+ Video/B040.mp4, 10.04, 29.97002997002997, 67.0729
151
+ Video/B041.mp4, 10.04, 29.97002997002997, 45.799
152
+ Video/B042.mp4, 10.04, 29.97002997002997, 62.8774
153
+ Video/B043.mp4, 10.04, 29.97002997002997, 38.3
154
+ Video/B044.mp4, 10.04, 29.97002997002997, 87.1329
155
+ Video/B045.mp4, 10.04, 29.97002997002997, 64.1244
156
+ Video/B046.mp4, 10.04, 29.97002997002997, 75.0229
157
+ Video/B047.mp4, 10.04, 29.97002997002997, 63.7714
158
+ Video/B048.mp4, 10.04, 29.97002997002997, 65.9427
159
+ Video/B049.mp4, 10.04, 29.97002997002997, 78.5928
160
+ Video/B050.mp4, 10.04, 29.97002997002997, 77.2019
161
+ Video/B051.mp4, 10.04, 29.97002997002997, 46.0611
162
+ Video/B052.mp4, 10.04, 29.97002997002997, 58.6099
163
+ Video/B053.mp4, 10.04, 29.97002997002997, 74.9505
164
+ Video/B054.mp4, 10.003333, 120.0, 27.2126
165
+ Video/B055.mp4, 10.04, 29.97002997002997, 64.193
166
+ Video/B056.mp4, 10.038333, 29.97002997002997, 62.6744
167
+ Video/B057.mp4, 10.04, 29.97002997002997, 59.9947
168
+ Video/B058.mp4, 10.04, 29.97002997002997, 58.367
169
+ Video/B059.mp4, 10.04, 29.97002997002997, 52.0
170
+ Video/B060.mp4, 10.04, 29.97002997002997, 59.4882
171
+ Video/B061.mp4, 10.04, 29.97002997002997, 63.7011
172
+ Video/B062.mp4, 10.04, 29.97002997002997, 56.8361
173
+ Video/B063.mp4, 10.038333, 29.97002997002997, 44.152
174
+ Video/B064.mp4, 10.04, 29.97002997002997, 60.0559
175
+ Video/B065.mp4, 10.04, 29.97002997002997, 58.7207
176
+ Video/B066.mp4, 10.04, 29.97002997002997, 57.5947
177
+ Video/B067.mp4, 10.0, 30.0, 70.6193
178
+ Video/B068.mp4, 10.0, 30.0, 76.6719
179
+ Video/B069.mp4, 10.0, 30.0, 68.4948
180
+ Video/B070.mp4, 10.0, 30.0, 56.4128
181
+ Video/B071.mp4, 10.0, 30.0, 81.3516
182
+ Video/B072.mp4, 10.0, 30.0, 82.573
183
+ Video/B073.mp4, 10.0, 30.0, 66.0347
184
+ Video/B074.mp4, 10.0, 30.0, 83.0838
185
+ Video/B075.mp4, 10.0, 30.0, 55.4301
186
+ Video/B076.mp4, 10.0, 30.0, 85.2071
187
+ Video/B077.mp4, 10.0, 30.0, 81.9227
188
+ Video/B078.mp4, 10.0, 30.0, 87.6489
189
+ Video/B079.mp4, 10.0, 30.0, 73.5323
190
+ Video/B080.mp4, 10.0, 30.0, 78.8136
191
+ Video/B081.mp4, 10.033332999999999, 30.0, 46.3822
192
+ Video/B082.mp4, 10.027854999999999, 29.916666666666668, 39.3422
193
+ Video/B083.mp4, 10.01001, 29.97, 47.1299
194
+ Video/B084.mp4, 10.0, 30.0, 27.3091
195
+ Video/B085.mp4, 10.033332999999999, 30.0, 78.5556
196
+ Video/B086.mp4, 10.033332999999999, 30.0, 80.3536
197
+ Video/B087.mp4, 10.0, 30.0, 72.178
198
+ Video/B088.mp4, 10.0, 30.0, 52.8177
199
+ Video/B089.mp4, 10.01, 29.97002997002997, 74.2079
200
+ Video/B090.mp4, 10.016807, 29.75, 26.0497
201
+ Video/B091.mp4, 10.022345999999999, 29.833333333333332, 71.7738
202
+ Video/B092.mp4, 10.033332999999999, 30.0, 75.2238
203
+ Video/B093.mp4, 10.016499999999999, 29.850746268656717, 70.0112
204
+ Video/B094.mp4, 10.033332999999999, 30.0, 72.7368
205
+ Video/B095.mp4, 10.033332999999999, 30.0, 72.7588
206
+ Video/B096.mp4, 10.018093, 29.846, 57.6856
207
+ Video/B097.mp4, 10.028329, 29.416666666666668, 60.4053
208
+ Video/B098.mp4, 10.01, 29.97002997002997, 80.199
209
+ Video/B099.mp4, 10.022345999999999, 29.833333333333332, 69.5399
210
+ Video/B100.mp4, 10.01, 29.97002997002997, 80.2398
211
+ Video/B101.mp4, 10.01, 29.97002997002997, 59.2431
212
+ Video/B102.mp4, 10.01, 29.97002997002997, 88.0219
213
+ Video/B103.mp4, 10.033332999999999, 30.0, 25.2484
214
+ Video/B104.mp4, 10.033332999999999, 30.0, 71.3464
215
+ Video/B105.mp4, 10.01, 29.97002997002997, 70.9
216
+ Video/B106.mp4, 10.01, 29.97002997002997, 57.0578
217
+ Video/B107.mp4, 10.01, 29.97002997002997, 76.7188
218
+ Video/B108.mp4, 10.0, 30.0, 85.7418
219
+ Video/B109.mp4, 10.01, 29.97002997002997, 79.3295
220
+ Video/B110.mp4, 10.01, 29.97002997002997, 77.6527
221
+ Video/B111.mp4, 10.0, 30.0, 57.7553
222
+ Video/B112.mp4, 10.033332999999999, 30.0, 56.3086
223
+ Video/B113.mp4, 10.033332999999999, 30.0, 73.7705
224
+ Video/B114.mp4, 10.01, 29.97002997002997, 85.8129
225
+ Video/B115.mp4, 10.033332999999999, 30.0, 39.1942
226
+ Video/B116.mp4, 10.016499999999999, 29.850746268656717, 65.6578
227
+ Video/B117.mp4, 10.0, 30.0, 70.7436
228
+ Video/B118.mp4, 10.041667, 24.0, 71.8769
229
+ Video/B119.mp4, 10.033332999999999, 30.0, 69.3909
230
+ Video/B120.mp4, 10.043367, 29.97002997002997, 82.9815
231
+ Video/B121.mp4, 10.033332999999999, 30.0, 54.3529
232
+ Video/B122.mp4, 10.01, 29.97002997002997, 59.2645
233
+ Video/B123.mp4, 10.033332999999999, 30.0, 76.6535
234
+ Video/B124.mp4, 10.033332999999999, 30.0, 55.3564
235
+ Video/B125.mp4, 10.033332999999999, 30.0, 52.9389
236
+ Video/B126.mp4, 10.033332999999999, 30.0, 73.5285
237
+ Video/B127.mp4, 10.01, 29.97002997002997, 58.382
238
+ Video/B128.mp4, 10.01, 29.97002997002997, 59.2601
239
+ Video/B129.mp4, 10.01, 29.97002997002997, 38.2353
240
+ Video/B130.mp4, 10.01001, 29.97, 52.7733
241
+ Video/B131.mp4, 10.01, 29.97002997002997, 37.062
242
+ Video/B132.mp4, 10.036451, 20.027, 42.6774
243
+ Video/B133.mp4, 10.01, 29.97002997002997, 81.2515
244
+ Video/B134.mp4, 10.033332999999999, 30.0, 69.35
245
+ Video/B135.mp4, 10.043367, 29.97002997002997, 79.775
246
+ Video/B136.mp4, 10.01, 29.97002997002997, 71.8718
247
+ Video/B137.mp4, 10.033332999999999, 30.0, 47.1258
248
+ Video/B138.mp4, 10.0, 30.0, 79.3155
249
+ Video/B139.mp4, 10.033332999999999, 30.0, 68.8052
250
+ Video/B140.mp4, 10.043367, 29.97002997002997, 68.4787
251
+ Video/B141.mp4, 10.01, 29.97002997002997, 86.3333
252
+ Video/B142.mp4, 10.033332999999999, 30.0, 64.9667
253
+ Video/B143.mp4, 10.033332999999999, 30.0, 64.0795
254
+ Video/B144.mp4, 10.01, 29.97002997002997, 46.1283
255
+ Video/B145.mp4, 10.043367, 29.97002997002997, 87.7442
256
+ Video/B146.mp4, 10.089385, 29.833333333333332, 62.9054
257
+ Video/B147.mp4, 10.022345999999999, 29.833333333333332, 73.0254
258
+ Video/B148.mp4, 10.027854999999999, 29.916666666666668, 27.5249
259
+ Video/B149.mp4, 10.033332999999999, 30.0, 51.5759
260
+ Video/B150.mp4, 10.016499999999999, 29.850746268656717, 58.6421
261
+ Video/B151.mp4, 10.027854999999999, 29.916666666666668, 71.6954
262
+ Video/B152.mp4, 10.075641, 23.919073800308954, 70.9887
263
+ Video/B153.mp4, 10.012811, 29.662, 62.9021
264
+ Video/B154.mp4, 10.031096, 29.90700104493208, 49.736
265
+ Video/B155.mp4, 10.01, 29.97002997002997, 43.4969
266
+ Video/B156.mp4, 10.0, 30.0, 69.2067
267
+ Video/B157.mp4, 10.0, 30.0, 75.9554
268
+ Video/B158.mp4, 10.043367, 29.97002997002997, 76.8757
269
+ Video/B159.mp4, 10.066666999999999, 30.0, 35.9529
270
+ Video/B160.mp4, 10.022122, 29.834, 34.3556
271
+ Video/B161.mp4, 9.566666999999999, 30.0, 71.6517
272
+ Video/B162.mp4, 10.050419999999999, 29.75, 51.3829
273
+ Video/B163.mp4, 10.005035999999999, 29.785, 45.5813
274
+ Video/B164.mp4, 10.043367, 29.97002997002997, 83.1737
275
+ Video/B165.mp4, 10.01, 29.97002997002997, 67.3736
276
+ Video/B166.mp4, 10.033332999999999, 30.0, 57.8166
277
+ Video/B167.mp4, 10.0, 30.0, 14.8662
278
+ Video/B168.mp4, 10.01, 29.97002997002997, 56.3245
279
+ Video/B169.mp4, 10.0, 30.0, 54.0769
280
+ Video/B170.mp4, 10.027854999999999, 29.916666666666668, 35.8132
281
+ Video/B171.mp4, 10.01, 29.97002997002997, 89.7447
282
+ Video/B172.mp4, 10.01, 29.97002997002997, 75.619
283
+ Video/B173.mp4, 10.0, 30.0, 66.0171
284
+ Video/B174.mp4, 10.0, 30.0, 17.9012
285
+ Video/B175.mp4, 10.027854999999999, 29.916666666666668, 38.6221
286
+ Video/B176.mp4, 10.01, 29.97002997002997, 68.9807
287
+ Video/B177.mp4, 10.033332999999999, 30.0, 40.7351
288
+ Video/B178.mp4, 10.033332999999999, 30.0, 61.3542
289
+ Video/B179.mp4, 10.033332999999999, 30.0, 56.5635
290
+ Video/B180.mp4, 10.01, 29.97002997002997, 72.9946
291
+ Video/B181.mp4, 10.055866, 29.833333333333332, 38.5263
292
+ Video/B182.mp4, 10.01, 29.97002997002997, 82.9684
293
+ Video/B183.mp4, 10.043367, 29.97002997002997, 85.2941
294
+ Video/B184.mp4, 10.033332999999999, 30.0, 57.0256
295
+ Video/B185.mp4, 10.0, 30.0, 65.4143
296
+ Video/B186.mp4, 10.01, 29.97002997002997, 64.5275
297
+ Video/B187.mp4, 10.01, 29.97002997002997, 90.199
298
+ Video/B188.mp4, 10.0, 30.0, 70.3474
299
+ Video/B189.mp4, 10.043367, 29.97002997002997, 49.8683
300
+ Video/B190.mp4, 10.01001, 29.97, 72.1381
301
+ Video/B191.mp4, 10.043367, 29.97002997002997, 82.8227
302
+ Video/B192.mp4, 10.066666999999999, 30.0, 55.6981
303
+ Video/B193.mp4, 10.01, 29.97002997002997, 70.5926
304
+ Video/B194.mp4, 10.0, 30.0, 55.5238
305
+ Video/B195.mp4, 10.027854999999999, 29.916666666666668, 70.2275
306
+ Video/B196.mp4, 10.033332999999999, 30.0, 50.7012
307
+ Video/B197.mp4, 10.004363999999999, 29.787, 36.4242
308
+ Video/B198.mp4, 10.043367, 29.97002997002997, 84.7438
309
+ Video/B199.mp4, 10.01001, 29.97, 76.1538
310
+ Video/B200.mp4, 10.01, 29.97002997002997, 36.2614
311
+ Video/B201.mp4, 10.0, 30.0, 49.4186
312
+ Video/B202.mp4, 10.01, 29.97002997002997, 78.8095
313
+ Video/B203.mp4, 10.01, 29.97002997002997, 78.539
314
+ Video/B204.mp4, 10.01, 29.97002997002997, 72.3743
315
+ Video/B205.mp4, 10.0, 30.0, 63.2195
316
+ Video/B206.mp4, 10.027854999999999, 29.916666666666668, 80.9784
317
+ Video/B207.mp4, 10.01, 29.97002997002997, 86.5106
318
+ Video/B208.mp4, 10.019802, 25.25, 74.1111
319
+ Video/B209.mp4, 10.033332999999999, 30.0, 74.7753
320
+ Video/B210.mp4, 10.01, 29.97002997002997, 29.4167
321
+ Video/B211.mp4, 10.01, 29.97002997002997, 80.995
322
+ Video/B212.mp4, 10.028329, 29.416666666666668, 71.6895
323
+ Video/B213.mp4, 10.01, 29.97002997002997, 85.9296
324
+ Video/B214.mp4, 10.01, 29.97002997002997, 45.2158
325
+ Video/B215.mp4, 10.0, 30.0, 49.1789
326
+ Video/B216.mp4, 10.033332999999999, 30.0, 85.8934
327
+ Video/B217.mp4, 10.01, 29.97002997002997, 37.7778
328
+ Video/B218.mp4, 10.043367, 29.97002997002997, 89.0446
329
+ Video/B219.mp4, 10.043367, 29.97002997002997, 78.0149
330
+ Video/B220.mp4, 10.01, 29.97002997002997, 70.117
331
+ Video/B221.mp4, 10.0, 30.0, 80.4144
332
+ Video/B222.mp4, 10.083333, 24.0, 34.2513
333
+ Video/B223.mp4, 10.01, 29.97002997002997, 60.6067
334
+ Video/B224.mp4, 10.01, 29.97002997002997, 81.0055
335
+ Video/B225.mp4, 10.033332999999999, 30.0, 80.4624
336
+ Video/B226.mp4, 10.022345999999999, 29.833333333333332, 62.5157
337
+ Video/B227.mp4, 10.01, 29.97002997002997, 90.2448
338
+ Video/B228.mp4, 10.033332999999999, 30.0, 71.1987
339
+ Video/B229.mp4, 10.01, 29.97002997002997, 73.8866
340
+ Video/B230.mp4, 10.03009, 29.91, 73.6337
341
+ Video/B231.mp4, 10.01, 29.97002997002997, 65.0412
342
+ Video/B232.mp4, 10.0, 29.8, 65.1549
343
+ Video/B233.mp4, 10.016499999999999, 29.850746268656717, 50.8201
344
+ Video/B234.mp4, 10.033332999999999, 30.0, 80.4612
345
+ Video/B235.mp4, 10.0, 30.0, 68.4103
346
+ Video/B236.mp4, 10.020107, 29.84, 53.3737
347
+ Video/B237.mp4, 10.033332999999999, 30.0, 70.1209
348
+ Video/B238.mp4, 10.016499999999999, 29.850746268656717, 60.5058
349
+ Video/B239.mp4, 10.0, 30.0, 67.5337
350
+ Video/B240.mp4, 10.086454999999999, 29.842, 67.1576
351
+ Video/B241.mp4, 10.027854999999999, 29.916666666666668, 66.459
352
+ Video/B242.mp4, 10.033332999999999, 30.0, 71.6724
353
+ Video/B243.mp4, 10.016499999999999, 29.850746268656717, 47.8021
354
+ Video/B244.mp4, 10.033332999999999, 30.0, 63.5397
355
+ Video/B245.mp4, 10.033332999999999, 30.0, 82.8444
356
+ Video/B246.mp4, 10.033332999999999, 30.0, 56.3813
357
+ Video/B247.mp4, 10.033332999999999, 30.0, 76.2626
358
+ Video/B248.mp4, 10.01, 29.97002997002997, 62.3922
359
+ Video/B249.mp4, 10.0, 30.0, 69.7869
360
+ Video/B250.mp4, 10.016499999999999, 29.850746268656717, 61.2128
361
+ Video/B251.mp4, 10.01, 29.97002997002997, 82.7214
362
+ Video/B252.mp4, 10.015691, 29.953, 73.2762
363
+ Video/B253.mp4, 10.033332999999999, 30.0, 72.1818
364
+ Video/B254.mp4, 10.033332999999999, 30.0, 54.569
365
+ Video/B255.mp4, 10.022345999999999, 29.833333333333332, 62.2424
366
+ Video/B256.mp4, 10.0, 30.0, 59.5503
367
+ Video/B257.mp4, 10.01, 29.97002997002997, 82.2251
368
+ Video/B258.mp4, 10.01, 29.97002997002997, 85.4278
369
+ Video/B259.mp4, 10.01, 29.97002997002997, 84.8989
370
+ Video/B260.mp4, 10.041667, 24.0, 6.22368
371
+ Video/B261.mp4, 10.033332999999999, 30.0, 78.0125
372
+ Video/B262.mp4, 10.01, 29.97002997002997, 51.9461
373
+ Video/B263.mp4, 10.033332999999999, 30.0, 64.4931
374
+ Video/B264.mp4, 10.033332999999999, 30.0, 78.114
375
+ Video/B265.mp4, 9.7, 30.0, 74.4638
376
+ Video/B266.mp4, 10.01, 29.97002997002997, 88.24
377
+ Video/B267.mp4, 10.01, 29.97002997002997, 62.3061
378
+ Video/B268.mp4, 10.027854999999999, 29.916666666666668, 25.0861
379
+ Video/B269.mp4, 10.022345999999999, 29.833333333333332, 66.5404
380
+ Video/B270.mp4, 10.043367, 29.97002997002997, 90.3281
381
+ Video/B271.mp4, 10.0, 30.0, 84.2073
382
+ Video/B272.mp4, 10.041667, 24.0, 49.6404
383
+ Video/B273.mp4, 10.066666999999999, 30.0, 53.4568
384
+ Video/B274.mp4, 10.033332999999999, 30.0, 54.9497
385
+ Video/B275.mp4, 10.043367, 29.97002997002997, 79.0355
386
+ Video/B276.mp4, 10.043367, 29.97002997002997, 83.5027
387
+ Video/B277.mp4, 10.01, 29.97002997002997, 28.6053
388
+ Video/B278.mp4, 10.01, 29.97002997002997, 70.3646
389
+ Video/B279.mp4, 10.033332999999999, 30.0, 67.9827
390
+ Video/B280.mp4, 10.0, 30.0, 69.5136
391
+ Video/B281.mp4, 10.01, 29.97002997002997, 50.288
392
+ Video/B282.mp4, 9.994429, 29.916666666666668, 70.8817
393
+ Video/B283.mp4, 10.027009, 29.62, 64.2865
394
+ Video/B284.mp4, 10.01, 29.97002997002997, 71.3248
395
+ Video/B285.mp4, 10.01, 29.97002997002997, 73.6837
396
+ Video/B286.mp4, 10.033332999999999, 30.0, 38.6012
397
+ Video/B287.mp4, 10.0, 30.0, 70.2771
398
+ Video/B288.mp4, 10.043367, 29.97002997002997, 82.4922
399
+ Video/B289.mp4, 10.01, 29.97002997002997, 84.8406
400
+ Video/B290.mp4, 10.033332999999999, 30.0, 62.4348
401
+ Video/B291.mp4, 10.0, 30.0, 71.1256
402
+ Video/B292.mp4, 10.033332999999999, 30.0, 81.9686
403
+ Video/B293.mp4, 10.066666999999999, 30.0, 78.2485
404
+ Video/B294.mp4, 10.0, 30.0, 66.3431
405
+ Video/B295.mp4, 10.01, 29.97002997002997, 54.7539
406
+ Video/B296.mp4, 10.021044, 29.937, 63.203
407
+ Video/B297.mp4, 10.033332999999999, 30.0, 61.4341
408
+ Video/B298.mp4, 10.033332999999999, 30.0, 42.3916
409
+ Video/B299.mp4, 10.033332999999999, 30.0, 72.21
410
+ Video/B300.mp4, 10.033332999999999, 30.0, 27.691
411
+ Video/B301.mp4, 10.0, 30.0, 49.1242
412
+ Video/B302.mp4, 10.01, 29.97002997002997, 72.25
413
+ Video/B303.mp4, 10.041667, 24.0, 18.8824
414
+ Video/B304.mp4, 10.043367, 29.97002997002997, 91.7312
415
+ Video/B305.mp4, 10.01, 29.97002997002997, 66.0914
416
+ Video/B306.mp4, 10.033332999999999, 30.0, 79.1263
417
+ Video/B307.mp4, 10.022345999999999, 29.833333333333332, 62.3543
418
+ Video/B308.mp4, 10.033332999999999, 30.0, 82.8098
419
+ Video/B309.mp4, 10.0, 30.0, 75.774
420
+ Video/B310.mp4, 10.033332999999999, 30.0, 85.1746
421
+ Video/B311.mp4, 10.033332999999999, 30.0, 88.8258
422
+ Video/B312.mp4, 10.033332999999999, 30.0, 61.1696
423
+ Video/B313.mp4, 10.033332999999999, 30.0, 89.2074
424
+ Video/B314.mp4, 10.01, 29.97002997002997, 94.2865
425
+ Video/B315.mp4, 10.033332999999999, 30.0, 85.593
426
+ Video/B316.mp4, 10.033332999999999, 30.0, 89.1236
427
+ Video/C001.mp4, 10.04, 30.0, 84.2191
428
+ Video/D001.mp4, 10.020211, 16.666666666666668, 32.7293
429
+ Video/E001.mp4, 10.0314, 30.0, 51.6061
430
+ Video/F001.mp4, 10.026644, 30.020013342228154, 69.3289
431
+ Video/F002.mp4, 10.007033, 30.0, 29.0101
432
+ Video/F003.mp4, 10.011688999999999, 30.0, 40.2153
433
+ Video/F004.mp4, 10.001610999999999, 30.0, 18.6163
434
+ Video/F005.mp4, 10.004222, 30.0, 56.6974
435
+ Video/F006.mp4, 10.01, 29.97002997002997, 59.6044
436
+ Video/F007.mp4, 10.008333, 29.97002997002997, 42.7267
437
+ Video/G001.mp4, 10.0, 30.0, 72.0884
438
+ Video/G002.mp4, 10.033332999999999, 30.0, 47.5172
439
+ Video/G003.mp4, 10.033332999999999, 30.0, 73.1593
440
+ Video/G004.mp4, 10.0, 30.0, 76.068
441
+ Video/G005.mp4, 10.033332999999999, 30.0, 85.2933
442
+ Video/G006.mp4, 10.0, 30.0, 72.881
443
+ Video/G007.mp4, 10.01, 29.97002997002997, 87.2169
444
+ Video/G008.mp4, 10.01, 29.97002997002997, 65.8113
445
+ Video/G009.mp4, 10.01, 29.97002997002997, 72.7931
446
+ Video/G010.mp4, 10.01, 29.97002997002997, 68.4129
447
+ Video/G011.mp4, 10.01, 29.97002997002997, 24.7697
448
+ Video/G012.mp4, 10.01, 29.97002997002997, 51.3923
449
+ Video/G013.mp4, 10.036451, 20.027, 73.2679
450
+ Video/G014.mp4, 10.01, 29.97002997002997, 56.096
451
+ Video/G015.mp4, 10.01, 29.97002997002997, 89.2513
452
+ Video/G016.mp4, 10.01, 29.97002997002997, 80.5959
453
+ Video/G017.mp4, 10.0, 30.0, 75.1832
454
+ Video/G018.mp4, 10.0, 30.0, 83.5848
455
+ Video/G019.mp4, 10.0, 30.0, 71.6599
456
+ Video/G020.mp4, 10.033332999999999, 30.0, 60.6335
457
+ Video/G021.mp4, 10.0, 30.0, 41.2703
458
+ Video/G022.mp4, 10.0, 30.0, 58.5622
459
+ Video/G023.mp4, 10.0, 30.0, 58.9406
460
+ Video/G024.mp4, 10.033332999999999, 30.0, 59.6292
461
+ Video/G025.mp4, 10.0, 30.0, 40.5028
462
+ Video/G026.mp4, 10.033332999999999, 30.0, 54.4118
463
+ Video/G027.mp4, 10.033332999999999, 30.0, 45.7368
464
+ Video/G028.mp4, 10.033332999999999, 30.0, 61.3883
465
+ Video/G029.mp4, 10.033332999999999, 30.0, 50.4835
466
+ Video/G030.mp4, 10.01, 29.97002997002997, 77.7895
467
+ Video/G031.mp4, 10.01, 29.97002997002997, 70.6823
468
+ Video/G032.mp4, 10.01, 29.97002997002997, 53.278
469
+ Video/G033.mp4, 10.01, 29.97002997002997, 60.6505
470
+ Video/G034.mp4, 10.01, 29.97002997002997, 82.4354
471
+ Video/G035.mp4, 10.01, 29.97002997002997, 72.2569
472
+ Video/G036.mp4, 10.01, 29.97002997002997, 78.6898
473
+ Video/G037.mp4, 10.01, 29.97002997002997, 78.2783
474
+ Video/G038.mp4, 10.01, 29.97002997002997, 72.125
475
+ Video/G039.mp4, 10.01, 29.97002997002997, 73.5789
476
+ Video/G040.mp4, 10.01, 29.97002997002997, 80.9306
477
+ Video/G041.mp4, 10.016499999999999, 29.850746268656717, 43.7181
478
+ Video/G042.mp4, 10.022345999999999, 29.833333333333332, 70.191
479
+ Video/G043.mp4, 10.022345999999999, 29.833333333333332, 77.4279
480
+ Video/G044.mp4, 10.036432999999999, 29.791459781529294, 74.9635
481
+ Video/G045.mp4, 10.016499999999999, 29.850746268656717, 62.4847
482
+ Video/G046.mp4, 10.016499999999999, 29.850746268656717, 78.8212
483
+ Video/G047.mp4, 10.066666999999999, 30.0, 70.1804
484
+ Video/G048.mp4, 10.066666999999999, 30.0, 78.9434
485
+ Video/G049.mp4, 9.994429, 29.916666666666668, 44.5829
486
+ Video/G050.mp4, 10.033332999999999, 30.0, 79.9171
487
+ Video/G051.mp4, 10.066666999999999, 30.0, 75.6131
488
+ Video/G052.mp4, 10.066666999999999, 30.0, 67.6825
489
+ Video/G053.mp4, 10.033332999999999, 30.0, 74.3118
490
+ Video/G054.mp4, 10.0, 30.0, 56.0
491
+ Video/G055.mp4, 10.066666999999999, 30.0, 63.478
492
+ Video/G056.mp4, 10.01, 29.97002997002997, 62.0506
493
+ Video/G057.mp4, 10.033332999999999, 30.0, 54.5354
494
+ Video/G058.mp4, 10.033332999999999, 30.0, 65.1421
495
+ Video/G059.mp4, 10.033332999999999, 30.0, 38.0054
496
+ Video/G060.mp4, 10.033332999999999, 30.0, 71.4348
497
+ Video/G061.mp4, 10.033332999999999, 30.0, 73.8883
498
+ Video/G062.mp4, 10.041667, 24.0, 49.3832
499
+ Video/G063.mp4, 10.115701999999999, 20.166666666666668, 29.0845
500
+ Video/G064.mp4, 10.016529, 20.166666666666668, 11.3333
501
+ Video/G065.mp4, 10.020619, 24.25, 68.6814
502
+ Video/G066.mp4, 10.097999999999999, 20.2020202020202, 11.9079
503
+ Video/G067.mp4, 10.022345999999999, 29.833333333333332, 57.2944
504
+ Video/G068.mp4, 10.016499999999999, 29.850746268656717, 59.4638
505
+ Video/G069.mp4, 10.022345999999999, 29.833333333333332, 66.1244
506
+ Video/G070.mp4, 10.066666999999999, 30.0, 79.5904
507
+ Video/G071.mp4, 10.0, 30.0, 48.3714
508
+ Video/G072.mp4, 10.01, 29.97002997002997, 47.7432
509
+ Video/G073.mp4, 10.0, 30.0, 18.6333
510
+ Video/G074.mp4, 10.01, 29.97002997002997, 83.1826
511
+ Video/G075.mp4, 10.049915, 29.851, 64.293
512
+ Video/G076.mp4, 10.020443, 29.839, 80.0361
513
+ Video/G077.mp4, 10.022345999999999, 29.833333333333332, 85.2383
514
+ Video/G078.mp4, 10.022345999999999, 29.833333333333332, 81.5829
515
+ Video/G079.mp4, 10.033332999999999, 30.0, 15.3576
516
+ Video/G080.mp4, 10.027854999999999, 29.916666666666668, 26.2575
517
+ Video/G081.mp4, 10.041667, 24.0, 46.7325
518
+ Video/G082.mp4, 10.043367, 29.97002997002997, 78.019
519
+ Video/G083.mp4, 10.01, 29.97002997002997, 78.7753
520
+ Video/G084.mp4, 10.0, 30.0, 59.3198
521
+ Video/G085.mp4, 10.01, 29.97002997002997, 76.4971
522
+ Video/G086.mp4, 10.01, 29.97002997002997, 41.3556
523
+ Video/G087.mp4, 10.01, 29.97002997002997, 71.1184
524
+ Video/G088.mp4, 10.01, 29.97002997002997, 56.0826
525
+ Video/G089.mp4, 10.041667, 24.0, 31.2638
526
+ Video/G090.mp4, 10.043367, 29.97002997002997, 75.4464
527
+ Video/G091.mp4, 10.01, 29.97002997002997, 84.2475
528
+ Video/G092.mp4, 10.01, 29.97002997002997, 67.8351
529
+ Video/G093.mp4, 10.01, 29.97002997002997, 66.1105
530
+ Video/G094.mp4, 10.01, 29.97002997002997, 66.865
531
+ Video/G095.mp4, 10.01, 29.97002997002997, 79.2192
532
+ Video/G096.mp4, 10.01, 29.97002997002997, 86.6243
533
+ Video/G097.mp4, 10.01, 29.97002997002997, 84.6976
534
+ Video/G098.mp4, 10.01, 29.97002997002997, 35.8556
535
+ Video/G099.mp4, 10.01, 29.97002997002997, 80.5632
536
+ Video/G100.mp4, 10.0, 30.0, 80.0573
537
+ Video/G101.mp4, 10.01, 29.97002997002997, 63.0345
538
+ Video/G102.mp4, 10.01, 29.97002997002997, 67.75
539
+ Video/G103.mp4, 10.01, 29.97002997002997, 81.3545
540
+ Video/G104.mp4, 10.01, 29.97002997002997, 86.1474
541
+ Video/G105.mp4, 10.033332999999999, 30.0, 45.0703
542
+ Video/G106.mp4, 10.043367, 29.97002997002997, 23.4251
543
+ Video/G107.mp4, 10.033332999999999, 30.0, 49.2717
544
+ Video/G108.mp4, 10.041667, 24.0, 58.5988
545
+ Video/G109.mp4, 10.0, 30.0, 79.2459
546
+ Video/G110.mp4, 10.0, 30.0, 83.5217
547
+ Video/G111.mp4, 10.033332999999999, 30.0, 55.0282
548
+ Video/G112.mp4, 10.1, 30.0, 74.3393
549
+ Video/G113.mp4, 10.043367, 29.97002997002997, 74.4923
550
+ Video/G114.mp4, 10.033332999999999, 30.0, 65.0532
551
+ Video/G115.mp4, 10.043367, 29.97002997002997, 77.8551
552
+ Video/G116.mp4, 10.043367, 29.97002997002997, 88.4254
553
+ Video/G117.mp4, 10.043367, 29.97002997002997, 71.7751
554
+ Video/G118.mp4, 10.043367, 29.97002997002997, 65.9
555
+ Video/G119.mp4, 10.043367, 29.97002997002997, 76.8795
556
+ Video/H001.mp4, 10.0, 30.0, 63.6974
557
+ Video/H002.mp4, 10.010714, 29.868, 69.1073
558
+ Video/I001.mp4, 10.01, 29.97002997002997, 72.3462
559
+ Video/I002.mp4, 10.022122, 29.834, 63.4225
560
+ Video/I003.mp4, 10.018032, 29.946, 69.5414
561
+ Video/J001.mp4, 10.011974, 30.064, 71.2778
562
+ Video/J002.mp4, 10.022197, 29.734, 62.36
563
+ Video/J003.mp4, 10.003988, 30.088, 60.4424
564
+ Video/J004.mp4, 10.028224999999999, 30.115, 62.645
565
+ Video/J005.mp4, 10.034182, 27.207, 24.1117
566
+ Video/K001.mp4, 10.0, 30.0, 33.0114
567
+ Video/K002.mp4, 10.016694, 29.95, 53.9415
568
+ Video/L001.mp4, 10.006667, 29.97002997002997, 76.5463
569
+ Video/M001.mp4, 10.0, 30.0, 36.9665
570
+ Video/M002.mp4, 10.0, 30.0, 69.3081
571
+ Video/M003.mp4, 10.0, 30.0, 21.1084
572
+ Video/N001.mp4, 10.004999999999999, 120.0, 67.445
573
+ Video/O001.mp4, 10.036667, 24.0, 27.8
574
+ Video/O002.mp4, 10.04, 120.0, 31.0169
575
+ Video/P001.mp4, 10.0, 30.0, 64.399
576
+ Video/P002.mp4, 10.0, 30.0, 60.1489
577
+ Video/P003.mp4, 10.0, 30.0, 61.749
578
+ Video/P004.mp4, 10.0, 30.0, 63.4194
579
+ Video/P005.mp4, 10.0, 30.0, 74.1421
580
+ Video/P006.mp4, 10.0, 30.0, 54.2271
581
+ Video/P007.mp4, 10.0, 30.0, 38.2139
582
+ Video/P008.mp4, 10.0, 30.0, 38.4545
583
+ Video/P009.mp4, 10.0, 30.0, 55.9663
584
+ Video/Q001.mp4, 10.026644, 30.020013342228154, 61.1466
585
+ Video/R001.mp4, 10.026644, 30.020013342228154, 72.4848
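For reference, each LIVE_VQC line above is comma-separated with four fields: the relative video path, the clip duration, the frame rate, and the quality score. That field interpretation is an assumption inferred from the values above (durations near 10 s, frame rates near 30 fps, scores on a roughly 0-100 scale), so the sketch below is illustrative rather than the repository's own loader:

# Minimal sketch: parse one comma-separated LIVE_VQC label line into a dict.
# Field meanings (path, duration_s, fps, score) are assumptions from the values above.
def parse_live_vqc_line(line):
    path, duration, fps, score = [field.strip() for field in line.split(",")]
    return {
        "path": path,              # e.g. "Video/A001.mp4"
        "duration_s": float(duration),
        "fps": float(fps),
        "score": float(score),
    }

with open("examplar_data_labels/LIVE_VQC/labels.txt") as f:
    labels = [parse_live_vqc_line(line) for line in f if line.strip()]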
examplar_data_labels/LSVQ/labels.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/LSVQ/labels_1080p.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/LSVQ/labels_test.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/PIPAL/labels.txt ADDED
The diff for this file is too large to render. See raw diff
 
examplar_data_labels/PIPAL_NTIRE22/labels.txt ADDED
@@ -0,0 +1,1650 @@
1
+ A0000_10_00.bmp, -1, -1, -1
2
+ A0000_10_01.bmp, -1, -1, -1
3
+ A0000_10_02.bmp, -1, -1, -1
4
+ A0000_10_03.bmp, -1, -1, -1
5
+ A0000_10_04.bmp, -1, -1, -1
6
+ A0000_10_05.bmp, -1, -1, -1
7
+ A0000_10_06.bmp, -1, -1, -1
8
+ A0000_10_07.bmp, -1, -1, -1
9
+ A0000_10_08.bmp, -1, -1, -1
10
+ A0000_10_10.bmp, -1, -1, -1
11
+ A0000_10_11.bmp, -1, -1, -1
12
+ A0000_10_12.bmp, -1, -1, -1
13
+ A0000_10_14.bmp, -1, -1, -1
14
+ A0000_10_15.bmp, -1, -1, -1
15
+ A0000_10_16.bmp, -1, -1, -1
16
+ A0000_10_17.bmp, -1, -1, -1
17
+ A0000_10_18.bmp, -1, -1, -1
18
+ A0000_10_19.bmp, -1, -1, -1
19
+ A0000_10_20.bmp, -1, -1, -1
20
+ A0000_10_21.bmp, -1, -1, -1
21
+ A0000_10_24.bmp, -1, -1, -1
22
+ A0000_10_26.bmp, -1, -1, -1
23
+ A0000_10_27.bmp, -1, -1, -1
24
+ A0000_10_28.bmp, -1, -1, -1
25
+ A0000_10_29.bmp, -1, -1, -1
26
+ A0000_10_30.bmp, -1, -1, -1
27
+ A0000_10_31.bmp, -1, -1, -1
28
+ A0000_10_32.bmp, -1, -1, -1
29
+ A0000_10_34.bmp, -1, -1, -1
30
+ A0000_10_35.bmp, -1, -1, -1
31
+ A0000_10_36.bmp, -1, -1, -1
32
+ A0000_10_37.bmp, -1, -1, -1
33
+ A0000_10_38.bmp, -1, -1, -1
34
+ A0000_10_39.bmp, -1, -1, -1
35
+ A0000_10_40.bmp, -1, -1, -1
36
+ A0000_10_41.bmp, -1, -1, -1
37
+ A0000_10_42.bmp, -1, -1, -1
38
+ A0000_10_43.bmp, -1, -1, -1
39
+ A0000_10_44.bmp, -1, -1, -1
40
+ A0000_10_46.bmp, -1, -1, -1
41
+ A0000_10_47.bmp, -1, -1, -1
42
+ A0000_10_48.bmp, -1, -1, -1
43
+ A0000_10_49.bmp, -1, -1, -1
44
+ A0000_10_50.bmp, -1, -1, -1
45
+ A0000_10_51.bmp, -1, -1, -1
46
+ A0000_10_52.bmp, -1, -1, -1
47
+ A0000_10_53.bmp, -1, -1, -1
48
+ A0000_10_54.bmp, -1, -1, -1
49
+ A0000_10_55.bmp, -1, -1, -1
50
+ A0000_10_56.bmp, -1, -1, -1
51
+ A0000_10_58.bmp, -1, -1, -1
52
+ A0000_10_61.bmp, -1, -1, -1
53
+ A0000_10_62.bmp, -1, -1, -1
54
+ A0000_10_63.bmp, -1, -1, -1
55
+ A0000_10_64.bmp, -1, -1, -1
56
+ A0000_10_65.bmp, -1, -1, -1
57
+ A0000_10_66.bmp, -1, -1, -1
58
+ A0000_10_67.bmp, -1, -1, -1
59
+ A0000_10_68.bmp, -1, -1, -1
60
+ A0000_10_69.bmp, -1, -1, -1
61
+ A0000_10_70.bmp, -1, -1, -1
62
+ A0000_10_71.bmp, -1, -1, -1
63
+ A0000_10_72.bmp, -1, -1, -1
64
+ A0000_10_73.bmp, -1, -1, -1
65
+ A0000_10_74.bmp, -1, -1, -1
66
+ A0000_10_75.bmp, -1, -1, -1
67
+ A0004_10_00.bmp, -1, -1, -1
68
+ A0004_10_01.bmp, -1, -1, -1
69
+ A0004_10_02.bmp, -1, -1, -1
70
+ A0004_10_03.bmp, -1, -1, -1
71
+ A0004_10_04.bmp, -1, -1, -1
72
+ A0004_10_05.bmp, -1, -1, -1
73
+ A0004_10_06.bmp, -1, -1, -1
74
+ A0004_10_07.bmp, -1, -1, -1
75
+ A0004_10_08.bmp, -1, -1, -1
76
+ A0004_10_10.bmp, -1, -1, -1
77
+ A0004_10_11.bmp, -1, -1, -1
78
+ A0004_10_12.bmp, -1, -1, -1
79
+ A0004_10_14.bmp, -1, -1, -1
80
+ A0004_10_15.bmp, -1, -1, -1
81
+ A0004_10_16.bmp, -1, -1, -1
82
+ A0004_10_17.bmp, -1, -1, -1
83
+ A0004_10_18.bmp, -1, -1, -1
84
+ A0004_10_19.bmp, -1, -1, -1
85
+ A0004_10_20.bmp, -1, -1, -1
86
+ A0004_10_21.bmp, -1, -1, -1
87
+ A0004_10_24.bmp, -1, -1, -1
88
+ A0004_10_26.bmp, -1, -1, -1
89
+ A0004_10_27.bmp, -1, -1, -1
90
+ A0004_10_28.bmp, -1, -1, -1
91
+ A0004_10_29.bmp, -1, -1, -1
92
+ A0004_10_30.bmp, -1, -1, -1
93
+ A0004_10_31.bmp, -1, -1, -1
94
+ A0004_10_32.bmp, -1, -1, -1
95
+ A0004_10_34.bmp, -1, -1, -1
96
+ A0004_10_35.bmp, -1, -1, -1
97
+ A0004_10_36.bmp, -1, -1, -1
98
+ A0004_10_37.bmp, -1, -1, -1
99
+ A0004_10_38.bmp, -1, -1, -1
100
+ A0004_10_39.bmp, -1, -1, -1
101
+ A0004_10_40.bmp, -1, -1, -1
102
+ A0004_10_41.bmp, -1, -1, -1
103
+ A0004_10_42.bmp, -1, -1, -1
104
+ A0004_10_43.bmp, -1, -1, -1
105
+ A0004_10_44.bmp, -1, -1, -1
106
+ A0004_10_46.bmp, -1, -1, -1
107
+ A0004_10_47.bmp, -1, -1, -1
108
+ A0004_10_48.bmp, -1, -1, -1
109
+ A0004_10_49.bmp, -1, -1, -1
110
+ A0004_10_50.bmp, -1, -1, -1
111
+ A0004_10_51.bmp, -1, -1, -1
112
+ A0004_10_52.bmp, -1, -1, -1
113
+ A0004_10_53.bmp, -1, -1, -1
114
+ A0004_10_54.bmp, -1, -1, -1
115
+ A0004_10_55.bmp, -1, -1, -1
116
+ A0004_10_56.bmp, -1, -1, -1
117
+ A0004_10_58.bmp, -1, -1, -1
118
+ A0004_10_61.bmp, -1, -1, -1
119
+ A0004_10_62.bmp, -1, -1, -1
120
+ A0004_10_63.bmp, -1, -1, -1
121
+ A0004_10_64.bmp, -1, -1, -1
122
+ A0004_10_65.bmp, -1, -1, -1
123
+ A0004_10_66.bmp, -1, -1, -1
124
+ A0004_10_67.bmp, -1, -1, -1
125
+ A0004_10_68.bmp, -1, -1, -1
126
+ A0004_10_69.bmp, -1, -1, -1
127
+ A0004_10_70.bmp, -1, -1, -1
128
+ A0004_10_71.bmp, -1, -1, -1
129
+ A0004_10_72.bmp, -1, -1, -1
130
+ A0004_10_73.bmp, -1, -1, -1
131
+ A0004_10_74.bmp, -1, -1, -1
132
+ A0004_10_75.bmp, -1, -1, -1
133
+ A0011_10_00.bmp, -1, -1, -1
134
+ A0011_10_01.bmp, -1, -1, -1
135
+ A0011_10_02.bmp, -1, -1, -1
136
+ A0011_10_03.bmp, -1, -1, -1
137
+ A0011_10_04.bmp, -1, -1, -1
138
+ A0011_10_05.bmp, -1, -1, -1
139
+ A0011_10_06.bmp, -1, -1, -1
140
+ A0011_10_07.bmp, -1, -1, -1
141
+ A0011_10_08.bmp, -1, -1, -1
142
+ A0011_10_10.bmp, -1, -1, -1
143
+ A0011_10_11.bmp, -1, -1, -1
144
+ A0011_10_12.bmp, -1, -1, -1
145
+ A0011_10_14.bmp, -1, -1, -1
146
+ A0011_10_15.bmp, -1, -1, -1
147
+ A0011_10_16.bmp, -1, -1, -1
148
+ A0011_10_17.bmp, -1, -1, -1
149
+ A0011_10_18.bmp, -1, -1, -1
150
+ A0011_10_19.bmp, -1, -1, -1
151
+ A0011_10_20.bmp, -1, -1, -1
152
+ A0011_10_21.bmp, -1, -1, -1
153
+ A0011_10_24.bmp, -1, -1, -1
154
+ A0011_10_26.bmp, -1, -1, -1
155
+ A0011_10_27.bmp, -1, -1, -1
156
+ A0011_10_28.bmp, -1, -1, -1
157
+ A0011_10_29.bmp, -1, -1, -1
158
+ A0011_10_30.bmp, -1, -1, -1
159
+ A0011_10_31.bmp, -1, -1, -1
160
+ A0011_10_32.bmp, -1, -1, -1
161
+ A0011_10_34.bmp, -1, -1, -1
162
+ A0011_10_35.bmp, -1, -1, -1
163
+ A0011_10_36.bmp, -1, -1, -1
164
+ A0011_10_37.bmp, -1, -1, -1
165
+ A0011_10_38.bmp, -1, -1, -1
166
+ A0011_10_39.bmp, -1, -1, -1
167
+ A0011_10_40.bmp, -1, -1, -1
168
+ A0011_10_41.bmp, -1, -1, -1
169
+ A0011_10_42.bmp, -1, -1, -1
170
+ A0011_10_43.bmp, -1, -1, -1
171
+ A0011_10_44.bmp, -1, -1, -1
172
+ A0011_10_46.bmp, -1, -1, -1
173
+ A0011_10_47.bmp, -1, -1, -1
174
+ A0011_10_48.bmp, -1, -1, -1
175
+ A0011_10_49.bmp, -1, -1, -1
176
+ A0011_10_50.bmp, -1, -1, -1
177
+ A0011_10_51.bmp, -1, -1, -1
178
+ A0011_10_52.bmp, -1, -1, -1
179
+ A0011_10_53.bmp, -1, -1, -1
180
+ A0011_10_54.bmp, -1, -1, -1
181
+ A0011_10_55.bmp, -1, -1, -1
182
+ A0011_10_56.bmp, -1, -1, -1
183
+ A0011_10_58.bmp, -1, -1, -1
184
+ A0011_10_61.bmp, -1, -1, -1
185
+ A0011_10_62.bmp, -1, -1, -1
186
+ A0011_10_63.bmp, -1, -1, -1
187
+ A0011_10_64.bmp, -1, -1, -1
188
+ A0011_10_65.bmp, -1, -1, -1
189
+ A0011_10_66.bmp, -1, -1, -1
190
+ A0011_10_67.bmp, -1, -1, -1
191
+ A0011_10_68.bmp, -1, -1, -1
192
+ A0011_10_69.bmp, -1, -1, -1
193
+ A0011_10_70.bmp, -1, -1, -1
194
+ A0011_10_71.bmp, -1, -1, -1
195
+ A0011_10_72.bmp, -1, -1, -1
196
+ A0011_10_73.bmp, -1, -1, -1
197
+ A0011_10_74.bmp, -1, -1, -1
198
+ A0011_10_75.bmp, -1, -1, -1
199
+ A0022_10_00.bmp, -1, -1, -1
200
+ A0022_10_01.bmp, -1, -1, -1
201
+ A0022_10_02.bmp, -1, -1, -1
202
+ A0022_10_03.bmp, -1, -1, -1
203
+ A0022_10_04.bmp, -1, -1, -1
204
+ A0022_10_05.bmp, -1, -1, -1
205
+ A0022_10_06.bmp, -1, -1, -1
206
+ A0022_10_07.bmp, -1, -1, -1
207
+ A0022_10_08.bmp, -1, -1, -1
208
+ A0022_10_10.bmp, -1, -1, -1
209
+ A0022_10_11.bmp, -1, -1, -1
210
+ A0022_10_12.bmp, -1, -1, -1
211
+ A0022_10_14.bmp, -1, -1, -1
212
+ A0022_10_15.bmp, -1, -1, -1
213
+ A0022_10_16.bmp, -1, -1, -1
214
+ A0022_10_17.bmp, -1, -1, -1
215
+ A0022_10_18.bmp, -1, -1, -1
216
+ A0022_10_19.bmp, -1, -1, -1
217
+ A0022_10_20.bmp, -1, -1, -1
218
+ A0022_10_21.bmp, -1, -1, -1
219
+ A0022_10_24.bmp, -1, -1, -1
220
+ A0022_10_26.bmp, -1, -1, -1
221
+ A0022_10_27.bmp, -1, -1, -1
222
+ A0022_10_28.bmp, -1, -1, -1
223
+ A0022_10_29.bmp, -1, -1, -1
224
+ A0022_10_30.bmp, -1, -1, -1
225
+ A0022_10_31.bmp, -1, -1, -1
226
+ A0022_10_32.bmp, -1, -1, -1
227
+ A0022_10_34.bmp, -1, -1, -1
228
+ A0022_10_35.bmp, -1, -1, -1
229
+ A0022_10_36.bmp, -1, -1, -1
230
+ A0022_10_37.bmp, -1, -1, -1
231
+ A0022_10_38.bmp, -1, -1, -1
232
+ A0022_10_39.bmp, -1, -1, -1
233
+ A0022_10_40.bmp, -1, -1, -1
234
+ A0022_10_41.bmp, -1, -1, -1
235
+ A0022_10_42.bmp, -1, -1, -1
236
+ A0022_10_43.bmp, -1, -1, -1
237
+ A0022_10_44.bmp, -1, -1, -1
238
+ A0022_10_46.bmp, -1, -1, -1
239
+ A0022_10_47.bmp, -1, -1, -1
240
+ A0022_10_48.bmp, -1, -1, -1
241
+ A0022_10_49.bmp, -1, -1, -1
242
+ A0022_10_50.bmp, -1, -1, -1
243
+ A0022_10_51.bmp, -1, -1, -1
244
+ A0022_10_52.bmp, -1, -1, -1
245
+ A0022_10_53.bmp, -1, -1, -1
246
+ A0022_10_54.bmp, -1, -1, -1
247
+ A0022_10_55.bmp, -1, -1, -1
248
+ A0022_10_56.bmp, -1, -1, -1
249
+ A0022_10_58.bmp, -1, -1, -1
250
+ A0022_10_61.bmp, -1, -1, -1
251
+ A0022_10_62.bmp, -1, -1, -1
252
+ A0022_10_63.bmp, -1, -1, -1
253
+ A0022_10_64.bmp, -1, -1, -1
254
+ A0022_10_65.bmp, -1, -1, -1
255
+ A0022_10_66.bmp, -1, -1, -1
256
+ A0022_10_67.bmp, -1, -1, -1
257
+ A0022_10_68.bmp, -1, -1, -1
258
+ A0022_10_69.bmp, -1, -1, -1
259
+ A0022_10_70.bmp, -1, -1, -1
260
+ A0022_10_71.bmp, -1, -1, -1
261
+ A0022_10_72.bmp, -1, -1, -1
262
+ A0022_10_73.bmp, -1, -1, -1
263
+ A0022_10_74.bmp, -1, -1, -1
264
+ A0022_10_75.bmp, -1, -1, -1
265
+ A0033_10_00.bmp, -1, -1, -1
266
+ A0033_10_01.bmp, -1, -1, -1
267
+ A0033_10_02.bmp, -1, -1, -1
268
+ A0033_10_03.bmp, -1, -1, -1
269
+ A0033_10_04.bmp, -1, -1, -1
270
+ A0033_10_05.bmp, -1, -1, -1
271
+ A0033_10_06.bmp, -1, -1, -1
272
+ A0033_10_07.bmp, -1, -1, -1
273
+ A0033_10_08.bmp, -1, -1, -1
274
+ A0033_10_10.bmp, -1, -1, -1
275
+ A0033_10_11.bmp, -1, -1, -1
276
+ A0033_10_12.bmp, -1, -1, -1
277
+ A0033_10_14.bmp, -1, -1, -1
278
+ A0033_10_15.bmp, -1, -1, -1
279
+ A0033_10_16.bmp, -1, -1, -1
280
+ A0033_10_17.bmp, -1, -1, -1
281
+ A0033_10_18.bmp, -1, -1, -1
282
+ A0033_10_19.bmp, -1, -1, -1
283
+ A0033_10_20.bmp, -1, -1, -1
284
+ A0033_10_21.bmp, -1, -1, -1
285
+ A0033_10_24.bmp, -1, -1, -1
286
+ A0033_10_26.bmp, -1, -1, -1
287
+ A0033_10_27.bmp, -1, -1, -1
288
+ A0033_10_28.bmp, -1, -1, -1
289
+ A0033_10_29.bmp, -1, -1, -1
290
+ A0033_10_30.bmp, -1, -1, -1
291
+ A0033_10_31.bmp, -1, -1, -1
292
+ A0033_10_32.bmp, -1, -1, -1
293
+ A0033_10_34.bmp, -1, -1, -1
294
+ A0033_10_35.bmp, -1, -1, -1
295
+ A0033_10_36.bmp, -1, -1, -1
296
+ A0033_10_37.bmp, -1, -1, -1
297
+ A0033_10_38.bmp, -1, -1, -1
298
+ A0033_10_39.bmp, -1, -1, -1
299
+ A0033_10_40.bmp, -1, -1, -1
300
+ A0033_10_41.bmp, -1, -1, -1
301
+ A0033_10_42.bmp, -1, -1, -1
302
+ A0033_10_43.bmp, -1, -1, -1
303
+ A0033_10_44.bmp, -1, -1, -1
304
+ A0033_10_46.bmp, -1, -1, -1
305
+ A0033_10_47.bmp, -1, -1, -1
306
+ A0033_10_48.bmp, -1, -1, -1
307
+ A0033_10_49.bmp, -1, -1, -1
308
+ A0033_10_50.bmp, -1, -1, -1
309
+ A0033_10_51.bmp, -1, -1, -1
310
+ A0033_10_52.bmp, -1, -1, -1
311
+ A0033_10_53.bmp, -1, -1, -1
312
+ A0033_10_54.bmp, -1, -1, -1
313
+ A0033_10_55.bmp, -1, -1, -1
314
+ A0033_10_56.bmp, -1, -1, -1
315
+ A0033_10_58.bmp, -1, -1, -1
316
+ A0033_10_61.bmp, -1, -1, -1
317
+ A0033_10_62.bmp, -1, -1, -1
318
+ A0033_10_63.bmp, -1, -1, -1
319
+ A0033_10_64.bmp, -1, -1, -1
320
+ A0033_10_65.bmp, -1, -1, -1
321
+ A0033_10_66.bmp, -1, -1, -1
322
+ A0033_10_67.bmp, -1, -1, -1
323
+ A0033_10_68.bmp, -1, -1, -1
324
+ A0033_10_69.bmp, -1, -1, -1
325
+ A0033_10_70.bmp, -1, -1, -1
326
+ A0033_10_71.bmp, -1, -1, -1
327
+ A0033_10_72.bmp, -1, -1, -1
328
+ A0033_10_73.bmp, -1, -1, -1
329
+ A0033_10_74.bmp, -1, -1, -1
330
+ A0033_10_75.bmp, -1, -1, -1
331
+ A0036_10_00.bmp, -1, -1, -1
332
+ A0036_10_01.bmp, -1, -1, -1
333
+ A0036_10_02.bmp, -1, -1, -1
334
+ A0036_10_03.bmp, -1, -1, -1
335
+ A0036_10_04.bmp, -1, -1, -1
336
+ A0036_10_05.bmp, -1, -1, -1
337
+ A0036_10_06.bmp, -1, -1, -1
338
+ A0036_10_07.bmp, -1, -1, -1
339
+ A0036_10_08.bmp, -1, -1, -1
340
+ A0036_10_10.bmp, -1, -1, -1
341
+ A0036_10_11.bmp, -1, -1, -1
342
+ A0036_10_12.bmp, -1, -1, -1
343
+ A0036_10_14.bmp, -1, -1, -1
344
+ A0036_10_15.bmp, -1, -1, -1
345
+ A0036_10_16.bmp, -1, -1, -1
346
+ A0036_10_17.bmp, -1, -1, -1
347
+ A0036_10_18.bmp, -1, -1, -1
348
+ A0036_10_19.bmp, -1, -1, -1
349
+ A0036_10_20.bmp, -1, -1, -1
350
+ A0036_10_21.bmp, -1, -1, -1
351
+ A0036_10_24.bmp, -1, -1, -1
352
+ A0036_10_26.bmp, -1, -1, -1
353
+ A0036_10_27.bmp, -1, -1, -1
354
+ A0036_10_28.bmp, -1, -1, -1
355
+ A0036_10_29.bmp, -1, -1, -1
356
+ A0036_10_30.bmp, -1, -1, -1
357
+ A0036_10_31.bmp, -1, -1, -1
358
+ A0036_10_32.bmp, -1, -1, -1
359
+ A0036_10_34.bmp, -1, -1, -1
360
+ A0036_10_35.bmp, -1, -1, -1
361
+ A0036_10_36.bmp, -1, -1, -1
362
+ A0036_10_37.bmp, -1, -1, -1
363
+ A0036_10_38.bmp, -1, -1, -1
364
+ A0036_10_39.bmp, -1, -1, -1
365
+ A0036_10_40.bmp, -1, -1, -1
366
+ A0036_10_41.bmp, -1, -1, -1
367
+ A0036_10_42.bmp, -1, -1, -1
368
+ A0036_10_43.bmp, -1, -1, -1
369
+ A0036_10_44.bmp, -1, -1, -1
370
+ A0036_10_46.bmp, -1, -1, -1
371
+ A0036_10_47.bmp, -1, -1, -1
372
+ A0036_10_48.bmp, -1, -1, -1
373
+ A0036_10_49.bmp, -1, -1, -1
374
+ A0036_10_50.bmp, -1, -1, -1
375
+ A0036_10_51.bmp, -1, -1, -1
376
+ A0036_10_52.bmp, -1, -1, -1
377
+ A0036_10_53.bmp, -1, -1, -1
378
+ A0036_10_54.bmp, -1, -1, -1
379
+ A0036_10_55.bmp, -1, -1, -1
380
+ A0036_10_56.bmp, -1, -1, -1
381
+ A0036_10_58.bmp, -1, -1, -1
382
+ A0036_10_61.bmp, -1, -1, -1
383
+ A0036_10_62.bmp, -1, -1, -1
384
+ A0036_10_63.bmp, -1, -1, -1
385
+ A0036_10_64.bmp, -1, -1, -1
386
+ A0036_10_65.bmp, -1, -1, -1
387
+ A0036_10_66.bmp, -1, -1, -1
388
+ A0036_10_67.bmp, -1, -1, -1
389
+ A0036_10_68.bmp, -1, -1, -1
390
+ A0036_10_69.bmp, -1, -1, -1
391
+ A0036_10_70.bmp, -1, -1, -1
392
+ A0036_10_71.bmp, -1, -1, -1
393
+ A0036_10_72.bmp, -1, -1, -1
394
+ A0036_10_73.bmp, -1, -1, -1
395
+ A0036_10_74.bmp, -1, -1, -1
396
+ A0036_10_75.bmp, -1, -1, -1
397
+ A0071_10_00.bmp, -1, -1, -1
398
+ A0071_10_01.bmp, -1, -1, -1
399
+ A0071_10_02.bmp, -1, -1, -1
400
+ A0071_10_03.bmp, -1, -1, -1
401
+ A0071_10_04.bmp, -1, -1, -1
402
+ A0071_10_05.bmp, -1, -1, -1
403
+ A0071_10_06.bmp, -1, -1, -1
404
+ A0071_10_07.bmp, -1, -1, -1
405
+ A0071_10_08.bmp, -1, -1, -1
406
+ A0071_10_10.bmp, -1, -1, -1
407
+ A0071_10_11.bmp, -1, -1, -1
408
+ A0071_10_12.bmp, -1, -1, -1
409
+ A0071_10_14.bmp, -1, -1, -1
410
+ A0071_10_15.bmp, -1, -1, -1
411
+ A0071_10_16.bmp, -1, -1, -1
412
+ A0071_10_17.bmp, -1, -1, -1
413
+ A0071_10_18.bmp, -1, -1, -1
414
+ A0071_10_19.bmp, -1, -1, -1
415
+ A0071_10_20.bmp, -1, -1, -1
416
+ A0071_10_21.bmp, -1, -1, -1
417
+ A0071_10_24.bmp, -1, -1, -1
418
+ A0071_10_26.bmp, -1, -1, -1
419
+ A0071_10_27.bmp, -1, -1, -1
420
+ A0071_10_28.bmp, -1, -1, -1
421
+ A0071_10_29.bmp, -1, -1, -1
422
+ A0071_10_30.bmp, -1, -1, -1
423
+ A0071_10_31.bmp, -1, -1, -1
424
+ A0071_10_32.bmp, -1, -1, -1
425
+ A0071_10_34.bmp, -1, -1, -1
426
+ A0071_10_35.bmp, -1, -1, -1
427
+ A0071_10_36.bmp, -1, -1, -1
428
+ A0071_10_37.bmp, -1, -1, -1
429
+ A0071_10_38.bmp, -1, -1, -1
430
+ A0071_10_39.bmp, -1, -1, -1
431
+ A0071_10_40.bmp, -1, -1, -1
432
+ A0071_10_41.bmp, -1, -1, -1
433
+ A0071_10_42.bmp, -1, -1, -1
434
+ A0071_10_43.bmp, -1, -1, -1
435
+ A0071_10_44.bmp, -1, -1, -1
436
+ A0071_10_46.bmp, -1, -1, -1
437
+ A0071_10_47.bmp, -1, -1, -1
438
+ A0071_10_48.bmp, -1, -1, -1
439
+ A0071_10_49.bmp, -1, -1, -1
440
+ A0071_10_50.bmp, -1, -1, -1
441
+ A0071_10_51.bmp, -1, -1, -1
442
+ A0071_10_52.bmp, -1, -1, -1
443
+ A0071_10_53.bmp, -1, -1, -1
444
+ A0071_10_54.bmp, -1, -1, -1
445
+ A0071_10_55.bmp, -1, -1, -1
446
+ A0071_10_56.bmp, -1, -1, -1
447
+ A0071_10_58.bmp, -1, -1, -1
448
+ A0071_10_61.bmp, -1, -1, -1
449
+ A0071_10_62.bmp, -1, -1, -1
450
+ A0071_10_63.bmp, -1, -1, -1
451
+ A0071_10_64.bmp, -1, -1, -1
452
+ A0071_10_65.bmp, -1, -1, -1
453
+ A0071_10_66.bmp, -1, -1, -1
454
+ A0071_10_67.bmp, -1, -1, -1
455
+ A0071_10_68.bmp, -1, -1, -1
456
+ A0071_10_69.bmp, -1, -1, -1
457
+ A0071_10_70.bmp, -1, -1, -1
458
+ A0071_10_71.bmp, -1, -1, -1
459
+ A0071_10_72.bmp, -1, -1, -1
460
+ A0071_10_73.bmp, -1, -1, -1
461
+ A0071_10_74.bmp, -1, -1, -1
462
+ A0071_10_75.bmp, -1, -1, -1
463
+ A0089_10_00.bmp, -1, -1, -1
464
+ A0089_10_01.bmp, -1, -1, -1
465
+ A0089_10_02.bmp, -1, -1, -1
466
+ A0089_10_03.bmp, -1, -1, -1
467
+ A0089_10_04.bmp, -1, -1, -1
468
+ A0089_10_05.bmp, -1, -1, -1
469
+ A0089_10_06.bmp, -1, -1, -1
470
+ A0089_10_07.bmp, -1, -1, -1
471
+ A0089_10_08.bmp, -1, -1, -1
472
+ A0089_10_10.bmp, -1, -1, -1
473
+ A0089_10_11.bmp, -1, -1, -1
474
+ A0089_10_12.bmp, -1, -1, -1
475
+ A0089_10_14.bmp, -1, -1, -1
476
+ A0089_10_15.bmp, -1, -1, -1
477
+ A0089_10_16.bmp, -1, -1, -1
478
+ A0089_10_17.bmp, -1, -1, -1
479
+ A0089_10_18.bmp, -1, -1, -1
480
+ A0089_10_19.bmp, -1, -1, -1
481
+ A0089_10_20.bmp, -1, -1, -1
482
+ A0089_10_21.bmp, -1, -1, -1
483
+ A0089_10_24.bmp, -1, -1, -1
484
+ A0089_10_26.bmp, -1, -1, -1
485
+ A0089_10_27.bmp, -1, -1, -1
486
+ A0089_10_28.bmp, -1, -1, -1
487
+ A0089_10_29.bmp, -1, -1, -1
488
+ A0089_10_30.bmp, -1, -1, -1
489
+ A0089_10_31.bmp, -1, -1, -1
490
+ A0089_10_32.bmp, -1, -1, -1
491
+ A0089_10_34.bmp, -1, -1, -1
492
+ A0089_10_35.bmp, -1, -1, -1
493
+ A0089_10_36.bmp, -1, -1, -1
494
+ A0089_10_37.bmp, -1, -1, -1
495
+ A0089_10_38.bmp, -1, -1, -1
496
+ A0089_10_39.bmp, -1, -1, -1
497
+ A0089_10_40.bmp, -1, -1, -1
498
+ A0089_10_41.bmp, -1, -1, -1
499
+ A0089_10_42.bmp, -1, -1, -1
500
+ A0089_10_43.bmp, -1, -1, -1
501
+ A0089_10_44.bmp, -1, -1, -1
502
+ A0089_10_46.bmp, -1, -1, -1
503
+ A0089_10_47.bmp, -1, -1, -1
504
+ A0089_10_48.bmp, -1, -1, -1
505
+ A0089_10_49.bmp, -1, -1, -1
506
+ A0089_10_50.bmp, -1, -1, -1
507
+ A0089_10_51.bmp, -1, -1, -1
508
+ A0089_10_52.bmp, -1, -1, -1
509
+ A0089_10_53.bmp, -1, -1, -1
510
+ A0089_10_54.bmp, -1, -1, -1
511
+ A0089_10_55.bmp, -1, -1, -1
512
+ A0089_10_56.bmp, -1, -1, -1
513
+ A0089_10_58.bmp, -1, -1, -1
514
+ A0089_10_61.bmp, -1, -1, -1
515
+ A0089_10_62.bmp, -1, -1, -1
516
+ A0089_10_63.bmp, -1, -1, -1
517
+ A0089_10_64.bmp, -1, -1, -1
518
+ A0089_10_65.bmp, -1, -1, -1
519
+ A0089_10_66.bmp, -1, -1, -1
520
+ A0089_10_67.bmp, -1, -1, -1
521
+ A0089_10_68.bmp, -1, -1, -1
522
+ A0089_10_69.bmp, -1, -1, -1
523
+ A0089_10_70.bmp, -1, -1, -1
524
+ A0089_10_71.bmp, -1, -1, -1
525
+ A0089_10_72.bmp, -1, -1, -1
526
+ A0089_10_73.bmp, -1, -1, -1
527
+ A0089_10_74.bmp, -1, -1, -1
528
+ A0089_10_75.bmp, -1, -1, -1
529
+ A0095_10_00.bmp, -1, -1, -1
530
+ A0095_10_01.bmp, -1, -1, -1
531
+ A0095_10_02.bmp, -1, -1, -1
532
+ A0095_10_03.bmp, -1, -1, -1
533
+ A0095_10_04.bmp, -1, -1, -1
534
+ A0095_10_05.bmp, -1, -1, -1
535
+ A0095_10_06.bmp, -1, -1, -1
536
+ A0095_10_07.bmp, -1, -1, -1
537
+ A0095_10_08.bmp, -1, -1, -1
538
+ A0095_10_10.bmp, -1, -1, -1
539
+ A0095_10_11.bmp, -1, -1, -1
540
+ A0095_10_12.bmp, -1, -1, -1
541
+ A0095_10_14.bmp, -1, -1, -1
542
+ A0095_10_15.bmp, -1, -1, -1
543
+ A0095_10_16.bmp, -1, -1, -1
544
+ A0095_10_17.bmp, -1, -1, -1
545
+ A0095_10_18.bmp, -1, -1, -1
546
+ A0095_10_19.bmp, -1, -1, -1
547
+ A0095_10_20.bmp, -1, -1, -1
548
+ A0095_10_21.bmp, -1, -1, -1
549
+ A0095_10_24.bmp, -1, -1, -1
550
+ A0095_10_26.bmp, -1, -1, -1
551
+ A0095_10_27.bmp, -1, -1, -1
552
+ A0095_10_28.bmp, -1, -1, -1
553
+ A0095_10_29.bmp, -1, -1, -1
554
+ A0095_10_30.bmp, -1, -1, -1
555
+ A0095_10_31.bmp, -1, -1, -1
556
+ A0095_10_32.bmp, -1, -1, -1
557
+ A0095_10_34.bmp, -1, -1, -1
558
+ A0095_10_35.bmp, -1, -1, -1
559
+ A0095_10_36.bmp, -1, -1, -1
560
+ A0095_10_37.bmp, -1, -1, -1
561
+ A0095_10_38.bmp, -1, -1, -1
562
+ A0095_10_39.bmp, -1, -1, -1
563
+ A0095_10_40.bmp, -1, -1, -1
564
+ A0095_10_41.bmp, -1, -1, -1
565
+ A0095_10_42.bmp, -1, -1, -1
566
+ A0095_10_43.bmp, -1, -1, -1
567
+ A0095_10_44.bmp, -1, -1, -1
568
+ A0095_10_46.bmp, -1, -1, -1
569
+ A0095_10_47.bmp, -1, -1, -1
570
+ A0095_10_48.bmp, -1, -1, -1
571
+ A0095_10_49.bmp, -1, -1, -1
572
+ A0095_10_50.bmp, -1, -1, -1
573
+ A0095_10_51.bmp, -1, -1, -1
574
+ A0095_10_52.bmp, -1, -1, -1
575
+ A0095_10_53.bmp, -1, -1, -1
576
+ A0095_10_54.bmp, -1, -1, -1
577
+ A0095_10_55.bmp, -1, -1, -1
578
+ A0095_10_56.bmp, -1, -1, -1
579
+ A0095_10_58.bmp, -1, -1, -1
580
+ A0095_10_61.bmp, -1, -1, -1
581
+ A0095_10_62.bmp, -1, -1, -1
582
+ A0095_10_63.bmp, -1, -1, -1
583
+ A0095_10_64.bmp, -1, -1, -1
584
+ A0095_10_65.bmp, -1, -1, -1
585
+ A0095_10_66.bmp, -1, -1, -1
586
+ A0095_10_67.bmp, -1, -1, -1
587
+ A0095_10_68.bmp, -1, -1, -1
588
+ A0095_10_69.bmp, -1, -1, -1
589
+ A0095_10_70.bmp, -1, -1, -1
590
+ A0095_10_71.bmp, -1, -1, -1
591
+ A0095_10_72.bmp, -1, -1, -1
592
+ A0095_10_73.bmp, -1, -1, -1
593
+ A0095_10_74.bmp, -1, -1, -1
594
+ A0095_10_75.bmp, -1, -1, -1
595
+ A0111_10_00.bmp, -1, -1, -1
596
+ A0111_10_01.bmp, -1, -1, -1
597
+ A0111_10_02.bmp, -1, -1, -1
598
+ A0111_10_03.bmp, -1, -1, -1
599
+ A0111_10_04.bmp, -1, -1, -1
600
+ A0111_10_05.bmp, -1, -1, -1
601
+ A0111_10_06.bmp, -1, -1, -1
602
+ A0111_10_07.bmp, -1, -1, -1
603
+ A0111_10_08.bmp, -1, -1, -1
604
+ A0111_10_10.bmp, -1, -1, -1
605
+ A0111_10_11.bmp, -1, -1, -1
606
+ A0111_10_12.bmp, -1, -1, -1
607
+ A0111_10_14.bmp, -1, -1, -1
608
+ A0111_10_15.bmp, -1, -1, -1
609
+ A0111_10_16.bmp, -1, -1, -1
610
+ A0111_10_17.bmp, -1, -1, -1
611
+ A0111_10_18.bmp, -1, -1, -1
612
+ A0111_10_19.bmp, -1, -1, -1
613
+ A0111_10_20.bmp, -1, -1, -1
614
+ A0111_10_21.bmp, -1, -1, -1
615
+ A0111_10_24.bmp, -1, -1, -1
616
+ A0111_10_26.bmp, -1, -1, -1
617
+ A0111_10_27.bmp, -1, -1, -1
618
+ A0111_10_28.bmp, -1, -1, -1
619
+ A0111_10_29.bmp, -1, -1, -1
620
+ A0111_10_30.bmp, -1, -1, -1
621
+ A0111_10_31.bmp, -1, -1, -1
622
+ A0111_10_32.bmp, -1, -1, -1
623
+ A0111_10_34.bmp, -1, -1, -1
624
+ A0111_10_35.bmp, -1, -1, -1
625
+ A0111_10_36.bmp, -1, -1, -1
626
+ A0111_10_37.bmp, -1, -1, -1
627
+ A0111_10_38.bmp, -1, -1, -1
628
+ A0111_10_39.bmp, -1, -1, -1
629
+ A0111_10_40.bmp, -1, -1, -1
630
+ A0111_10_41.bmp, -1, -1, -1
631
+ A0111_10_42.bmp, -1, -1, -1
632
+ A0111_10_43.bmp, -1, -1, -1
633
+ A0111_10_44.bmp, -1, -1, -1
634
+ A0111_10_46.bmp, -1, -1, -1
635
+ A0111_10_47.bmp, -1, -1, -1
636
+ A0111_10_48.bmp, -1, -1, -1
637
+ A0111_10_49.bmp, -1, -1, -1
638
+ A0111_10_50.bmp, -1, -1, -1
639
+ A0111_10_51.bmp, -1, -1, -1
640
+ A0111_10_52.bmp, -1, -1, -1
641
+ A0111_10_53.bmp, -1, -1, -1
642
+ A0111_10_54.bmp, -1, -1, -1
643
+ A0111_10_55.bmp, -1, -1, -1
644
+ A0111_10_56.bmp, -1, -1, -1
645
+ A0111_10_58.bmp, -1, -1, -1
646
+ A0111_10_61.bmp, -1, -1, -1
647
+ A0111_10_62.bmp, -1, -1, -1
648
+ A0111_10_63.bmp, -1, -1, -1
649
+ A0111_10_64.bmp, -1, -1, -1
650
+ A0111_10_65.bmp, -1, -1, -1
651
+ A0111_10_66.bmp, -1, -1, -1
652
+ A0111_10_67.bmp, -1, -1, -1
653
+ A0111_10_68.bmp, -1, -1, -1
654
+ A0111_10_69.bmp, -1, -1, -1
655
+ A0111_10_70.bmp, -1, -1, -1
656
+ A0111_10_71.bmp, -1, -1, -1
657
+ A0111_10_72.bmp, -1, -1, -1
658
+ A0111_10_73.bmp, -1, -1, -1
659
+ A0111_10_74.bmp, -1, -1, -1
660
+ A0111_10_75.bmp, -1, -1, -1
661
+ A0122_10_00.bmp, -1, -1, -1
662
+ A0122_10_01.bmp, -1, -1, -1
663
+ A0122_10_02.bmp, -1, -1, -1
664
+ A0122_10_03.bmp, -1, -1, -1
665
+ A0122_10_04.bmp, -1, -1, -1
666
+ A0122_10_05.bmp, -1, -1, -1
667
+ A0122_10_06.bmp, -1, -1, -1
668
+ A0122_10_07.bmp, -1, -1, -1
669
+ A0122_10_08.bmp, -1, -1, -1
670
+ A0122_10_10.bmp, -1, -1, -1
671
+ A0122_10_11.bmp, -1, -1, -1
672
+ A0122_10_12.bmp, -1, -1, -1
673
+ A0122_10_14.bmp, -1, -1, -1
674
+ A0122_10_15.bmp, -1, -1, -1
675
+ A0122_10_16.bmp, -1, -1, -1
676
+ A0122_10_17.bmp, -1, -1, -1
677
+ A0122_10_18.bmp, -1, -1, -1
678
+ A0122_10_19.bmp, -1, -1, -1
679
+ A0122_10_20.bmp, -1, -1, -1
680
+ A0122_10_21.bmp, -1, -1, -1
681
+ A0122_10_24.bmp, -1, -1, -1
682
+ A0122_10_26.bmp, -1, -1, -1
683
+ A0122_10_27.bmp, -1, -1, -1
684
+ A0122_10_28.bmp, -1, -1, -1
685
+ A0122_10_29.bmp, -1, -1, -1
686
+ A0122_10_30.bmp, -1, -1, -1
687
+ A0122_10_31.bmp, -1, -1, -1
688
+ A0122_10_32.bmp, -1, -1, -1
689
+ A0122_10_34.bmp, -1, -1, -1
690
+ A0122_10_35.bmp, -1, -1, -1
691
+ A0122_10_36.bmp, -1, -1, -1
692
+ A0122_10_37.bmp, -1, -1, -1
693
+ A0122_10_38.bmp, -1, -1, -1
694
+ A0122_10_39.bmp, -1, -1, -1
695
+ A0122_10_40.bmp, -1, -1, -1
696
+ A0122_10_41.bmp, -1, -1, -1
697
+ A0122_10_42.bmp, -1, -1, -1
698
+ A0122_10_43.bmp, -1, -1, -1
699
+ A0122_10_44.bmp, -1, -1, -1
700
+ A0122_10_46.bmp, -1, -1, -1
701
+ A0122_10_47.bmp, -1, -1, -1
702
+ A0122_10_48.bmp, -1, -1, -1
703
+ A0122_10_49.bmp, -1, -1, -1
704
+ A0122_10_50.bmp, -1, -1, -1
705
+ A0122_10_51.bmp, -1, -1, -1
706
+ A0122_10_52.bmp, -1, -1, -1
707
+ A0122_10_53.bmp, -1, -1, -1
708
+ A0122_10_54.bmp, -1, -1, -1
709
+ A0122_10_55.bmp, -1, -1, -1
710
+ A0122_10_56.bmp, -1, -1, -1
711
+ A0122_10_58.bmp, -1, -1, -1
712
+ A0122_10_61.bmp, -1, -1, -1
713
+ A0122_10_62.bmp, -1, -1, -1
714
+ A0122_10_63.bmp, -1, -1, -1
715
+ A0122_10_64.bmp, -1, -1, -1
716
+ A0122_10_65.bmp, -1, -1, -1
717
+ A0122_10_66.bmp, -1, -1, -1
718
+ A0122_10_67.bmp, -1, -1, -1
719
+ A0122_10_68.bmp, -1, -1, -1
720
+ A0122_10_69.bmp, -1, -1, -1
721
+ A0122_10_70.bmp, -1, -1, -1
722
+ A0122_10_71.bmp, -1, -1, -1
723
+ A0122_10_72.bmp, -1, -1, -1
724
+ A0122_10_73.bmp, -1, -1, -1
725
+ A0122_10_74.bmp, -1, -1, -1
726
+ A0122_10_75.bmp, -1, -1, -1
727
+ A0126_10_00.bmp, -1, -1, -1
728
+ A0126_10_01.bmp, -1, -1, -1
729
+ A0126_10_02.bmp, -1, -1, -1
730
+ A0126_10_03.bmp, -1, -1, -1
731
+ A0126_10_04.bmp, -1, -1, -1
732
+ A0126_10_05.bmp, -1, -1, -1
733
+ A0126_10_06.bmp, -1, -1, -1
734
+ A0126_10_07.bmp, -1, -1, -1
735
+ A0126_10_08.bmp, -1, -1, -1
736
+ A0126_10_10.bmp, -1, -1, -1
737
+ A0126_10_11.bmp, -1, -1, -1
738
+ A0126_10_12.bmp, -1, -1, -1
739
+ A0126_10_14.bmp, -1, -1, -1
740
+ A0126_10_15.bmp, -1, -1, -1
741
+ A0126_10_16.bmp, -1, -1, -1
742
+ A0126_10_17.bmp, -1, -1, -1
743
+ A0126_10_18.bmp, -1, -1, -1
744
+ A0126_10_19.bmp, -1, -1, -1
745
+ A0126_10_20.bmp, -1, -1, -1
746
+ A0126_10_21.bmp, -1, -1, -1
747
+ A0126_10_24.bmp, -1, -1, -1
748
+ A0126_10_26.bmp, -1, -1, -1
749
+ A0126_10_27.bmp, -1, -1, -1
750
+ A0126_10_28.bmp, -1, -1, -1
751
+ A0126_10_29.bmp, -1, -1, -1
752
+ A0126_10_30.bmp, -1, -1, -1
753
+ A0126_10_31.bmp, -1, -1, -1
754
+ A0126_10_32.bmp, -1, -1, -1
755
+ A0126_10_34.bmp, -1, -1, -1
756
+ A0126_10_35.bmp, -1, -1, -1
757
+ A0126_10_36.bmp, -1, -1, -1
758
+ A0126_10_37.bmp, -1, -1, -1
759
+ A0126_10_38.bmp, -1, -1, -1
760
+ A0126_10_39.bmp, -1, -1, -1
761
+ A0126_10_40.bmp, -1, -1, -1
762
+ A0126_10_41.bmp, -1, -1, -1
763
+ A0126_10_42.bmp, -1, -1, -1
764
+ A0126_10_43.bmp, -1, -1, -1
765
+ A0126_10_44.bmp, -1, -1, -1
766
+ A0126_10_46.bmp, -1, -1, -1
767
+ A0126_10_47.bmp, -1, -1, -1
768
+ A0126_10_48.bmp, -1, -1, -1
769
+ A0126_10_49.bmp, -1, -1, -1
770
+ A0126_10_50.bmp, -1, -1, -1
771
+ A0126_10_51.bmp, -1, -1, -1
772
+ A0126_10_52.bmp, -1, -1, -1
773
+ A0126_10_53.bmp, -1, -1, -1
774
+ A0126_10_54.bmp, -1, -1, -1
775
+ A0126_10_55.bmp, -1, -1, -1
776
+ A0126_10_56.bmp, -1, -1, -1
777
+ A0126_10_58.bmp, -1, -1, -1
778
+ A0126_10_61.bmp, -1, -1, -1
779
+ A0126_10_62.bmp, -1, -1, -1
780
+ A0126_10_63.bmp, -1, -1, -1
781
+ A0126_10_64.bmp, -1, -1, -1
782
+ A0126_10_65.bmp, -1, -1, -1
783
+ A0126_10_66.bmp, -1, -1, -1
784
+ A0126_10_67.bmp, -1, -1, -1
785
+ A0126_10_68.bmp, -1, -1, -1
786
+ A0126_10_69.bmp, -1, -1, -1
787
+ A0126_10_70.bmp, -1, -1, -1
788
+ A0126_10_71.bmp, -1, -1, -1
789
+ A0126_10_72.bmp, -1, -1, -1
790
+ A0126_10_73.bmp, -1, -1, -1
791
+ A0126_10_74.bmp, -1, -1, -1
792
+ A0126_10_75.bmp, -1, -1, -1
793
+ A0127_10_00.bmp, -1, -1, -1
794
+ A0127_10_01.bmp, -1, -1, -1
795
+ A0127_10_02.bmp, -1, -1, -1
796
+ A0127_10_03.bmp, -1, -1, -1
797
+ A0127_10_04.bmp, -1, -1, -1
798
+ A0127_10_05.bmp, -1, -1, -1
799
+ A0127_10_06.bmp, -1, -1, -1
800
+ A0127_10_07.bmp, -1, -1, -1
801
+ A0127_10_08.bmp, -1, -1, -1
802
+ A0127_10_10.bmp, -1, -1, -1
803
+ A0127_10_11.bmp, -1, -1, -1
804
+ A0127_10_12.bmp, -1, -1, -1
805
+ A0127_10_14.bmp, -1, -1, -1
806
+ A0127_10_15.bmp, -1, -1, -1
807
+ A0127_10_16.bmp, -1, -1, -1
808
+ A0127_10_17.bmp, -1, -1, -1
809
+ A0127_10_18.bmp, -1, -1, -1
810
+ A0127_10_19.bmp, -1, -1, -1
811
+ A0127_10_20.bmp, -1, -1, -1
812
+ A0127_10_21.bmp, -1, -1, -1
813
+ A0127_10_24.bmp, -1, -1, -1
814
+ A0127_10_26.bmp, -1, -1, -1
815
+ A0127_10_27.bmp, -1, -1, -1
816
+ A0127_10_28.bmp, -1, -1, -1
817
+ A0127_10_29.bmp, -1, -1, -1
818
+ A0127_10_30.bmp, -1, -1, -1
819
+ A0127_10_31.bmp, -1, -1, -1
820
+ A0127_10_32.bmp, -1, -1, -1
821
+ A0127_10_34.bmp, -1, -1, -1
822
+ A0127_10_35.bmp, -1, -1, -1
823
+ A0127_10_36.bmp, -1, -1, -1
824
+ A0127_10_37.bmp, -1, -1, -1
825
+ A0127_10_38.bmp, -1, -1, -1
826
+ A0127_10_39.bmp, -1, -1, -1
827
+ A0127_10_40.bmp, -1, -1, -1
828
+ A0127_10_41.bmp, -1, -1, -1
829
+ A0127_10_42.bmp, -1, -1, -1
830
+ A0127_10_43.bmp, -1, -1, -1
831
+ A0127_10_44.bmp, -1, -1, -1
832
+ A0127_10_46.bmp, -1, -1, -1
833
+ A0127_10_47.bmp, -1, -1, -1
834
+ A0127_10_48.bmp, -1, -1, -1
835
+ A0127_10_49.bmp, -1, -1, -1
836
+ A0127_10_50.bmp, -1, -1, -1
837
+ A0127_10_51.bmp, -1, -1, -1
838
+ A0127_10_52.bmp, -1, -1, -1
839
+ A0127_10_53.bmp, -1, -1, -1
840
+ A0127_10_54.bmp, -1, -1, -1
841
+ A0127_10_55.bmp, -1, -1, -1
842
+ A0127_10_56.bmp, -1, -1, -1
843
+ A0127_10_58.bmp, -1, -1, -1
844
+ A0127_10_61.bmp, -1, -1, -1
845
+ A0127_10_62.bmp, -1, -1, -1
846
+ A0127_10_63.bmp, -1, -1, -1
847
+ A0127_10_64.bmp, -1, -1, -1
848
+ A0127_10_65.bmp, -1, -1, -1
849
+ A0127_10_66.bmp, -1, -1, -1
850
+ A0127_10_67.bmp, -1, -1, -1
851
+ A0127_10_68.bmp, -1, -1, -1
852
+ A0127_10_69.bmp, -1, -1, -1
853
+ A0127_10_70.bmp, -1, -1, -1
854
+ A0127_10_71.bmp, -1, -1, -1
855
+ A0127_10_72.bmp, -1, -1, -1
856
+ A0127_10_73.bmp, -1, -1, -1
857
+ A0127_10_74.bmp, -1, -1, -1
858
+ A0127_10_75.bmp, -1, -1, -1
859
+ A0129_10_00.bmp, -1, -1, -1
860
+ A0129_10_01.bmp, -1, -1, -1
861
+ A0129_10_02.bmp, -1, -1, -1
862
+ A0129_10_03.bmp, -1, -1, -1
863
+ A0129_10_04.bmp, -1, -1, -1
864
+ A0129_10_05.bmp, -1, -1, -1
865
+ A0129_10_06.bmp, -1, -1, -1
866
+ A0129_10_07.bmp, -1, -1, -1
867
+ A0129_10_08.bmp, -1, -1, -1
868
+ A0129_10_10.bmp, -1, -1, -1
869
+ A0129_10_11.bmp, -1, -1, -1
870
+ A0129_10_12.bmp, -1, -1, -1
871
+ A0129_10_14.bmp, -1, -1, -1
872
+ A0129_10_15.bmp, -1, -1, -1
873
+ A0129_10_16.bmp, -1, -1, -1
874
+ A0129_10_17.bmp, -1, -1, -1
875
+ A0129_10_18.bmp, -1, -1, -1
876
+ A0129_10_19.bmp, -1, -1, -1
877
+ A0129_10_20.bmp, -1, -1, -1
878
+ A0129_10_21.bmp, -1, -1, -1
879
+ A0129_10_24.bmp, -1, -1, -1
880
+ A0129_10_26.bmp, -1, -1, -1
881
+ A0129_10_27.bmp, -1, -1, -1
882
+ A0129_10_28.bmp, -1, -1, -1
883
+ A0129_10_29.bmp, -1, -1, -1
884
+ A0129_10_30.bmp, -1, -1, -1
885
+ A0129_10_31.bmp, -1, -1, -1
886
+ A0129_10_32.bmp, -1, -1, -1
887
+ A0129_10_34.bmp, -1, -1, -1
888
+ A0129_10_35.bmp, -1, -1, -1
889
+ A0129_10_36.bmp, -1, -1, -1
890
+ A0129_10_37.bmp, -1, -1, -1
891
+ A0129_10_38.bmp, -1, -1, -1
892
+ A0129_10_39.bmp, -1, -1, -1
893
+ A0129_10_40.bmp, -1, -1, -1
894
+ A0129_10_41.bmp, -1, -1, -1
895
+ A0129_10_42.bmp, -1, -1, -1
896
+ A0129_10_43.bmp, -1, -1, -1
897
+ A0129_10_44.bmp, -1, -1, -1
898
+ A0129_10_46.bmp, -1, -1, -1
899
+ A0129_10_47.bmp, -1, -1, -1
900
+ A0129_10_48.bmp, -1, -1, -1
901
+ A0129_10_49.bmp, -1, -1, -1
902
+ A0129_10_50.bmp, -1, -1, -1
903
+ A0129_10_51.bmp, -1, -1, -1
904
+ A0129_10_52.bmp, -1, -1, -1
905
+ A0129_10_53.bmp, -1, -1, -1
906
+ A0129_10_54.bmp, -1, -1, -1
907
+ A0129_10_55.bmp, -1, -1, -1
908
+ A0129_10_56.bmp, -1, -1, -1
909
+ A0129_10_58.bmp, -1, -1, -1
910
+ A0129_10_61.bmp, -1, -1, -1
911
+ A0129_10_62.bmp, -1, -1, -1
912
+ A0129_10_63.bmp, -1, -1, -1
913
+ A0129_10_64.bmp, -1, -1, -1
914
+ A0129_10_65.bmp, -1, -1, -1
915
+ A0129_10_66.bmp, -1, -1, -1
916
+ A0129_10_67.bmp, -1, -1, -1
917
+ A0129_10_68.bmp, -1, -1, -1
918
+ A0129_10_69.bmp, -1, -1, -1
919
+ A0129_10_70.bmp, -1, -1, -1
920
+ A0129_10_71.bmp, -1, -1, -1
921
+ A0129_10_72.bmp, -1, -1, -1
922
+ A0129_10_73.bmp, -1, -1, -1
923
+ A0129_10_74.bmp, -1, -1, -1
924
+ A0129_10_75.bmp, -1, -1, -1
925
+ A0144_10_00.bmp, -1, -1, -1
926
+ A0144_10_01.bmp, -1, -1, -1
927
+ A0144_10_02.bmp, -1, -1, -1
928
+ A0144_10_03.bmp, -1, -1, -1
929
+ A0144_10_04.bmp, -1, -1, -1
930
+ A0144_10_05.bmp, -1, -1, -1
931
+ A0144_10_06.bmp, -1, -1, -1
932
+ A0144_10_07.bmp, -1, -1, -1
933
+ A0144_10_08.bmp, -1, -1, -1
934
+ A0144_10_10.bmp, -1, -1, -1
935
+ A0144_10_11.bmp, -1, -1, -1
936
+ A0144_10_12.bmp, -1, -1, -1
937
+ A0144_10_14.bmp, -1, -1, -1
938
+ A0144_10_15.bmp, -1, -1, -1
939
+ A0144_10_16.bmp, -1, -1, -1
940
+ A0144_10_17.bmp, -1, -1, -1
941
+ A0144_10_18.bmp, -1, -1, -1
942
+ A0144_10_19.bmp, -1, -1, -1
943
+ A0144_10_20.bmp, -1, -1, -1
944
+ A0144_10_21.bmp, -1, -1, -1
945
+ A0144_10_24.bmp, -1, -1, -1
946
+ A0144_10_26.bmp, -1, -1, -1
947
+ A0144_10_27.bmp, -1, -1, -1
948
+ A0144_10_28.bmp, -1, -1, -1
949
+ A0144_10_29.bmp, -1, -1, -1
950
+ A0144_10_30.bmp, -1, -1, -1
951
+ A0144_10_31.bmp, -1, -1, -1
952
+ A0144_10_32.bmp, -1, -1, -1
953
+ A0144_10_34.bmp, -1, -1, -1
954
+ A0144_10_35.bmp, -1, -1, -1
955
+ A0144_10_36.bmp, -1, -1, -1
956
+ A0144_10_37.bmp, -1, -1, -1
957
+ A0144_10_38.bmp, -1, -1, -1
958
+ A0144_10_39.bmp, -1, -1, -1
959
+ A0144_10_40.bmp, -1, -1, -1
960
+ A0144_10_41.bmp, -1, -1, -1
961
+ A0144_10_42.bmp, -1, -1, -1
962
+ A0144_10_43.bmp, -1, -1, -1
963
+ A0144_10_44.bmp, -1, -1, -1
964
+ A0144_10_46.bmp, -1, -1, -1
965
+ A0144_10_47.bmp, -1, -1, -1
966
+ A0144_10_48.bmp, -1, -1, -1
967
+ A0144_10_49.bmp, -1, -1, -1
968
+ A0144_10_50.bmp, -1, -1, -1
969
+ A0144_10_51.bmp, -1, -1, -1
970
+ A0144_10_52.bmp, -1, -1, -1
971
+ A0144_10_53.bmp, -1, -1, -1
972
+ A0144_10_54.bmp, -1, -1, -1
973
+ A0144_10_55.bmp, -1, -1, -1
974
+ A0144_10_56.bmp, -1, -1, -1
975
+ A0144_10_58.bmp, -1, -1, -1
976
+ A0144_10_61.bmp, -1, -1, -1
977
+ A0144_10_62.bmp, -1, -1, -1
978
+ A0144_10_63.bmp, -1, -1, -1
979
+ A0144_10_64.bmp, -1, -1, -1
980
+ A0144_10_65.bmp, -1, -1, -1
981
+ A0144_10_66.bmp, -1, -1, -1
982
+ A0144_10_67.bmp, -1, -1, -1
983
+ A0144_10_68.bmp, -1, -1, -1
984
+ A0144_10_69.bmp, -1, -1, -1
985
+ A0144_10_70.bmp, -1, -1, -1
986
+ A0144_10_71.bmp, -1, -1, -1
987
+ A0144_10_72.bmp, -1, -1, -1
988
+ A0144_10_73.bmp, -1, -1, -1
989
+ A0144_10_74.bmp, -1, -1, -1
990
+ A0144_10_75.bmp, -1, -1, -1
991
+ A0147_10_00.bmp, -1, -1, -1
992
+ A0147_10_01.bmp, -1, -1, -1
993
+ A0147_10_02.bmp, -1, -1, -1
994
+ A0147_10_03.bmp, -1, -1, -1
995
+ A0147_10_04.bmp, -1, -1, -1
996
+ A0147_10_05.bmp, -1, -1, -1
997
+ A0147_10_06.bmp, -1, -1, -1
998
+ A0147_10_07.bmp, -1, -1, -1
999
+ A0147_10_08.bmp, -1, -1, -1
1000
+ A0147_10_10.bmp, -1, -1, -1
1001
+ A0147_10_11.bmp, -1, -1, -1
1002
+ A0147_10_12.bmp, -1, -1, -1
1003
+ A0147_10_14.bmp, -1, -1, -1
1004
+ A0147_10_15.bmp, -1, -1, -1
1005
+ A0147_10_16.bmp, -1, -1, -1
1006
+ A0147_10_17.bmp, -1, -1, -1
1007
+ A0147_10_18.bmp, -1, -1, -1
1008
+ A0147_10_19.bmp, -1, -1, -1
1009
+ A0147_10_20.bmp, -1, -1, -1
1010
+ A0147_10_21.bmp, -1, -1, -1
1011
+ A0147_10_24.bmp, -1, -1, -1
1012
+ A0147_10_26.bmp, -1, -1, -1
1013
+ A0147_10_27.bmp, -1, -1, -1
1014
+ A0147_10_28.bmp, -1, -1, -1
1015
+ A0147_10_29.bmp, -1, -1, -1
1016
+ A0147_10_30.bmp, -1, -1, -1
1017
+ A0147_10_31.bmp, -1, -1, -1
1018
+ A0147_10_32.bmp, -1, -1, -1
1019
+ A0147_10_34.bmp, -1, -1, -1
1020
+ A0147_10_35.bmp, -1, -1, -1
1021
+ A0147_10_36.bmp, -1, -1, -1
1022
+ A0147_10_37.bmp, -1, -1, -1
1023
+ A0147_10_38.bmp, -1, -1, -1
1024
+ A0147_10_39.bmp, -1, -1, -1
1025
+ A0147_10_40.bmp, -1, -1, -1
1026
+ A0147_10_41.bmp, -1, -1, -1
1027
+ A0147_10_42.bmp, -1, -1, -1
1028
+ A0147_10_43.bmp, -1, -1, -1
1029
+ A0147_10_44.bmp, -1, -1, -1
1030
+ A0147_10_46.bmp, -1, -1, -1
1031
+ A0147_10_47.bmp, -1, -1, -1
1032
+ A0147_10_48.bmp, -1, -1, -1
1033
+ A0147_10_49.bmp, -1, -1, -1
1034
+ A0147_10_50.bmp, -1, -1, -1
1035
+ A0147_10_51.bmp, -1, -1, -1
1036
+ A0147_10_52.bmp, -1, -1, -1
1037
+ A0147_10_53.bmp, -1, -1, -1
1038
+ A0147_10_54.bmp, -1, -1, -1
1039
+ A0147_10_55.bmp, -1, -1, -1
1040
+ A0147_10_56.bmp, -1, -1, -1
1041
+ A0147_10_58.bmp, -1, -1, -1
1042
+ A0147_10_61.bmp, -1, -1, -1
1043
+ A0147_10_62.bmp, -1, -1, -1
1044
+ A0147_10_63.bmp, -1, -1, -1
1045
+ A0147_10_64.bmp, -1, -1, -1
1046
+ A0147_10_65.bmp, -1, -1, -1
1047
+ A0147_10_66.bmp, -1, -1, -1
1048
+ A0147_10_67.bmp, -1, -1, -1
1049
+ A0147_10_68.bmp, -1, -1, -1
1050
+ A0147_10_69.bmp, -1, -1, -1
1051
+ A0147_10_70.bmp, -1, -1, -1
1052
+ A0147_10_71.bmp, -1, -1, -1
1053
+ A0147_10_72.bmp, -1, -1, -1
1054
+ A0147_10_73.bmp, -1, -1, -1
1055
+ A0147_10_74.bmp, -1, -1, -1
1056
+ A0147_10_75.bmp, -1, -1, -1
1057
+ A0154_10_00.bmp, -1, -1, -1
1058
+ A0154_10_01.bmp, -1, -1, -1
1059
+ A0154_10_02.bmp, -1, -1, -1
1060
+ A0154_10_03.bmp, -1, -1, -1
1061
+ A0154_10_04.bmp, -1, -1, -1
1062
+ A0154_10_05.bmp, -1, -1, -1
1063
+ A0154_10_06.bmp, -1, -1, -1
1064
+ A0154_10_07.bmp, -1, -1, -1
1065
+ A0154_10_08.bmp, -1, -1, -1
1066
+ A0154_10_10.bmp, -1, -1, -1
1067
+ A0154_10_11.bmp, -1, -1, -1
1068
+ A0154_10_12.bmp, -1, -1, -1
1069
+ A0154_10_14.bmp, -1, -1, -1
1070
+ A0154_10_15.bmp, -1, -1, -1
1071
+ A0154_10_16.bmp, -1, -1, -1
1072
+ A0154_10_17.bmp, -1, -1, -1
1073
+ A0154_10_18.bmp, -1, -1, -1
1074
+ A0154_10_19.bmp, -1, -1, -1
1075
+ A0154_10_20.bmp, -1, -1, -1
1076
+ A0154_10_21.bmp, -1, -1, -1
1077
+ A0154_10_24.bmp, -1, -1, -1
1078
+ A0154_10_26.bmp, -1, -1, -1
1079
+ A0154_10_27.bmp, -1, -1, -1
1080
+ A0154_10_28.bmp, -1, -1, -1
1081
+ A0154_10_29.bmp, -1, -1, -1
1082
+ A0154_10_30.bmp, -1, -1, -1
1083
+ A0154_10_31.bmp, -1, -1, -1
1084
+ A0154_10_32.bmp, -1, -1, -1
1085
+ A0154_10_34.bmp, -1, -1, -1
1086
+ A0154_10_35.bmp, -1, -1, -1
1087
+ A0154_10_36.bmp, -1, -1, -1
1088
+ A0154_10_37.bmp, -1, -1, -1
1089
+ A0154_10_38.bmp, -1, -1, -1
1090
+ A0154_10_39.bmp, -1, -1, -1
1091
+ A0154_10_40.bmp, -1, -1, -1
1092
+ A0154_10_41.bmp, -1, -1, -1
1093
+ A0154_10_42.bmp, -1, -1, -1
1094
+ A0154_10_43.bmp, -1, -1, -1
1095
+ A0154_10_44.bmp, -1, -1, -1
1096
+ A0154_10_46.bmp, -1, -1, -1
1097
+ A0154_10_47.bmp, -1, -1, -1
1098
+ A0154_10_48.bmp, -1, -1, -1
1099
+ A0154_10_49.bmp, -1, -1, -1
1100
+ A0154_10_50.bmp, -1, -1, -1
1101
+ A0154_10_51.bmp, -1, -1, -1
1102
+ A0154_10_52.bmp, -1, -1, -1
1103
+ A0154_10_53.bmp, -1, -1, -1
1104
+ A0154_10_54.bmp, -1, -1, -1
1105
+ A0154_10_55.bmp, -1, -1, -1
1106
+ A0154_10_56.bmp, -1, -1, -1
1107
+ A0154_10_58.bmp, -1, -1, -1
1108
+ A0154_10_61.bmp, -1, -1, -1
1109
+ A0154_10_62.bmp, -1, -1, -1
1110
+ A0154_10_63.bmp, -1, -1, -1
1111
+ A0154_10_64.bmp, -1, -1, -1
1112
+ A0154_10_65.bmp, -1, -1, -1
1113
+ A0154_10_66.bmp, -1, -1, -1
1114
+ A0154_10_67.bmp, -1, -1, -1
1115
+ A0154_10_68.bmp, -1, -1, -1
1116
+ A0154_10_69.bmp, -1, -1, -1
1117
+ A0154_10_70.bmp, -1, -1, -1
1118
+ A0154_10_71.bmp, -1, -1, -1
1119
+ A0154_10_72.bmp, -1, -1, -1
1120
+ A0154_10_73.bmp, -1, -1, -1
1121
+ A0154_10_74.bmp, -1, -1, -1
1122
+ A0154_10_75.bmp, -1, -1, -1
1123
+ A0161_10_00.bmp, -1, -1, -1
1124
+ A0161_10_01.bmp, -1, -1, -1
1125
+ A0161_10_02.bmp, -1, -1, -1
1126
+ A0161_10_03.bmp, -1, -1, -1
1127
+ A0161_10_04.bmp, -1, -1, -1
1128
+ A0161_10_05.bmp, -1, -1, -1
1129
+ A0161_10_06.bmp, -1, -1, -1
1130
+ A0161_10_07.bmp, -1, -1, -1
1131
+ A0161_10_08.bmp, -1, -1, -1
1132
+ A0161_10_10.bmp, -1, -1, -1
1133
+ A0161_10_11.bmp, -1, -1, -1
1134
+ A0161_10_12.bmp, -1, -1, -1
1135
+ A0161_10_14.bmp, -1, -1, -1
1136
+ A0161_10_15.bmp, -1, -1, -1
1137
+ A0161_10_16.bmp, -1, -1, -1
1138
+ A0161_10_17.bmp, -1, -1, -1
1139
+ A0161_10_18.bmp, -1, -1, -1
1140
+ A0161_10_19.bmp, -1, -1, -1
1141
+ A0161_10_20.bmp, -1, -1, -1
1142
+ A0161_10_21.bmp, -1, -1, -1
1143
+ A0161_10_24.bmp, -1, -1, -1
1144
+ A0161_10_26.bmp, -1, -1, -1
1145
+ A0161_10_27.bmp, -1, -1, -1
1146
+ A0161_10_28.bmp, -1, -1, -1
1147
+ A0161_10_29.bmp, -1, -1, -1
1148
+ A0161_10_30.bmp, -1, -1, -1
1149
+ A0161_10_31.bmp, -1, -1, -1
1150
+ A0161_10_32.bmp, -1, -1, -1
1151
+ A0161_10_34.bmp, -1, -1, -1
1152
+ A0161_10_35.bmp, -1, -1, -1
1153
+ A0161_10_36.bmp, -1, -1, -1
1154
+ A0161_10_37.bmp, -1, -1, -1
1155
+ A0161_10_38.bmp, -1, -1, -1
1156
+ A0161_10_39.bmp, -1, -1, -1
1157
+ A0161_10_40.bmp, -1, -1, -1
1158
+ A0161_10_41.bmp, -1, -1, -1
1159
+ A0161_10_42.bmp, -1, -1, -1
1160
+ A0161_10_43.bmp, -1, -1, -1
1161
+ A0161_10_44.bmp, -1, -1, -1
1162
+ A0161_10_46.bmp, -1, -1, -1
1163
+ A0161_10_47.bmp, -1, -1, -1
1164
+ A0161_10_48.bmp, -1, -1, -1
1165
+ A0161_10_49.bmp, -1, -1, -1
1166
+ A0161_10_50.bmp, -1, -1, -1
1167
+ A0161_10_51.bmp, -1, -1, -1
1168
+ A0161_10_52.bmp, -1, -1, -1
1169
+ A0161_10_53.bmp, -1, -1, -1
1170
+ A0161_10_54.bmp, -1, -1, -1
1171
+ A0161_10_55.bmp, -1, -1, -1
1172
+ A0161_10_56.bmp, -1, -1, -1
1173
+ A0161_10_58.bmp, -1, -1, -1
1174
+ A0161_10_61.bmp, -1, -1, -1
1175
+ A0161_10_62.bmp, -1, -1, -1
1176
+ A0161_10_63.bmp, -1, -1, -1
1177
+ A0161_10_64.bmp, -1, -1, -1
1178
+ A0161_10_65.bmp, -1, -1, -1
1179
+ A0161_10_66.bmp, -1, -1, -1
1180
+ A0161_10_67.bmp, -1, -1, -1
1181
+ A0161_10_68.bmp, -1, -1, -1
1182
+ A0161_10_69.bmp, -1, -1, -1
1183
+ A0161_10_70.bmp, -1, -1, -1
1184
+ A0161_10_71.bmp, -1, -1, -1
1185
+ A0161_10_72.bmp, -1, -1, -1
1186
+ A0161_10_73.bmp, -1, -1, -1
1187
+ A0161_10_74.bmp, -1, -1, -1
1188
+ A0161_10_75.bmp, -1, -1, -1
1189
+ A0192_10_00.bmp, -1, -1, -1
1190
+ A0192_10_01.bmp, -1, -1, -1
1191
+ A0192_10_02.bmp, -1, -1, -1
1192
+ A0192_10_03.bmp, -1, -1, -1
1193
+ A0192_10_04.bmp, -1, -1, -1
1194
+ A0192_10_05.bmp, -1, -1, -1
1195
+ A0192_10_06.bmp, -1, -1, -1
1196
+ A0192_10_07.bmp, -1, -1, -1
1197
+ A0192_10_08.bmp, -1, -1, -1
1198
+ A0192_10_10.bmp, -1, -1, -1
1199
+ A0192_10_11.bmp, -1, -1, -1
1200
+ A0192_10_12.bmp, -1, -1, -1
1201
+ A0192_10_14.bmp, -1, -1, -1
1202
+ A0192_10_15.bmp, -1, -1, -1
1203
+ A0192_10_16.bmp, -1, -1, -1
1204
+ A0192_10_17.bmp, -1, -1, -1
1205
+ A0192_10_18.bmp, -1, -1, -1
1206
+ A0192_10_19.bmp, -1, -1, -1
1207
+ A0192_10_20.bmp, -1, -1, -1
1208
+ A0192_10_21.bmp, -1, -1, -1
1209
+ A0192_10_24.bmp, -1, -1, -1
1210
+ A0192_10_26.bmp, -1, -1, -1
1211
+ A0192_10_27.bmp, -1, -1, -1
1212
+ A0192_10_28.bmp, -1, -1, -1
1213
+ A0192_10_29.bmp, -1, -1, -1
1214
+ A0192_10_30.bmp, -1, -1, -1
1215
+ A0192_10_31.bmp, -1, -1, -1
1216
+ A0192_10_32.bmp, -1, -1, -1
1217
+ A0192_10_34.bmp, -1, -1, -1
1218
+ A0192_10_35.bmp, -1, -1, -1
1219
+ A0192_10_36.bmp, -1, -1, -1
1220
+ A0192_10_37.bmp, -1, -1, -1
1221
+ A0192_10_38.bmp, -1, -1, -1
1222
+ A0192_10_39.bmp, -1, -1, -1
1223
+ A0192_10_40.bmp, -1, -1, -1
1224
+ A0192_10_41.bmp, -1, -1, -1
1225
+ A0192_10_42.bmp, -1, -1, -1
1226
+ A0192_10_43.bmp, -1, -1, -1
1227
+ A0192_10_44.bmp, -1, -1, -1
1228
+ A0192_10_46.bmp, -1, -1, -1
1229
+ A0192_10_47.bmp, -1, -1, -1
1230
+ A0192_10_48.bmp, -1, -1, -1
1231
+ A0192_10_49.bmp, -1, -1, -1
1232
+ A0192_10_50.bmp, -1, -1, -1
1233
+ A0192_10_51.bmp, -1, -1, -1
1234
+ A0192_10_52.bmp, -1, -1, -1
1235
+ A0192_10_53.bmp, -1, -1, -1
1236
+ A0192_10_54.bmp, -1, -1, -1
1237
+ A0192_10_55.bmp, -1, -1, -1
1238
+ A0192_10_56.bmp, -1, -1, -1
1239
+ A0192_10_58.bmp, -1, -1, -1
1240
+ A0192_10_61.bmp, -1, -1, -1
1241
+ A0192_10_62.bmp, -1, -1, -1
1242
+ A0192_10_63.bmp, -1, -1, -1
1243
+ A0192_10_64.bmp, -1, -1, -1
1244
+ A0192_10_65.bmp, -1, -1, -1
1245
+ A0192_10_66.bmp, -1, -1, -1
1246
+ A0192_10_67.bmp, -1, -1, -1
1247
+ A0192_10_68.bmp, -1, -1, -1
1248
+ A0192_10_69.bmp, -1, -1, -1
1249
+ A0192_10_70.bmp, -1, -1, -1
1250
+ A0192_10_71.bmp, -1, -1, -1
1251
+ A0192_10_72.bmp, -1, -1, -1
1252
+ A0192_10_73.bmp, -1, -1, -1
1253
+ A0192_10_74.bmp, -1, -1, -1
1254
+ A0192_10_75.bmp, -1, -1, -1
1255
+ A0196_10_00.bmp, -1, -1, -1
1256
+ A0196_10_01.bmp, -1, -1, -1
1257
+ A0196_10_02.bmp, -1, -1, -1
1258
+ A0196_10_03.bmp, -1, -1, -1
1259
+ A0196_10_04.bmp, -1, -1, -1
1260
+ A0196_10_05.bmp, -1, -1, -1
1261
+ A0196_10_06.bmp, -1, -1, -1
1262
+ A0196_10_07.bmp, -1, -1, -1
1263
+ A0196_10_08.bmp, -1, -1, -1
1264
+ A0196_10_10.bmp, -1, -1, -1
1265
+ A0196_10_11.bmp, -1, -1, -1
1266
+ A0196_10_12.bmp, -1, -1, -1
1267
+ A0196_10_14.bmp, -1, -1, -1
1268
+ A0196_10_15.bmp, -1, -1, -1
1269
+ A0196_10_16.bmp, -1, -1, -1
1270
+ A0196_10_17.bmp, -1, -1, -1
1271
+ A0196_10_18.bmp, -1, -1, -1
1272
+ A0196_10_19.bmp, -1, -1, -1
1273
+ A0196_10_20.bmp, -1, -1, -1
1274
+ A0196_10_21.bmp, -1, -1, -1
1275
+ A0196_10_24.bmp, -1, -1, -1
1276
+ A0196_10_26.bmp, -1, -1, -1
1277
+ A0196_10_27.bmp, -1, -1, -1
1278
+ A0196_10_28.bmp, -1, -1, -1
1279
+ A0196_10_29.bmp, -1, -1, -1
1280
+ A0196_10_30.bmp, -1, -1, -1
1281
+ A0196_10_31.bmp, -1, -1, -1
1282
+ A0196_10_32.bmp, -1, -1, -1
1283
+ A0196_10_34.bmp, -1, -1, -1
1284
+ A0196_10_35.bmp, -1, -1, -1
1285
+ A0196_10_36.bmp, -1, -1, -1
1286
+ A0196_10_37.bmp, -1, -1, -1
1287
+ A0196_10_38.bmp, -1, -1, -1
1288
+ A0196_10_39.bmp, -1, -1, -1
1289
+ A0196_10_40.bmp, -1, -1, -1
1290
+ A0196_10_41.bmp, -1, -1, -1
1291
+ A0196_10_42.bmp, -1, -1, -1
1292
+ A0196_10_43.bmp, -1, -1, -1
1293
+ A0196_10_44.bmp, -1, -1, -1
1294
+ A0196_10_46.bmp, -1, -1, -1
1295
+ A0196_10_47.bmp, -1, -1, -1
1296
+ A0196_10_48.bmp, -1, -1, -1
1297
+ A0196_10_49.bmp, -1, -1, -1
1298
+ A0196_10_50.bmp, -1, -1, -1
1299
+ A0196_10_51.bmp, -1, -1, -1
1300
+ A0196_10_52.bmp, -1, -1, -1
1301
+ A0196_10_53.bmp, -1, -1, -1
1302
+ A0196_10_54.bmp, -1, -1, -1
1303
+ A0196_10_55.bmp, -1, -1, -1
1304
+ A0196_10_56.bmp, -1, -1, -1
1305
+ A0196_10_58.bmp, -1, -1, -1
1306
+ A0196_10_61.bmp, -1, -1, -1
1307
+ A0196_10_62.bmp, -1, -1, -1
1308
+ A0196_10_63.bmp, -1, -1, -1
1309
+ A0196_10_64.bmp, -1, -1, -1
1310
+ A0196_10_65.bmp, -1, -1, -1
1311
+ A0196_10_66.bmp, -1, -1, -1
1312
+ A0196_10_67.bmp, -1, -1, -1
1313
+ A0196_10_68.bmp, -1, -1, -1
1314
+ A0196_10_69.bmp, -1, -1, -1
1315
+ A0196_10_70.bmp, -1, -1, -1
1316
+ A0196_10_71.bmp, -1, -1, -1
1317
+ A0196_10_72.bmp, -1, -1, -1
1318
+ A0196_10_73.bmp, -1, -1, -1
1319
+ A0196_10_74.bmp, -1, -1, -1
1320
+ A0196_10_75.bmp, -1, -1, -1
1321
+ A0202_10_00.bmp, -1, -1, -1
1322
+ A0202_10_01.bmp, -1, -1, -1
1323
+ A0202_10_02.bmp, -1, -1, -1
1324
+ A0202_10_03.bmp, -1, -1, -1
1325
+ A0202_10_04.bmp, -1, -1, -1
1326
+ A0202_10_05.bmp, -1, -1, -1
1327
+ A0202_10_06.bmp, -1, -1, -1
1328
+ A0202_10_07.bmp, -1, -1, -1
1329
+ A0202_10_08.bmp, -1, -1, -1
1330
+ A0202_10_10.bmp, -1, -1, -1
1331
+ A0202_10_11.bmp, -1, -1, -1
1332
+ A0202_10_12.bmp, -1, -1, -1
1333
+ A0202_10_14.bmp, -1, -1, -1
1334
+ A0202_10_15.bmp, -1, -1, -1
1335
+ A0202_10_16.bmp, -1, -1, -1
1336
+ A0202_10_17.bmp, -1, -1, -1
1337
+ A0202_10_18.bmp, -1, -1, -1
1338
+ A0202_10_19.bmp, -1, -1, -1
1339
+ A0202_10_20.bmp, -1, -1, -1
1340
+ A0202_10_21.bmp, -1, -1, -1
1341
+ A0202_10_24.bmp, -1, -1, -1
1342
+ A0202_10_26.bmp, -1, -1, -1
1343
+ A0202_10_27.bmp, -1, -1, -1
1344
+ A0202_10_28.bmp, -1, -1, -1
1345
+ A0202_10_29.bmp, -1, -1, -1
1346
+ A0202_10_30.bmp, -1, -1, -1
1347
+ A0202_10_31.bmp, -1, -1, -1
1348
+ A0202_10_32.bmp, -1, -1, -1
1349
+ A0202_10_34.bmp, -1, -1, -1
1350
+ A0202_10_35.bmp, -1, -1, -1
1351
+ A0202_10_36.bmp, -1, -1, -1
1352
+ A0202_10_37.bmp, -1, -1, -1
1353
+ A0202_10_38.bmp, -1, -1, -1
1354
+ A0202_10_39.bmp, -1, -1, -1
1355
+ A0202_10_40.bmp, -1, -1, -1
1356
+ A0202_10_41.bmp, -1, -1, -1
1357
+ A0202_10_42.bmp, -1, -1, -1
1358
+ A0202_10_43.bmp, -1, -1, -1
1359
+ A0202_10_44.bmp, -1, -1, -1
1360
+ A0202_10_46.bmp, -1, -1, -1
1361
+ A0202_10_47.bmp, -1, -1, -1
1362
+ A0202_10_48.bmp, -1, -1, -1
1363
+ A0202_10_49.bmp, -1, -1, -1
1364
+ A0202_10_50.bmp, -1, -1, -1
1365
+ A0202_10_51.bmp, -1, -1, -1
1366
+ A0202_10_52.bmp, -1, -1, -1
1367
+ A0202_10_53.bmp, -1, -1, -1
1368
+ A0202_10_54.bmp, -1, -1, -1
1369
+ A0202_10_55.bmp, -1, -1, -1
1370
+ A0202_10_56.bmp, -1, -1, -1
1371
+ A0202_10_58.bmp, -1, -1, -1
1372
+ A0202_10_61.bmp, -1, -1, -1
1373
+ A0202_10_62.bmp, -1, -1, -1
1374
+ A0202_10_63.bmp, -1, -1, -1
1375
+ A0202_10_64.bmp, -1, -1, -1
1376
+ A0202_10_65.bmp, -1, -1, -1
1377
+ A0202_10_66.bmp, -1, -1, -1
1378
+ A0202_10_67.bmp, -1, -1, -1
1379
+ A0202_10_68.bmp, -1, -1, -1
1380
+ A0202_10_69.bmp, -1, -1, -1
1381
+ A0202_10_70.bmp, -1, -1, -1
1382
+ A0202_10_71.bmp, -1, -1, -1
1383
+ A0202_10_72.bmp, -1, -1, -1
1384
+ A0202_10_73.bmp, -1, -1, -1
1385
+ A0202_10_74.bmp, -1, -1, -1
1386
+ A0202_10_75.bmp, -1, -1, -1
1387
+ A0217_10_00.bmp, -1, -1, -1
1388
+ A0217_10_01.bmp, -1, -1, -1
1389
+ A0217_10_02.bmp, -1, -1, -1
1390
+ A0217_10_03.bmp, -1, -1, -1
1391
+ A0217_10_04.bmp, -1, -1, -1
1392
+ A0217_10_05.bmp, -1, -1, -1
1393
+ A0217_10_06.bmp, -1, -1, -1
1394
+ A0217_10_07.bmp, -1, -1, -1
1395
+ A0217_10_08.bmp, -1, -1, -1
1396
+ A0217_10_10.bmp, -1, -1, -1
1397
+ A0217_10_11.bmp, -1, -1, -1
1398
+ A0217_10_12.bmp, -1, -1, -1
1399
+ A0217_10_14.bmp, -1, -1, -1
1400
+ A0217_10_15.bmp, -1, -1, -1
1401
+ A0217_10_16.bmp, -1, -1, -1
1402
+ A0217_10_17.bmp, -1, -1, -1
1403
+ A0217_10_18.bmp, -1, -1, -1
1404
+ A0217_10_19.bmp, -1, -1, -1
1405
+ A0217_10_20.bmp, -1, -1, -1
1406
+ A0217_10_21.bmp, -1, -1, -1
1407
+ A0217_10_24.bmp, -1, -1, -1
1408
+ A0217_10_26.bmp, -1, -1, -1
1409
+ A0217_10_27.bmp, -1, -1, -1
1410
+ A0217_10_28.bmp, -1, -1, -1
1411
+ A0217_10_29.bmp, -1, -1, -1
1412
+ A0217_10_30.bmp, -1, -1, -1
1413
+ A0217_10_31.bmp, -1, -1, -1
1414
+ A0217_10_32.bmp, -1, -1, -1
1415
+ A0217_10_34.bmp, -1, -1, -1
1416
+ A0217_10_35.bmp, -1, -1, -1
1417
+ A0217_10_36.bmp, -1, -1, -1
1418
+ A0217_10_37.bmp, -1, -1, -1
1419
+ A0217_10_38.bmp, -1, -1, -1
1420
+ A0217_10_39.bmp, -1, -1, -1
1421
+ A0217_10_40.bmp, -1, -1, -1
1422
+ A0217_10_41.bmp, -1, -1, -1
1423
+ A0217_10_42.bmp, -1, -1, -1
1424
+ A0217_10_43.bmp, -1, -1, -1
1425
+ A0217_10_44.bmp, -1, -1, -1
1426
+ A0217_10_46.bmp, -1, -1, -1
1427
+ A0217_10_47.bmp, -1, -1, -1
1428
+ A0217_10_48.bmp, -1, -1, -1
1429
+ A0217_10_49.bmp, -1, -1, -1
1430
+ A0217_10_50.bmp, -1, -1, -1
1431
+ A0217_10_51.bmp, -1, -1, -1
1432
+ A0217_10_52.bmp, -1, -1, -1
1433
+ A0217_10_53.bmp, -1, -1, -1
1434
+ A0217_10_54.bmp, -1, -1, -1
1435
+ A0217_10_55.bmp, -1, -1, -1
1436
+ A0217_10_56.bmp, -1, -1, -1
1437
+ A0217_10_58.bmp, -1, -1, -1
1438
+ A0217_10_61.bmp, -1, -1, -1
1439
+ A0217_10_62.bmp, -1, -1, -1
1440
+ A0217_10_63.bmp, -1, -1, -1
1441
+ A0217_10_64.bmp, -1, -1, -1
1442
+ A0217_10_65.bmp, -1, -1, -1
1443
+ A0217_10_66.bmp, -1, -1, -1
1444
+ A0217_10_67.bmp, -1, -1, -1
1445
+ A0217_10_68.bmp, -1, -1, -1
1446
+ A0217_10_69.bmp, -1, -1, -1
1447
+ A0217_10_70.bmp, -1, -1, -1
1448
+ A0217_10_71.bmp, -1, -1, -1
1449
+ A0217_10_72.bmp, -1, -1, -1
1450
+ A0217_10_73.bmp, -1, -1, -1
1451
+ A0217_10_74.bmp, -1, -1, -1
1452
+ A0217_10_75.bmp, -1, -1, -1
1453
+ A0219_10_00.bmp, -1, -1, -1
1454
+ A0219_10_01.bmp, -1, -1, -1
1455
+ A0219_10_02.bmp, -1, -1, -1
1456
+ A0219_10_03.bmp, -1, -1, -1
1457
+ A0219_10_04.bmp, -1, -1, -1
1458
+ A0219_10_05.bmp, -1, -1, -1
1459
+ A0219_10_06.bmp, -1, -1, -1
1460
+ A0219_10_07.bmp, -1, -1, -1
1461
+ A0219_10_08.bmp, -1, -1, -1
1462
+ A0219_10_10.bmp, -1, -1, -1
1463
+ A0219_10_11.bmp, -1, -1, -1
1464
+ A0219_10_12.bmp, -1, -1, -1
1465
+ A0219_10_14.bmp, -1, -1, -1
1466
+ A0219_10_15.bmp, -1, -1, -1
1467
+ A0219_10_16.bmp, -1, -1, -1
1468
+ A0219_10_17.bmp, -1, -1, -1
1469
+ A0219_10_18.bmp, -1, -1, -1
1470
+ A0219_10_19.bmp, -1, -1, -1
1471
+ A0219_10_20.bmp, -1, -1, -1
1472
+ A0219_10_21.bmp, -1, -1, -1
1473
+ A0219_10_24.bmp, -1, -1, -1
1474
+ A0219_10_26.bmp, -1, -1, -1
1475
+ A0219_10_27.bmp, -1, -1, -1
1476
+ A0219_10_28.bmp, -1, -1, -1
1477
+ A0219_10_29.bmp, -1, -1, -1
1478
+ A0219_10_30.bmp, -1, -1, -1
1479
+ A0219_10_31.bmp, -1, -1, -1
1480
+ A0219_10_32.bmp, -1, -1, -1
1481
+ A0219_10_34.bmp, -1, -1, -1
1482
+ A0219_10_35.bmp, -1, -1, -1
1483
+ A0219_10_36.bmp, -1, -1, -1
1484
+ A0219_10_37.bmp, -1, -1, -1
1485
+ A0219_10_38.bmp, -1, -1, -1
1486
+ A0219_10_39.bmp, -1, -1, -1
1487
+ A0219_10_40.bmp, -1, -1, -1
1488
+ A0219_10_41.bmp, -1, -1, -1
1489
+ A0219_10_42.bmp, -1, -1, -1
1490
+ A0219_10_43.bmp, -1, -1, -1
1491
+ A0219_10_44.bmp, -1, -1, -1
1492
+ A0219_10_46.bmp, -1, -1, -1
1493
+ A0219_10_47.bmp, -1, -1, -1
1494
+ A0219_10_48.bmp, -1, -1, -1
1495
+ A0219_10_49.bmp, -1, -1, -1
1496
+ A0219_10_50.bmp, -1, -1, -1
1497
+ A0219_10_51.bmp, -1, -1, -1
1498
+ A0219_10_52.bmp, -1, -1, -1
1499
+ A0219_10_53.bmp, -1, -1, -1
1500
+ A0219_10_54.bmp, -1, -1, -1
1501
+ A0219_10_55.bmp, -1, -1, -1
1502
+ A0219_10_56.bmp, -1, -1, -1
1503
+ A0219_10_58.bmp, -1, -1, -1
1504
+ A0219_10_61.bmp, -1, -1, -1
1505
+ A0219_10_62.bmp, -1, -1, -1
1506
+ A0219_10_63.bmp, -1, -1, -1
1507
+ A0219_10_64.bmp, -1, -1, -1
1508
+ A0219_10_65.bmp, -1, -1, -1
1509
+ A0219_10_66.bmp, -1, -1, -1
1510
+ A0219_10_67.bmp, -1, -1, -1
1511
+ A0219_10_68.bmp, -1, -1, -1
1512
+ A0219_10_69.bmp, -1, -1, -1
1513
+ A0219_10_70.bmp, -1, -1, -1
1514
+ A0219_10_71.bmp, -1, -1, -1
1515
+ A0219_10_72.bmp, -1, -1, -1
1516
+ A0219_10_73.bmp, -1, -1, -1
1517
+ A0219_10_74.bmp, -1, -1, -1
1518
+ A0219_10_75.bmp, -1, -1, -1
1519
+ A0235_10_00.bmp, -1, -1, -1
1520
+ A0235_10_01.bmp, -1, -1, -1
1521
+ A0235_10_02.bmp, -1, -1, -1
1522
+ A0235_10_03.bmp, -1, -1, -1
1523
+ A0235_10_04.bmp, -1, -1, -1
1524
+ A0235_10_05.bmp, -1, -1, -1
1525
+ A0235_10_06.bmp, -1, -1, -1
1526
+ A0235_10_07.bmp, -1, -1, -1
1527
+ A0235_10_08.bmp, -1, -1, -1
1528
+ A0235_10_10.bmp, -1, -1, -1
1529
+ A0235_10_11.bmp, -1, -1, -1
1530
+ A0235_10_12.bmp, -1, -1, -1
1531
+ A0235_10_14.bmp, -1, -1, -1
1532
+ A0235_10_15.bmp, -1, -1, -1
1533
+ A0235_10_16.bmp, -1, -1, -1
1534
+ A0235_10_17.bmp, -1, -1, -1
1535
+ A0235_10_18.bmp, -1, -1, -1
1536
+ A0235_10_19.bmp, -1, -1, -1
1537
+ A0235_10_20.bmp, -1, -1, -1
1538
+ A0235_10_21.bmp, -1, -1, -1
1539
+ A0235_10_24.bmp, -1, -1, -1
1540
+ A0235_10_26.bmp, -1, -1, -1
1541
+ A0235_10_27.bmp, -1, -1, -1
1542
+ A0235_10_28.bmp, -1, -1, -1
1543
+ A0235_10_29.bmp, -1, -1, -1
1544
+ A0235_10_30.bmp, -1, -1, -1
1545
+ A0235_10_31.bmp, -1, -1, -1
1546
+ A0235_10_32.bmp, -1, -1, -1
1547
+ A0235_10_34.bmp, -1, -1, -1
1548
+ A0235_10_35.bmp, -1, -1, -1
1549
+ A0235_10_36.bmp, -1, -1, -1
1550
+ A0235_10_37.bmp, -1, -1, -1
1551
+ A0235_10_38.bmp, -1, -1, -1
1552
+ A0235_10_39.bmp, -1, -1, -1
1553
+ A0235_10_40.bmp, -1, -1, -1
1554
+ A0235_10_41.bmp, -1, -1, -1
1555
+ A0235_10_42.bmp, -1, -1, -1
1556
+ A0235_10_43.bmp, -1, -1, -1
1557
+ A0235_10_44.bmp, -1, -1, -1
1558
+ A0235_10_46.bmp, -1, -1, -1
1559
+ A0235_10_47.bmp, -1, -1, -1
1560
+ A0235_10_48.bmp, -1, -1, -1
1561
+ A0235_10_49.bmp, -1, -1, -1
1562
+ A0235_10_50.bmp, -1, -1, -1
1563
+ A0235_10_51.bmp, -1, -1, -1
1564
+ A0235_10_52.bmp, -1, -1, -1
1565
+ A0235_10_53.bmp, -1, -1, -1
1566
+ A0235_10_54.bmp, -1, -1, -1
1567
+ A0235_10_55.bmp, -1, -1, -1
1568
+ A0235_10_56.bmp, -1, -1, -1
1569
+ A0235_10_58.bmp, -1, -1, -1
1570
+ A0235_10_61.bmp, -1, -1, -1
1571
+ A0235_10_62.bmp, -1, -1, -1
1572
+ A0235_10_63.bmp, -1, -1, -1
1573
+ A0235_10_64.bmp, -1, -1, -1
1574
+ A0235_10_65.bmp, -1, -1, -1
1575
+ A0235_10_66.bmp, -1, -1, -1
1576
+ A0235_10_67.bmp, -1, -1, -1
1577
+ A0235_10_68.bmp, -1, -1, -1
1578
+ A0235_10_69.bmp, -1, -1, -1
1579
+ A0235_10_70.bmp, -1, -1, -1
1580
+ A0235_10_71.bmp, -1, -1, -1
1581
+ A0235_10_72.bmp, -1, -1, -1
1582
+ A0235_10_73.bmp, -1, -1, -1
1583
+ A0235_10_74.bmp, -1, -1, -1
1584
+ A0235_10_75.bmp, -1, -1, -1
1585
+ A0249_10_00.bmp, -1, -1, -1
1586
+ A0249_10_01.bmp, -1, -1, -1
1587
+ A0249_10_02.bmp, -1, -1, -1
1588
+ A0249_10_03.bmp, -1, -1, -1
1589
+ A0249_10_04.bmp, -1, -1, -1
1590
+ A0249_10_05.bmp, -1, -1, -1
1591
+ A0249_10_06.bmp, -1, -1, -1
1592
+ A0249_10_07.bmp, -1, -1, -1
1593
+ A0249_10_08.bmp, -1, -1, -1
1594
+ A0249_10_10.bmp, -1, -1, -1
1595
+ A0249_10_11.bmp, -1, -1, -1
1596
+ A0249_10_12.bmp, -1, -1, -1
1597
+ A0249_10_14.bmp, -1, -1, -1
1598
+ A0249_10_15.bmp, -1, -1, -1
1599
+ A0249_10_16.bmp, -1, -1, -1
1600
+ A0249_10_17.bmp, -1, -1, -1
1601
+ A0249_10_18.bmp, -1, -1, -1
1602
+ A0249_10_19.bmp, -1, -1, -1
1603
+ A0249_10_20.bmp, -1, -1, -1
1604
+ A0249_10_21.bmp, -1, -1, -1
1605
+ A0249_10_24.bmp, -1, -1, -1
1606
+ A0249_10_26.bmp, -1, -1, -1
1607
+ A0249_10_27.bmp, -1, -1, -1
1608
+ A0249_10_28.bmp, -1, -1, -1
1609
+ A0249_10_29.bmp, -1, -1, -1
1610
+ A0249_10_30.bmp, -1, -1, -1
1611
+ A0249_10_31.bmp, -1, -1, -1
1612
+ A0249_10_32.bmp, -1, -1, -1
1613
+ A0249_10_34.bmp, -1, -1, -1
1614
+ A0249_10_35.bmp, -1, -1, -1
1615
+ A0249_10_36.bmp, -1, -1, -1
1616
+ A0249_10_37.bmp, -1, -1, -1
1617
+ A0249_10_38.bmp, -1, -1, -1
1618
+ A0249_10_39.bmp, -1, -1, -1
1619
+ A0249_10_40.bmp, -1, -1, -1
1620
+ A0249_10_41.bmp, -1, -1, -1
1621
+ A0249_10_42.bmp, -1, -1, -1
1622
+ A0249_10_43.bmp, -1, -1, -1
1623
+ A0249_10_44.bmp, -1, -1, -1
1624
+ A0249_10_46.bmp, -1, -1, -1
1625
+ A0249_10_47.bmp, -1, -1, -1
1626
+ A0249_10_48.bmp, -1, -1, -1
1627
+ A0249_10_49.bmp, -1, -1, -1
1628
+ A0249_10_50.bmp, -1, -1, -1
1629
+ A0249_10_51.bmp, -1, -1, -1
1630
+ A0249_10_52.bmp, -1, -1, -1
1631
+ A0249_10_53.bmp, -1, -1, -1
1632
+ A0249_10_54.bmp, -1, -1, -1
1633
+ A0249_10_55.bmp, -1, -1, -1
1634
+ A0249_10_56.bmp, -1, -1, -1
1635
+ A0249_10_58.bmp, -1, -1, -1
1636
+ A0249_10_61.bmp, -1, -1, -1
1637
+ A0249_10_62.bmp, -1, -1, -1
1638
+ A0249_10_63.bmp, -1, -1, -1
1639
+ A0249_10_64.bmp, -1, -1, -1
1640
+ A0249_10_65.bmp, -1, -1, -1
1641
+ A0249_10_66.bmp, -1, -1, -1
1642
+ A0249_10_67.bmp, -1, -1, -1
1643
+ A0249_10_68.bmp, -1, -1, -1
1644
+ A0249_10_69.bmp, -1, -1, -1
1645
+ A0249_10_70.bmp, -1, -1, -1
1646
+ A0249_10_71.bmp, -1, -1, -1
1647
+ A0249_10_72.bmp, -1, -1, -1
1648
+ A0249_10_73.bmp, -1, -1, -1
1649
+ A0249_10_74.bmp, -1, -1, -1
1650
+ A0249_10_75.bmp, -1, -1, -1
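
Note (not part of the original commit): every labels file added here uses the same comma-separated layout, "filename, col2, col3, label", where the two middle columns are apparently unused placeholders (-1) and the last column is either -1 (no score, as in the PIPAL_NTIRE22 entries above) or a MOS value (as in the YouTubeUGC entries below). The sketch that follows is a minimal, hypothetical reader for this format; it is not the repository's own dataset loader, and the helper name read_labels is an assumption for illustration.

# Minimal sketch (assumed format, not the repo's own loader):
# parse an examplar_data_labels style labels.txt where each line is
#   <filename>, <col2>, <col3>, <label>
# e.g. "A0089_10_16.bmp, -1, -1, -1" or
#      "Sports_2160P-4e9f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.405"
from typing import List, Tuple

def read_labels(path: str) -> List[Tuple[str, float]]:
    """Return (filename, label) pairs; label is -1.0 when no MOS is given."""
    entries: List[Tuple[str, float]] = []
    with open(path, "r") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # skip blank lines
            parts = [p.strip() for p in line.split(",")]
            if len(parts) < 4:
                continue  # skip malformed lines
            entries.append((parts[0], float(parts[3])))
    return entries

if __name__ == "__main__":
    # Hypothetical usage; the path mirrors one of the files added in this commit.
    for name, mos in read_labels("examplar_data_labels/YouTubeUGC/labels.txt")[:5]:
        print(name, mos)
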
examplar_data_labels/YouTubeUGC/labels.txt ADDED
@@ -0,0 +1,1147 @@
1
+ Sports_2160P-4e9f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.405
2
+ Animation_480P-7a31_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.487
3
+ CoverSong_480P-59f2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.418
4
+ Sports_360P-32d3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.012
5
+ LiveMusic_480P-38f3_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.973
6
+ Animation_480P-4b86_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.495
7
+ Sports_2160P-2568_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.401
8
+ NewsClip_480P-6615_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.510
9
+ Gaming_480P-6a5a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.468
10
+ Vlog_2160P-62b2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.390
11
+ NewsClip_360P-12fc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.902
12
+ Gaming_360P-0b98_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.140
13
+ Lecture_720P-4c87_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.743
14
+ Gaming_480P-61ee_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.631
15
+ HowTo_360P-041c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.214
16
+ CoverSong_1080P-0a86_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.228
17
+ Gaming_480P-0dbc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.973
18
+ Sports_360P-1d5c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.781
19
+ Vlog_1080P-1609_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.528
20
+ Gaming_1080P-51fc_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.377
21
+ HDR_2160P-664d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.753
22
+ Animation_480P-35ee_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.290
23
+ HowTo_720P-2cc6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.814
24
+ Animation_480P-073c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.407
25
+ HDR_1080P-33e3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.450
26
+ Gaming_1080P-0ce6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.968
27
+ Sports_2160P-3d85_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.517
28
+ TelevisionClip_360P-74dd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.580
29
+ Sports_2160P-1ddc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.466
30
+ Sports_720P-5e39_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.685
31
+ Animation_1080P-3e01_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.045
32
+ Gaming_720P-0fdb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.263
33
+ VerticalVideo_720P-665d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.023
34
+ Animation_360P-7856_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.751
35
+ Animation_1080P-209f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.189
36
+ Sports_360P-11b7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.401
37
+ Vlog_480P-5dfe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.800
38
+ NewsClip_480P-5a3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.237
39
+ LiveMusic_480P-4f88_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.267
40
+ NewsClip_1080P-7816_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.855
41
+ HowTo_360P-2074_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.536
42
+ Lecture_480P-4bc3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.930
43
+ Lecture_360P-3590_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.703
44
+ LyricVideo_720P-59ed_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.315
45
+ Animation_1080P-05f8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.901
46
+ Gaming_480P-14fc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.822
47
+ NewsClip_720P-6016_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.522
48
+ HDR_2160P-4581_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.979
49
+ MusicVideo_720P-7bcf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.536
50
+ VerticalVideo_1080P-6e19_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.840
51
+ Vlog_360P-22a0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.286
52
+ HowTo_480P-15c1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.264
53
+ MusicVideo_480P-3c8b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.908
54
+ Sports_2160P-49f1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.287
55
+ LiveMusic_480P-0638_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.909
56
+ LiveMusic_480P-58fb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.502
57
+ HDR_2160P-15e2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.191
58
+ Gaming_1080P-698a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.743
59
+ CoverSong_1080P-0188_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.383
60
+ Gaming_2160P-3a25_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.502
61
+ TelevisionClip_1080P-3b9b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.251
62
+ HDR_1080P-3a4a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.824
63
+ Gaming_1080P-12d4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.494
64
+ Gaming_480P-75f7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.798
65
+ Gaming_2160P-6cd8_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.310
66
+ Animation_720P-412a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.249
67
+ CoverSong_720P-0239_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.520
68
+ NewsClip_480P-606e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.921
69
+ Sports_2160P-300d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.670
70
+ VerticalVideo_360P-42f1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.603
71
+ MusicVideo_480P-12fb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.059
72
+ Sports_2160P-69b9_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.564
73
+ VerticalVideo_480P-550d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.832
74
+ Animation_360P-4edc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.682
75
+ TelevisionClip_1080P-3e42_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.989
76
+ CoverSong_360P-11f9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.790
77
+ TelevisionClip_720P-1b61_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.299
78
+ NewsClip_360P-311a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.447
79
+ CoverSong_720P-6b8c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.988
80
+ Vlog_2160P-6629_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.270
81
+ LyricVideo_720P-068d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.825
82
+ HowTo_480P-4b6a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.763
83
+ Animation_1080P-646f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.376
84
+ Sports_360P-3960_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.787
85
+ CoverSong_360P-59d1_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.828
86
+ VerticalVideo_1080P-2f00_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.360
87
+ Vlog_360P-4697_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.551
88
+ Vlog_480P-206f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.779
89
+ Sports_2160P-1733_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.373
90
+ MusicVideo_1080P-0860_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.963
91
+ Gaming_360P-3794_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.475
92
+ VerticalVideo_720P-1ada_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.532
93
+ CoverSong_1080P-3499_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.193
94
+ CoverSong_480P-0170_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.118
95
+ VerticalVideo_480P-790a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.248
96
+ Gaming_720P-1cbb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.010
97
+ Animation_480P-1da7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.183
98
+ Gaming_1080P-72c8_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.366
99
+ Lecture_480P-71c0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.249
100
+ VerticalVideo_360P-0d29_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.809
101
+ Gaming_1080P-6dc6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.602
102
+ LiveMusic_360P-6640_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.110
103
+ Vlog_720P-6d56_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.776
104
+ HowTo_720P-4d9e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.688
105
+ LiveMusic_360P-5b57_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.773
106
+ NewsClip_720P-4e87_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.368
107
+ NewsClip_480P-7232_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.991
108
+ HDR_1080P-206d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.791
109
+ LiveMusic_360P-48d5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.325
110
+ Animation_1080P-58d3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.243
111
+ LiveMusic_480P-6542_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.249
112
+ HDR_1080P-49d6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.589
113
+ Gaming_480P-0a03_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.676
114
+ Lecture_720P-2f8c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.839
115
+ NewsClip_480P-4e77_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.450
116
+ Gaming_360P-48b0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.427
117
+ TelevisionClip_720P-31ce_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.324
118
+ Animation_720P-2525_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.730
119
+ Lecture_480P-181a_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.785
120
+ CoverSong_1080P-1963_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.882
121
+ HDR_1080P-7825_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.514
122
+ NewsClip_1080P-5b53_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.450
123
+ Gaming_2160P-31f6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.311
124
+ VerticalVideo_480P-572a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.499
125
+ LiveMusic_720P-6343_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.304
126
+ CoverSong_360P-2b4d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.796
127
+ Gaming_1080P-5ae7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.328
128
+ LiveMusic_1080P-3549_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.906
129
+ VerticalVideo_360P-3936_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.794
130
+ HowTo_480P-7c11_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.267
131
+ Lecture_480P-02f2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.261
132
+ Gaming_2160P-2436_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.473
133
+ Vlog_720P-561e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.438
134
+ Animation_1080P-2fbe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.346
135
+ Vlog_720P-372d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.047
136
+ HowTo_1080P-7cf2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.717
137
+ Vlog_480P-59dc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.206
138
+ HowTo_1080P-7399_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.194
139
+ MusicVideo_1080P-24f5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.283
140
+ Vlog_480P-7754_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.944
141
+ MusicVideo_480P-0448_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.453
142
+ TelevisionClip_1080P-3d10_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.052
143
+ LiveMusic_360P-2508_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.004
144
+ Sports_720P-058f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.280
145
+ Gaming_1080P-4865_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.355
146
+ LyricVideo_720P-739a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.877
147
+ Gaming_480P-7ccb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.581
148
+ Vlog_2160P-19f9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.919
149
+ HowTo_720P-21c6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.838
150
+ Vlog_480P-6395_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.755
151
+ VerticalVideo_360P-6490_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.973
152
+ Animation_720P-7e7d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.589
153
+ LyricVideo_480P-4346_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.193
154
+ MusicVideo_720P-62df_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.246
155
+ Sports_2160P-5ef3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.209
156
+ TelevisionClip_1080P-525d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.273
157
+ TelevisionClip_480P-4c6b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.850
158
+ CoverSong_480P-60a6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.369
159
+ CoverSong_720P-6d6a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.520
160
+ Lecture_720P-6016_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.043
161
+ Sports_480P-1056_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.149
162
+ Vlog_360P-433e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.198
163
+ HDR_1080P-35f4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.160
164
+ Gaming_720P-469a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.124
165
+ MusicVideo_360P-5f8d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.437
166
+ Sports_2160P-2eda_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.307
167
+ LyricVideo_480P-1484_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.145
168
+ NewsClip_360P-66ae_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.218
169
+ LyricVideo_1080P-0625_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.300
170
+ Lecture_360P-30eb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.350
171
+ Gaming_2160P-387f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.513
172
+ Vlog_1080P-7062_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.944
173
+ Vlog_2160P-2953_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.467
174
+ Lecture_720P-2f38_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.552
175
+ Sports_360P-3e68_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.508
176
+ NewsClip_480P-543f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.423
177
+ LiveMusic_720P-0d9f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.319
178
+ LyricVideo_720P-2d24_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.199
179
+ CoverSong_1080P-1b08_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.812
180
+ Gaming_1080P-6530_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.920
181
+ LiveMusic_480P-3d03_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.729
182
+ Sports_720P-38a1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.165
183
+ Lecture_1080P-0201_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.826
184
+ Vlog_2160P-7b5c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.610
185
+ Sports_480P-5f38_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.771
186
+ Animation_1080P-2fff_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.369
187
+ Sports_1080P-76a2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.054
188
+ HDR_1080P-1be2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.756
189
+ VerticalVideo_1080P-3d96_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.452
190
+ Sports_2160P-391a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.281
191
+ VerticalVideo_720P-0f61_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.522
192
+ Vlog_1080P-18da_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.442
193
+ Vlog_2160P-030a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.448
194
+ Gaming_1080P-35fa_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.035
195
+ VerticalVideo_1080P-1105_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.828
196
+ CoverSong_360P-5a24_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.789
197
+ LyricVideo_480P-0a5b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.247
198
+ MusicVideo_360P-7b94_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.440
199
+ Sports_480P-3195_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.537
200
+ Sports_2160P-3794_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.158
201
+ Lecture_360P-6d29_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.990
202
+ MusicVideo_1080P-2d1c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.616
203
+ LiveMusic_360P-3be0_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.720
204
+ MusicVideo_360P-5699_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.776
205
+ CoverSong_1080P-5430_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.969
206
+ Gaming_1080P-190d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.898
207
+ MusicVideo_360P-24b0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.512
208
+ Vlog_720P-135c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.872
209
+ Sports_1080P-08e1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.224
210
+ NewsClip_480P-2642_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.688
211
+ NewsClip_1080P-22b3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.812
212
+ Gaming_720P-324d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.145
213
+ Lecture_1080P-1969_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.649
214
+ Sports_360P-1803_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.593
215
+ HDR_1080P-548b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.626
216
+ Sports_360P-6c6f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.556
217
+ Sports_480P-3404_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.533
218
+ NewsClip_720P-5d89_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.480
219
+ NewsClip_360P-1eae_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.467
220
+ Vlog_720P-5d08_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.783
221
+ HowTo_480P-56a5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.085
222
+ Gaming_480P-7a08_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.792
223
+ Animation_720P-13b7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.318
224
+ VerticalVideo_1080P-1c86_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.848
225
+ LiveMusic_720P-653e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.112
226
+ TelevisionClip_1080P-0604_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.765
227
+ Sports_2160P-7a11_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.373
228
+ TelevisionClip_1080P-6282_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.199
229
+ CoverSong_720P-4006_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.414
230
+ CoverSong_1080P-0f3a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.358
231
+ LiveMusic_1080P-6fe2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.281
232
+ Animation_1080P-6a33_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.052
233
+ Vlog_720P-32b2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.365
234
+ NewsClip_360P-1b1c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.514
235
+ VerticalVideo_360P-2fa3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.331
236
+ MusicVideo_1080P-453f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.372
237
+ Gaming_1080P-2927_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.441
238
+ Animation_720P-7b29_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.862
239
+ Lecture_360P-27db_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.092
240
+ NewsClip_720P-6106_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.996
241
+ Animation_2160P-6f3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.082
242
+ Vlog_480P-2cad_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.244
243
+ Sports_2160P-279f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.526
244
+ TelevisionClip_480P-59f0_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.896
245
+ CoverSong_480P-64d0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.917
246
+ HowTo_480P-470b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.858
247
+ Lecture_480P-41b7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.328
248
+ VerticalVideo_480P-2aa1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.402
249
+ TelevisionClip_480P-19d3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.442
250
+ HDR_1080P-6eb7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.046
251
+ Sports_720P-3eb4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.802
252
+ HDR_2160P-6eeb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.402
253
+ Vlog_2160P-6f92_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.886
254
+ Animation_1080P-0c4f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.940
255
+ Animation_720P-79ee_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.197
256
+ TelevisionClip_720P-5e93_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.292
257
+ Lecture_720P-1033_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.541
258
+ Gaming_480P-4560_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.904
259
+ Lecture_720P-07e0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.082
260
+ HowTo_720P-06eb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.130
261
+ CoverSong_1080P-5e69_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.290
262
+ Lecture_480P-74ea_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.258
263
+ Gaming_720P-5973_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.067
264
+ TelevisionClip_480P-436c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.823
265
+ Sports_480P-0827_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.914
266
+ Gaming_1080P-13e3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.434
267
+ LiveMusic_720P-3320_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.450
268
+ HDR_2160P-4dcd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.763
269
+ LiveMusic_1080P-28fe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.693
270
+ Vlog_1080P-52fe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.838
271
+ HowTo_720P-37d0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.429
272
+ LiveMusic_1080P-6bbe_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.329
273
+ Animation_1080P-18f5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.158
274
+ CoverSong_720P-7539_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.348
275
+ Lecture_720P-11e7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.569
276
+ LiveMusic_480P-4c3a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.907
277
+ Gaming_1080P-223e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.283
278
+ NewsClip_360P-22ce_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.698
279
+ Lecture_480P-2ed0_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.453
280
+ Animation_720P-4268_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.846
281
+ MusicVideo_480P-3aa2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.767
282
+ NewsClip_480P-4072_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.481
283
+ Vlog_720P-0d79_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.805
284
+ Sports_1080P-1bf7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.490
285
+ Vlog_1080P-62fc_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.255
286
+ CoverSong_720P-6626_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.316
287
+ HowTo_360P-127f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.332
288
+ Animation_480P-6ef6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.607
289
+ Sports_1080P-0063_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.447
290
+ Gaming_720P-7afd_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.486
291
+ NewsClip_360P-4288_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.393
292
+ HowTo_1080P-36a9_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.427
293
+ Lecture_720P-2b0f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.852
294
+ Gaming_480P-6f4b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.670
295
+ Sports_720P-00a1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.061
296
+ MusicVideo_720P-2d7d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.988
297
+ Gaming_360P-215f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.869
298
+ Gaming_360P-73c7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.247
299
+ Lecture_360P-2276_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.742
300
+ CoverSong_360P-4add_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.855
301
+ MusicVideo_480P-5830_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.045
302
+ Gaming_720P-4cda_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.620
303
+ Sports_2160P-0455_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.287
304
+ TelevisionClip_1080P-68c6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.561
305
+ LiveMusic_480P-2a5e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.089
306
+ Lecture_720P-003a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.812
307
+ MusicVideo_480P-1eee_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.061
308
+ Gaming_720P-2dbe_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.327
309
+ Vlog_1080P-3744_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.948
310
+ Gaming_2160P-34a1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.414
311
+ MusicVideo_720P-4895_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.173
312
+ NewsClip_360P-0ff8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.518
313
+ Gaming_1080P-6d53_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.821
314
+ MusicVideo_360P-4de7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.968
315
+ Animation_360P-4b4c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.003
316
+ Lecture_720P-1f22_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.233
317
+ HowTo_480P-04f1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.454
318
+ Lecture_480P-6191_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.502
319
+ TelevisionClip_360P-7b23_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.997
320
+ Vlog_480P-0ddc_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.075
321
+ Vlog_2160P-7f05_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.111
322
+ TelevisionClip_480P-2ead_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.371
323
+ NewsClip_720P-6cde_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.297
324
+ Gaming_1080P-777b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.050
325
+ Sports_720P-69a0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.878
326
+ Gaming_1080P-3bf7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.245
327
+ Vlog_1080P-35cd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.196
328
+ NewsClip_720P-6a19_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.547
329
+ Vlog_2160P-56e4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.289
330
+ Gaming_2160P-7e80_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.108
331
+ Sports_480P-11ec_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.612
332
+ TelevisionClip_720P-19de_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.346
333
+ NewsClip_1080P-632e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.517
334
+ CoverSong_360P-69ef_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.145
335
+ Lecture_720P-53a1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.586
336
+ Lecture_480P-11df_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.514
337
+ MusicVideo_1080P-6c88_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.318
338
+ LyricVideo_480P-3b2c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.472
339
+ TelevisionClip_720P-7c06_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.396
340
+ Vlog_2160P-09c9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.876
341
+ NewsClip_480P-36d0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.764
342
+ LyricVideo_1080P-5461_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.031
343
+ LiveMusic_1080P-14af_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.450
344
+ HowTo_1080P-52bb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.272
345
+ MusicVideo_480P-6026_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.033
346
+ TelevisionClip_1080P-39e3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.607
347
+ Gaming_480P-7e76_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.397
348
+ Gaming_360P-3eb6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.184
349
+ Lecture_480P-71d6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.275
350
+ Sports_480P-41a5_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.970
351
+ LyricVideo_1080P-1d75_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.299
352
+ HDR_2160P-6ed1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.724
353
+ Vlog_1080P-2394_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.641
354
+ HowTo_480P-7579_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.079
355
+ VerticalVideo_480P-1bb9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.825
356
+ Gaming_1080P-29b1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.829
357
+ NewsClip_360P-0376_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.095
358
+ HowTo_720P-111e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.938
359
+ LyricVideo_480P-3ccf_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.639
360
+ Gaming_1080P-45fc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.695
361
+ Gaming_2160P-4139_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.923
362
+ Lecture_360P-6656_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.458
363
+ LyricVideo_720P-4253_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.066
364
+ Lecture_720P-3b7f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.106
365
+ LyricVideo_720P-5d3a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.939
366
+ Vlog_480P-535d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.846
367
+ HowTo_360P-7fb1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.322
368
+ HowTo_720P-12f7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.300
369
+ Sports_720P-2c06_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.978
370
+ HowTo_1080P-13aa_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.336
371
+ Sports_720P-7ba7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.566
372
+ Vlog_1080P-1e70_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.442
373
+ Vlog_2160P-342a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.321
374
+ Animation_360P-631c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.236
375
+ NewsClip_480P-15fa_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.581
376
+ HowTo_720P-0c47_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.364
377
+ Sports_720P-3338_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.483
378
+ Sports_360P-5ded_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.784
379
+ NewsClip_1080P-67dc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.977
380
+ Gaming_480P-6548_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.871
381
+ LyricVideo_480P-200a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.063
382
+ LyricVideo_720P-1c89_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.930
383
+ NewsClip_1080P-3c7c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.462
384
+ CoverSong_1080P-3409_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.331
385
+ Gaming_1080P-6e45_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.517
386
+ LiveMusic_720P-65f6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.019
387
+ HowTo_720P-0b01_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.111
388
+ Gaming_360P-2164_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.245
389
+ Vlog_2160P-3019_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.578
390
+ MusicVideo_360P-17e4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.233
391
+ Vlog_2160P-4419_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.324
392
+ TelevisionClip_1080P-5e68_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.604
393
+ HDR_1080P-2d32_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.503
394
+ Lecture_480P-369f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.193
395
+ Animation_480P-66c0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.453
396
+ CoverSong_480P-3e4b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.877
397
+ Sports_1080P-0d0c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.980
398
+ Lecture_480P-7205_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.417
399
+ LyricVideo_720P-6fc9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.671
400
+ HowTo_480P-2610_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.981
401
+ NewsClip_720P-1971_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.272
402
+ Lecture_360P-7a57_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.493
403
+ Sports_360P-02c3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.396
404
+ Gaming_720P-0fba_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.089
405
+ NewsClip_360P-1e1c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.254
406
+ Sports_360P-6f62_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.594
407
+ VerticalVideo_480P-3a6a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.337
408
+ Sports_480P-5224_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.866
409
+ Gaming_2160P-3ce4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.529
410
+ CoverSong_1080P-5cae_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.209
411
+ Gaming_480P-6cdc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.425
412
+ Lecture_360P-7f7e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.679
413
+ HowTo_1080P-0267_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.234
414
+ Gaming_360P-043e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.582
415
+ Sports_720P-17fb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.591
416
+ VerticalVideo_720P-7859_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.603
417
+ LyricVideo_480P-2c50_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.985
418
+ Vlog_2160P-522f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.324
419
+ Lecture_360P-03bc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.762
420
+ Gaming_480P-0d23_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.466
421
+ Gaming_1080P-0ef8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.781
422
+ Gaming_480P-09ab_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.795
423
+ Sports_1080P-28a6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.452
424
+ Vlog_1080P-1a53_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.474
425
+ HowTo_480P-0d00_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.998
426
+ Gaming_1080P-6578_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.399
427
+ MusicVideo_1080P-55af_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.748
428
+ Sports_720P-2191_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.789
429
+ VerticalVideo_2160P-6d62_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.199
430
+ MusicVideo_1080P-65c6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.066
431
+ Vlog_720P-11c5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.816
432
+ Animation_2160P-41dc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.512
433
+ Sports_720P-5ea4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.238
434
+ MusicVideo_720P-4ad2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.756
435
+ CoverSong_1080P-5456_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.736
436
+ CoverSong_480P-53f4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.169
437
+ Sports_1080P-7584_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.755
438
+ Vlog_360P-4795_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.479
439
+ HowTo_360P-7dcd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.035
440
+ Vlog_1080P-5904_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.365
441
+ MusicVideo_480P-5461_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.272
442
+ Gaming_720P-221d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.252
443
+ Vlog_1080P-45c9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.888
444
+ MusicVideo_360P-5f07_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.921
445
+ VerticalVideo_480P-2fa4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.133
446
+ Sports_1080P-43e2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.414
447
+ HowTo_480P-32c9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.510
448
+ Sports_720P-0b9e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.942
449
+ Animation_720P-0116_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.054
450
+ Gaming_720P-25aa_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.952
451
+ Vlog_2160P-700c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.881
452
+ Animation_360P-3e52_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.651
453
+ LyricVideo_1080P-2946_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.177
454
+ CoverSong_1080P-3df8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.827
455
+ Gaming_2160P-348d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.443
456
+ VerticalVideo_1080P-766b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.305
457
+ Animation_480P-4e36_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.303
458
+ VerticalVideo_480P-51b7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.299
459
+ TelevisionClip_480P-3284_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.169
460
+ HDR_2160P-6fab_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.443
461
+ LyricVideo_1080P-0075_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.814
462
+ Sports_720P-4c6f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.105
463
+ Lecture_360P-2d1f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.620
464
+ LiveMusic_1080P-2f7f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.326
465
+ LyricVideo_480P-6fca_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.572
466
+ Vlog_2160P-4655_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.887
467
+ Lecture_720P-6951_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.163
468
+ TelevisionClip_480P-373d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.044
469
+ NewsClip_720P-72d2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.357
470
+ LiveMusic_360P-3b96_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.358
471
+ Sports_360P-2ace_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.444
472
+ CoverSong_480P-2142_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.250
473
+ Lecture_480P-42c3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.451
474
+ Lecture_720P-0f64_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.580
475
+ HowTo_480P-4948_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.551
476
+ CoverSong_720P-449f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.496
477
+ LyricVideo_720P-0940_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.045
478
+ Vlog_1080P-34cb_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.428
479
+ Vlog_1080P-7e8c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.042
480
+ LyricVideo_360P-5e87_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.999
481
+ Gaming_480P-5a5a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.478
482
+ CoverSong_480P-1109_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.826
483
+ Animation_480P-3fdf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.465
484
+ MusicVideo_720P-14f0_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.382
485
+ Gaming_1080P-173a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.013
486
+ Animation_720P-06a6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.942
487
+ Gaming_360P-2330_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.607
488
+ Sports_720P-531c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.555
489
+ NewsClip_480P-437c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.011
490
+ Sports_360P-27b0_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.820
491
+ VerticalVideo_360P-7ba8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.563
492
+ Animation_720P-2abf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.615
493
+ Lecture_480P-4272_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.457
494
+ Lecture_480P-3bdc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.168
495
+ HDR_1080P-0d31_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.373
496
+ CoverSong_720P-10f1_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.805
497
+ TelevisionClip_1080P-5278_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.099
498
+ CoverSong_1080P-237f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.893
499
+ TelevisionClip_480P-27ca_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.475
500
+ Gaming_1080P-2e97_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.391
501
+ NewsClip_720P-739b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.007
502
+ VerticalVideo_720P-7c1d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.502
503
+ HowTo_720P-7878_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.674
504
+ Vlog_480P-6c60_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.487
505
+ Animation_720P-3adc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.575
506
+ Lecture_480P-1ec3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.186
507
+ Sports_480P-7f7e_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.953
508
+ TelevisionClip_480P-3617_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.312
509
+ MusicVideo_480P-66b2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.709
510
+ NewsClip_480P-31bd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.610
511
+ LiveMusic_360P-2b0f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.401
512
+ LiveMusic_480P-2019_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.631
513
+ Animation_480P-046c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.150
514
+ Vlog_720P-329f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.700
515
+ Gaming_1080P-564e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.322
516
+ Animation_360P-69e0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.382
517
+ MusicVideo_720P-0752_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.365
518
+ LiveMusic_1080P-7948_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.838
519
+ CoverSong_360P-6d6a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.273
520
+ HowTo_1080P-6cbf_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.915
521
+ Vlog_1080P-4f26_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.962
522
+ Sports_360P-0c66_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.097
523
+ Lecture_360P-506c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.168
524
+ MusicVideo_1080P-04b6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.288
525
+ HDR_1080P-3521_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.895
526
+ Sports_360P-6b4f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.981
527
+ Animation_720P-620f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.522
528
+ Sports_2160P-6b28_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.391
529
+ MusicVideo_480P-6fb6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.611
530
+ Lecture_720P-10bc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.116
531
+ Sports_1080P-3a3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.669
532
+ NewsClip_1080P-4241_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.936
533
+ LiveMusic_360P-7483_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.063
534
+ HDR_2160P-70ca_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.905
535
+ Animation_720P-41d6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.148
536
+ CoverSong_360P-0aa2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.511
537
+ TelevisionClip_480P-3c64_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.456
538
+ Sports_2160P-2a83_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.002
539
+ Vlog_2160P-255c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.443
540
+ CoverSong_480P-5b62_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.056
541
+ Gaming_720P-5ba2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.281
542
+ VerticalVideo_360P-7ec3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.909
543
+ Animation_1080P-4be3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.565
544
+ LiveMusic_480P-61ef_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.153
545
+ NewsClip_360P-7a96_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.291
546
+ Sports_480P-4cf8_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.023
547
+ Gaming_480P-0109_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.436
548
+ Animation_360P-794f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.362
549
+ Vlog_2160P-7bfb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.206
550
+ CoverSong_480P-2d2c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.837
551
+ VerticalVideo_720P-6580_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.979
552
+ NewsClip_480P-696e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.102
553
+ VerticalVideo_720P-7517_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.271
554
+ Gaming_720P-312f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.826
555
+ TelevisionClip_480P-723e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.882
556
+ CoverSong_480P-3e92_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.235
557
+ LyricVideo_1080P-16b6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.381
558
+ VerticalVideo_360P-1424_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.732
559
+ HowTo_720P-0a67_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.315
560
+ Gaming_2160P-2cb1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.481
561
+ NewsClip_720P-672c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.987
562
+ NewsClip_720P-04ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.490
563
+ CoverSong_360P-5d20_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.943
564
+ Vlog_2160P-3a75_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.212
565
+ Vlog_360P-6279_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.260
566
+ Vlog_360P-1e5b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.266
567
+ VerticalVideo_1080P-3a9b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.432
568
+ Sports_480P-44e5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.170
569
+ HDR_2160P-06ae_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.474
570
+ Sports_480P-35ea_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.668
571
+ MusicVideo_720P-3c4c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.135
572
+ Vlog_1080P-19bb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.076
573
+ NewsClip_360P-439a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.282
574
+ Vlog_720P-03d5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.605
575
+ Gaming_2160P-5c53_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.309
576
+ TelevisionClip_1080P-401e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.227
577
+ CoverSong_480P-35e1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.608
578
+ Sports_2160P-7af7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.950
579
+ Sports_360P-5252_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.538
580
+ Sports_480P-3ca4_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.389
581
+ Vlog_2160P-13a1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.859
582
+ Gaming_2160P-4258_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.067
583
+ Animation_1080P-0cdf_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.245
584
+ Lecture_360P-4f00_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.172
585
+ Animation_720P-0acc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.876
586
+ CoverSong_360P-67ec_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.404
587
+ VerticalVideo_360P-634f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.523
588
+ NewsClip_360P-5f3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.298
589
+ MusicVideo_360P-462f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.510
590
+ Sports_360P-50fd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.011
591
+ CoverSong_1080P-1b0c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.763
592
+ Sports_720P-3ffe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.011
593
+ Sports_1080P-4e05_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.445
594
+ Vlog_2160P-1ff6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.290
595
+ Gaming_1080P-6db2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.475
596
+ Gaming_480P-3a44_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.043
597
+ NewsClip_480P-0ce5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.265
598
+ Sports_360P-4802_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.582
599
+ Sports_720P-2632_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.884
600
+ NewsClip_480P-2b9b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.421
601
+ HDR_1080P-687e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.629
602
+ Animation_480P-791b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.286
603
+ HowTo_360P-09f8_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.951
604
+ MusicVideo_720P-575d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.242
605
+ Vlog_480P-1b39_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.325
606
+ HDR_1080P-601b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.318
607
+ Sports_2160P-210c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.462
608
+ HowTo_1080P-63ec_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.229
609
+ HowTo_360P-4276_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.448
610
+ Vlog_480P-4beb_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.512
611
+ NewsClip_720P-579b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.866
612
+ LyricVideo_1080P-1b04_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.803
613
+ HowTo_720P-6791_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.263
614
+ LiveMusic_360P-54d0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.559
615
+ Vlog_480P-279d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.373
616
+ VerticalVideo_1080P-2195_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.891
617
+ NewsClip_1080P-00c3_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.421
618
+ HowTo_1080P-03fd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.517
619
+ NewsClip_360P-2986_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.902
620
+ NewsClip_1080P-06df_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.489
621
+ LyricVideo_1080P-4b40_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.636
622
+ CoverSong_1080P-2207_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.675
623
+ MusicVideo_1080P-16e6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.349
624
+ Sports_2160P-7165_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.351
625
+ CoverSong_720P-6d81_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.145
626
+ MusicVideo_720P-5c9c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.471
627
+ Gaming_480P-1542_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.776
628
+ Vlog_360P-7efe_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.912
629
+ LyricVideo_720P-09ed_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.026
630
+ HowTo_360P-6a0e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.104
631
+ HDR_2160P-3663_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.340
632
+ LiveMusic_720P-0cd5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.042
633
+ VerticalVideo_720P-4ca7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.442
634
+ VerticalVideo_360P-145e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.606
635
+ Lecture_360P-5aea_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.088
636
+ LiveMusic_360P-6a65_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.427
637
+ HowTo_360P-2a3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.589
638
+ Sports_480P-2dfe_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.401
639
+ LiveMusic_1080P-21dd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.819
640
+ Sports_1080P-7dba_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.718
641
+ Vlog_720P-343d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.667
642
+ Sports_480P-0af9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.765
643
+ Sports_2160P-4201_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.012
644
+ Sports_480P-0623_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.106
645
+ HowTo_720P-7782_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.761
646
+ VerticalVideo_720P-4730_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.107
647
+ NewsClip_360P-5752_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.346
648
+ Sports_2160P-086d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.376
649
+ LyricVideo_480P-0f72_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.064
650
+ Vlog_1080P-64b6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.192
651
+ LiveMusic_1080P-3f95_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.050
652
+ TelevisionClip_480P-09d8_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.908
653
+ Sports_1080P-19d8_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.472
654
+ Gaming_720P-6625_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.813
655
+ MusicVideo_1080P-7706_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.925
656
+ LyricVideo_360P-3afc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.685
657
+ NewsClip_720P-7e56_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.410
658
+ HowTo_1080P-1f3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.185
659
+ NewsClip_480P-2ba7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.791
660
+ Vlog_480P-7d0c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.577
661
+ Lecture_480P-6b1e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.368
662
+ Sports_480P-6508_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.448
663
+ VerticalVideo_720P-0750_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.710
664
+ NewsClip_720P-7b97_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.803
665
+ LyricVideo_720P-47a9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.816
666
+ Lecture_720P-3f9f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.479
667
+ Sports_2160P-2e1d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.457
668
+ LiveMusic_360P-6266_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.383
669
+ VerticalVideo_360P-2bb0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.073
670
+ Animation_720P-01b3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.638
671
+ MusicVideo_480P-184c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.726
672
+ TelevisionClip_720P-3f4c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.515
673
+ Sports_2160P-1b70_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.508
674
+ Gaming_360P-63e6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.636
675
+ Sports_2160P-4024_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.054
676
+ Sports_360P-2725_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.862
677
+ Vlog_1080P-1df9_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.134
678
+ Gaming_2160P-2b92_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.632
679
+ CoverSong_360P-14b9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.633
680
+ Gaming_360P-586d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.026
681
+ Gaming_720P-40b2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.855
682
+ Gaming_720P-6658_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.715
683
+ TelevisionClip_720P-44d1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.839
684
+ Gaming_720P-64b0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.396
685
+ MusicVideo_1080P-0706_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.084
686
+ Vlog_1080P-5f28_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.811
687
+ Sports_720P-5ae1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.333
688
+ VerticalVideo_1080P-7cf3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.955
689
+ Sports_1080P-2a21_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.231
690
+ Animation_360P-08c9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.652
691
+ NewsClip_720P-7c67_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.434
692
+ NewsClip_360P-67ce_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.196
693
+ Animation_1080P-21dd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.791
694
+ LyricVideo_1080P-63d9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.512
695
+ MusicVideo_1080P-5072_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.190
696
+ Sports_360P-65f3_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.826
697
+ NewsClip_720P-2882_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.667
698
+ CoverSong_480P-6c3e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.502
699
+ Lecture_480P-7d77_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.598
700
+ Lecture_360P-7550_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.033
701
+ Gaming_2160P-6bb6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.537
702
+ Vlog_720P-5364_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.794
703
+ Lecture_480P-2513_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.879
704
+ HowTo_720P-4d55_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.947
705
+ MusicVideo_480P-2de0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.470
706
+ Animation_480P-087e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.525
707
+ NewsClip_1080P-3427_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.382
708
+ Vlog_1080P-010b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.185
709
+ NewsClip_720P-0c81_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.893
710
+ Sports_360P-4ad7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.327
711
+ TelevisionClip_480P-1b49_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.183
712
+ LyricVideo_720P-36fb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.839
713
+ NewsClip_480P-4a9f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.983
714
+ NewsClip_480P-6bfa_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.793
715
+ MusicVideo_1080P-18f5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.829
716
+ Sports_1080P-6710_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.335
717
+ NewsClip_360P-5bcc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.864
718
+ Gaming_1080P-3d58_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.782
719
+ HowTo_720P-017a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.762
720
+ Gaming_480P-6491_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.013
721
+ HowTo_360P-18e7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.780
722
+ LyricVideo_480P-7919_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.508
723
+ NewsClip_480P-7a0d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.754
724
+ HowTo_480P-60b3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.576
725
+ MusicVideo_360P-648c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.272
726
+ LiveMusic_1080P-2930_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.776
727
+ TelevisionClip_360P-11d5_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.842
728
+ Lecture_720P-094d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.616
729
+ Gaming_1080P-2221_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.239
730
+ CoverSong_480P-3076_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.710
731
+ HDR_1080P-3181_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.777
732
+ CoverSong_1080P-5f92_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.043
733
+ TelevisionClip_360P-0951_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.511
734
+ Sports_2160P-4aae_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.323
735
+ Lecture_720P-5c7e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.846
736
+ Gaming_1080P-7a1e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.024
737
+ HDR_1080P-13eb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.761
738
+ LiveMusic_720P-2620_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.721
739
+ MusicVideo_1080P-3a50_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.690
740
+ Vlog_1080P-6686_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.926
741
+ Sports_480P-211f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.715
742
+ Animation_1080P-5083_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.252
743
+ Lecture_1080P-0c8a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.550
744
+ TelevisionClip_1080P-63e6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.228
745
+ Gaming_720P-493e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.567
746
+ NewsClip_360P-0c17_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.968
747
+ Gaming_480P-626a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.786
748
+ Lecture_720P-1e62_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.958
749
+ CoverSong_720P-60d3_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.991
750
+ Sports_2160P-349c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.302
751
+ Animation_480P-0d04_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.111
752
+ Vlog_720P-033a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.302
753
+ HowTo_720P-269e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.643
754
+ VerticalVideo_1080P-4925_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.496
755
+ VerticalVideo_1080P-7c5c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.780
756
+ VerticalVideo_1080P-4b92_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.119
757
+ Vlog_2160P-77d8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.667
758
+ Gaming_720P-6a45_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.967
759
+ LiveMusic_360P-5281_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.596
760
+ LiveMusic_720P-58db_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.382
761
+ Gaming_1080P-71a5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.482
762
+ MusicVideo_720P-44c1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.499
763
+ Lecture_720P-5725_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.262
764
+ Lecture_360P-114f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.172
765
+ MusicVideo_1080P-106d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.259
766
+ LiveMusic_1080P-77e8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.346
767
+ Vlog_480P-5e1c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.581
768
+ HowTo_720P-7c38_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.074
769
+ NewsClip_480P-0269_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.040
770
+ TelevisionClip_1080P-4c24_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.334
771
+ MusicVideo_1080P-7265_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.566
772
+ LiveMusic_360P-3979_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.838
773
+ Vlog_2160P-0577_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.270
774
+ LyricVideo_1080P-41ee_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.071
775
+ NewsClip_720P-4603_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.454
776
+ HDR_1080P-4f4a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.510
777
+ HowTo_360P-06be_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.601
778
+ HDR_2160P-5926_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.386
779
+ HowTo_480P-221e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.283
780
+ Animation_720P-7ff2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.124
781
+ Vlog_2160P-2b2d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.906
782
+ VerticalVideo_720P-2efc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.523
783
+ Sports_1080P-7203_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.910
784
+ Gaming_2160P-416f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.332
785
+ MusicVideo_720P-0355_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.907
786
+ LyricVideo_720P-74a0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.415
787
+ VerticalVideo_480P-34b9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.527
788
+ Sports_720P-2234_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.160
789
+ HowTo_480P-0eb3_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.507
790
+ Vlog_480P-476b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.431
791
+ Gaming_2160P-3aec_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.646
792
+ Sports_2160P-7af8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.563
793
+ VerticalVideo_720P-669d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.176
794
+ NewsClip_1080P-0abf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.672
795
+ Animation_360P-5712_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.552
796
+ LiveMusic_480P-459c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.755
797
+ Gaming_360P-5e0f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.012
798
+ MusicVideo_1080P-2b2b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.436
799
+ Gaming_1080P-57ca_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.409
800
+ Sports_480P-5871_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.698
801
+ HowTo_480P-4a28_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.659
802
+ Sports_2160P-3a9a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.633
803
+ MusicVideo_1080P-34ff_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.658
804
+ HowTo_360P-2fd5_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.049
805
+ LyricVideo_360P-0ac7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.964
806
+ MusicVideo_360P-5578_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.873
807
+ MusicVideo_480P-41ce_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.766
808
+ Lecture_360P-5d0a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.416
809
+ Vlog_720P-155f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.316
810
+ MusicVideo_1080P-7f2e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.750
811
+ NewsClip_1080P-5be1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.065
812
+ CoverSong_480P-7f6d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.048
813
+ CoverSong_360P-13d2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.578
814
+ LiveMusic_480P-1a91_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.173
815
+ CoverSong_1080P-0cdc_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.741
816
+ CoverSong_360P-1b2b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.409
817
+ Lecture_480P-4c86_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.666
818
+ Sports_720P-50cf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.372
819
+ Animation_480P-52af_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.656
820
+ HowTo_480P-63a2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.559
821
+ NewsClip_480P-2407_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.688
822
+ Vlog_2160P-7324_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.194
823
+ Sports_720P-4b69_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.903
824
+ HDR_1080P-3749_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.080
825
+ HDR_2160P-40ab_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.105
826
+ Gaming_480P-6d1e_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.674
827
+ Sports_1080P-6571_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.652
828
+ LiveMusic_1080P-541f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.174
829
+ Gaming_720P-3524_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.110
830
+ HowTo_1080P-7f18_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.718
831
+ Animation_1080P-4ea8_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.121
832
+ LyricVideo_720P-6f0c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.803
833
+ Lecture_360P-20c3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.133
834
+ TelevisionClip_1080P-6026_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.415
835
+ Lecture_480P-5aee_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.606
836
+ Lecture_480P-74cf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.428
837
+ Vlog_480P-34ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.666
838
+ Vlog_2160P-327a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.983
839
+ Gaming_1080P-58ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.952
840
+ LiveMusic_720P-267b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.525
841
+ Lecture_480P-2655_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.261
842
+ Lecture_480P-5cd7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.024
843
+ Vlog_2160P-70d6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.067
844
+ Gaming_720P-103a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.056
845
+ Lecture_1080P-1709_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.172
846
+ HowTo_1080P-64f7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.454
847
+ HowTo_720P-3a5d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.536
848
+ NewsClip_720P-35d9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.017
849
+ Gaming_360P-21d2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.372
850
+ CoverSong_1080P-0f33_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.221
851
+ Vlog_480P-7237_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.890
852
+ Sports_480P-19e4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.835
853
+ VerticalVideo_360P-6ff2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.543
854
+ Vlog_1080P-687a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.374
855
+ Sports_1080P-53a0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.429
856
+ HowTo_1080P-63e4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.948
857
+ NewsClip_1080P-2eb0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.484
858
+ Sports_1080P-1d78_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.378
859
+ HowTo_1080P-4d7b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.199
860
+ Sports_480P-1396_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.459
861
+ CoverSong_1080P-3aac_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.381
862
+ Animation_480P-6e23_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.731
863
+ NewsClip_720P-5564_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.377
864
+ Sports_1080P-49c5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.142
865
+ MusicVideo_480P-7643_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.093
866
+ HowTo_480P-3435_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.930
867
+ MusicVideo_720P-3b11_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.600
868
+ NewsClip_1080P-1db0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.141
869
+ LiveMusic_1080P-6d1a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.780
870
+ HDR_2160P-2a72_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.113
871
+ Vlog_2160P-4f98_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.470
872
+ VerticalVideo_720P-19fa_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.269
873
+ Sports_720P-6bb7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.882
874
+ NewsClip_720P-7b7e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.145
875
+ NewsClip_360P-3451_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.121
876
+ HowTo_360P-3aa6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.019
877
+ Sports_1080P-241e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.411
878
+ Vlog_360P-76ae_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.606
879
+ VerticalVideo_480P-7278_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.005
880
+ Gaming_2160P-67b0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.204
881
+ CoverSong_360P-3218_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.427
882
+ LyricVideo_1080P-756a_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.228
883
+ LiveMusic_480P-474a_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.491
884
+ VerticalVideo_360P-54f7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.146
885
+ MusicVideo_1080P-4671_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.083
886
+ Lecture_360P-5779_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.069
887
+ VerticalVideo_1080P-1ac1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.979
888
+ Gaming_1080P-277c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.884
889
+ Sports_720P-62ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.965
890
+ Sports_360P-4545_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.432
891
+ Lecture_360P-2c55_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.717
892
+ LyricVideo_360P-17ce_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.830
893
+ HowTo_1080P-763c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.957
894
+ Sports_360P-2e48_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.744
895
+ Sports_1080P-3eb0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.819
896
+ Gaming_720P-6403_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.023
897
+ HowTo_360P-0562_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.261
898
+ Gaming_360P-56fe_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.439
899
+ HDR_1080P-69de_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.181
900
+ Lecture_1080P-011f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.590
901
+ Vlog_360P-3bc2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.944
902
+ Animation_720P-41cf_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.862
903
+ LiveMusic_720P-289b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.436
904
+ HowTo_360P-4e4c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.005
905
+ Lecture_720P-7e40_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.246
906
+ Gaming_2160P-5c33_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.834
907
+ Gaming_1080P-66d2_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.142
908
+ TelevisionClip_1080P-3758_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.313
909
+ Lecture_720P-2442_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.390
910
+ NewsClip_1080P-02fd_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.159
911
+ HowTo_480P-4c99_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.940
912
+ NewsClip_480P-28eb_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.998
913
+ Vlog_720P-3e9c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.882
914
+ Sports_1080P-679d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.366
915
+ NewsClip_720P-5787_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.483
916
+ LiveMusic_360P-22c5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.231
917
+ HDR_2160P-5e25_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.209
918
+ Lecture_480P-7e55_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.538
919
+ Vlog_480P-08c7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.591
920
+ MusicVideo_1080P-6fb1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.492
921
+ HowTo_720P-483c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.485
922
+ NewsClip_480P-72b8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.407
923
+ CoverSong_720P-1393_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.059
924
+ Vlog_2160P-5874_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.101
925
+ MusicVideo_360P-2fcb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.619
926
+ Vlog_2160P-13fe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.992
927
+ Sports_720P-2c80_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.136
928
+ Sports_1080P-1ae3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.352
929
+ NewsClip_480P-49c9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.975
930
+ VerticalVideo_360P-579c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.058
931
+ NewsClip_360P-274e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.332
932
+ Sports_480P-3dfd_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.498
933
+ NewsClip_1080P-27fb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.002
934
+ MusicVideo_720P-3698_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.378
935
+ MusicVideo_720P-734f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.848
936
+ Vlog_1080P-1f0a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.872
937
+ Vlog_2160P-416c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.019
938
+ Animation_360P-47cc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.282
939
+ HowTo_360P-6093_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.205
940
+ NewsClip_480P-41b1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.623
941
+ Vlog_1080P-21f5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.327
942
+ Sports_720P-5bfd_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.677
943
+ CoverSong_720P-5ba2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.834
944
+ Gaming_1080P-26dc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.541
945
+ CoverSong_720P-5689_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.317
946
+ LiveMusic_720P-66df_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.592
947
+ Animation_480P-70d4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.848
948
+ MusicVideo_360P-5358_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.069
949
+ LyricVideo_480P-51e4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.179
950
+ LiveMusic_720P-4ae2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.285
951
+ VerticalVideo_1080P-3709_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.367
952
+ Sports_1080P-47e9_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.106
953
+ Vlog_1080P-23cb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.069
954
+ Lecture_720P-5120_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.295
955
+ Gaming_360P-7975_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.761
956
+ NewsClip_480P-3672_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.802
957
+ MusicVideo_360P-08cb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.401
958
+ Animation_720P-6372_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.422
959
+ Sports_720P-675b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.201
960
+ LiveMusic_1080P-59b3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.443
961
+ TelevisionClip_360P-3b9a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.883
962
+ Lecture_720P-50b9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.579
963
+ Gaming_360P-6207_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.349
964
+ Gaming_2160P-2dc4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.383
965
+ HowTo_360P-55e9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.496
966
+ NewsClip_360P-4417_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.924
967
+ HowTo_480P-118d_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.462
968
+ Gaming_2160P-28de_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.349
969
+ Sports_480P-3f50_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.234
970
+ Sports_480P-0efe_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.673
971
+ MusicVideo_480P-4cc8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.346
972
+ Vlog_720P-141f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.128
973
+ Sports_480P-1fe9_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.706
974
+ Gaming_1080P-1704_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.295
975
+ Animation_360P-5de0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.429
976
+ TelevisionClip_360P-29f1_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.581
977
+ VerticalVideo_1080P-04d4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.150
978
+ Sports_2160P-1261_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.114
979
+ Sports_1080P-5d25_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.478
980
+ LyricVideo_480P-5c17_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.344
981
+ Animation_360P-24d4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.435
982
+ NewsClip_360P-5052_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.095
983
+ Sports_720P-6d04_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.783
984
+ TelevisionClip_480P-30b6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.609
985
+ Animation_720P-1a6d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.283
986
+ Lecture_360P-311d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.603
987
+ Gaming_360P-4f09_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.506
988
+ Vlog_1080P-26dc_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.685
989
+ Gaming_360P-2f99_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.281
990
+ Vlog_1080P-2cda_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.429
991
+ VerticalVideo_720P-456c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.711
992
+ Sports_2160P-7bbe_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.343
993
+ VerticalVideo_480P-419c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.780
994
+ Sports_1080P-2524_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.662
995
+ HDR_2160P-5275_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.206
996
+ VerticalVideo_360P-52c1_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.352
997
+ LiveMusic_480P-65ca_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.691
998
+ NewsClip_1080P-48ae_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.887
999
+ LiveMusic_360P-265c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.339
1000
+ LiveMusic_360P-121f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.430
1001
+ Gaming_1080P-3a9d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.872
1002
+ Lecture_360P-4bb4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.822
1003
+ MusicVideo_480P-7955_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.509
1004
+ Animation_1080P-3dbf_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.444
1005
+ VerticalVideo_1080P-73b6_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.034
1006
+ HDR_1080P-1e5b_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.242
1007
+ Lecture_720P-7189_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.869
1008
+ MusicVideo_480P-61ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.756
1009
+ Sports_720P-33c6_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.985
1010
+ LyricVideo_1080P-584f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.188
1011
+ LyricVideo_720P-0ae4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.717
1012
+ CoverSong_480P-41a5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.270
1013
+ Sports_360P-61f6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.393
1014
+ Sports_480P-1019_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.316
1015
+ Animation_480P-08a5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.420
1016
+ Vlog_2160P-310b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.857
1017
+ Gaming_480P-20ca_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.891
1018
+ Animation_720P-57d9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.522
1019
+ Lecture_480P-7eec_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.015
1020
+ MusicVideo_1080P-6260_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.042
1021
+ Gaming_480P-445b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.714
1022
+ Gaming_2160P-7ff0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.238
1023
+ TelevisionClip_720P-4edb_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.015
1024
+ HowTo_480P-0cb3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.592
1025
+ Animation_720P-431c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.396
1026
+ CoverSong_720P-7360_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.798
1027
+ Vlog_2160P-408f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.790
1028
+ MusicVideo_1080P-1a53_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.577
1029
+ VerticalVideo_360P-3b4e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.576
1030
+ Gaming_360P-187a_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.133
1031
+ Sports_1080P-3db7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.021
1032
+ MusicVideo_480P-483b_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.539
1033
+ VerticalVideo_1080P-4da5_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.257
1034
+ VerticalVideo_360P-5429_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.616
1035
+ Sports_720P-07d0_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.863
1036
+ HDR_1080P-55c4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.106
1037
+ Gaming_360P-7acb_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.059
1038
+ LiveMusic_480P-559d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.695
1039
+ Gaming_2160P-3002_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.337
1040
+ Vlog_720P-2929_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.918
1041
+ Vlog_480P-0980_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.755
1042
+ Sports_480P-77e3_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.053
1043
+ Gaming_1080P-44c4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.839
1044
+ CoverSong_1080P-033d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.433
1045
+ HDR_1080P-549e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.064
1046
+ VerticalVideo_1080P-34ba_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.382
1047
+ Vlog_720P-60f8_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.335
1048
+ Sports_1080P-46ed_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.493
1049
+ VerticalVideo_720P-42f2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.199
1050
+ Gaming_360P-279f_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.841
1051
+ CoverSong_480P-1019_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.815
1052
+ Vlog_360P-3121_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.775
1053
+ Sports_480P-0bb9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.419
1054
+ Sports_1080P-15d1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.570
1055
+ TelevisionClip_480P-0e46_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.881
1056
+ CoverSong_360P-6da7_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.763
1057
+ Lecture_360P-5594_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.367
1058
+ HowTo_360P-2d35_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.847
1059
+ Sports_480P-2053_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.149
1060
+ Lecture_360P-051a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.640
1061
+ LyricVideo_720P-7f44_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.196
1062
+ Lecture_480P-5f3a_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.324
1063
+ CoverSong_1080P-6f26_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.288
1064
+ NewsClip_360P-2c91_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.859
1065
+ CoverSong_360P-2146_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.273
1066
+ CoverSong_360P-53a6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.235
1067
+ LiveMusic_1080P-7ead_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.559
1068
+ Vlog_2160P-059c_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.441
1069
+ Sports_360P-301d_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.667
1070
+ Sports_720P-3072_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.180
1071
+ HowTo_720P-3813_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.072
1072
+ Sports_720P-5833_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.205
1073
+ TelevisionClip_720P-1862_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.238
1074
+ HowTo_1080P-55d1_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.102
1075
+ NewsClip_720P-7745_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.528
1076
+ VerticalVideo_720P-0dac_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.934
1077
+ Sports_2160P-324e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.317
1078
+ Sports_2160P-2626_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.388
1079
+ LiveMusic_1080P-1ace_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.162
1080
+ LyricVideo_360P-5868_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.107
1081
+ CoverSong_1080P-4bad_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.298
1082
+ Vlog_2160P-4362_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.554
1083
+ NewsClip_360P-1093_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.100
1084
+ VerticalVideo_1080P-4591_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.439
1085
+ LyricVideo_1080P-63d4_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.221
1086
+ Animation_360P-3e40_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.353
1087
+ HowTo_360P-5da7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.425
1088
+ HDR_1080P-68c8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.804
1089
+ Gaming_480P-6c92_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.348
1090
+ Vlog_2160P-7b10_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.864
1091
+ Vlog_480P-5275_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.120
1092
+ Animation_1080P-5dd8_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.958
1093
+ NewsClip_480P-5e7f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.105
1094
+ VerticalVideo_360P-70a9_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.814
1095
+ CoverSong_480P-2207_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.934
1096
+ Vlog_720P-03f9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.972
1097
+ CoverSong_480P-3f3b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.027
1098
+ Lecture_360P-44c2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.389
1099
+ CoverSong_720P-2d05_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.224
1100
+ MusicVideo_1080P-6c1d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.323
1101
+ HowTo_480P-64a2_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.836
1102
+ TelevisionClip_720P-4af1_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.213
1103
+ HowTo_720P-6323_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.732
1104
+ Vlog_1080P-4ba9_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.908
1105
+ CoverSong_480P-4d34_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.677
1106
+ Vlog_720P-1003_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.604
1107
+ HowTo_720P-479b_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.620
1108
+ TelevisionClip_1080P-3d83_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.382
1109
+ Sports_1080P-0640_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.524
1110
+ Gaming_1080P-45af_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.373
1111
+ Animation_480P-7ef2_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.164
1112
+ Vlog_480P-7615_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.588
1113
+ TelevisionClip_480P-415c_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.544
1114
+ Vlog_720P-4e3d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.743
1115
+ Sports_360P-0dda_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.868
1116
+ LyricVideo_1080P-3803_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.950
1117
+ Sports_480P-1d1b_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.177
1118
+ Vlog_1080P-25de_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.914
1119
+ CoverSong_480P-6c50_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.729
1120
+ LiveMusic_1080P-51f6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.819
1121
+ Sports_480P-6e41_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.306
1122
+ Sports_720P-0104_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.223
1123
+ LiveMusic_360P-1d94_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.028
1124
+ Sports_360P-7f50_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.963
1125
+ Vlog_360P-2973_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.902
1126
+ HowTo_360P-1dba_crf_10_ss_00_t_20.0.mp4, -1, -1, 1.907
1127
+ VerticalVideo_1080P-360f_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.189
1128
+ NewsClip_720P-2182_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.494
1129
+ Lecture_480P-73f6_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.256
1130
+ Animation_1080P-3d67_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.778
1131
+ VerticalVideo_480P-467e_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.383
1132
+ NewsClip_720P-23e0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.110
1133
+ Animation_360P-188f_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.702
1134
+ CoverSong_720P-014c_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.618
1135
+ NewsClip_720P-37f7_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.716
1136
+ Animation_1080P-4214_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.510
1137
+ VerticalVideo_720P-6bf7_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.261
1138
+ LiveMusic_720P-6452_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.098
1139
+ HDR_1080P-46a4_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.582
1140
+ LiveMusic_1080P-157b_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.360
1141
+ Vlog_360P-2e9d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.746
1142
+ Animation_1080P-6ec0_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.124
1143
+ CoverSong_360P-0a9d_crf_10_ss_00_t_20.0.mp4, -1, -1, 2.538
1144
+ Gaming_720P-4813_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.290
1145
+ LyricVideo_1080P-725e_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.399
1146
+ LiveMusic_720P-71c5_crf_10_ss_00_t_20.0.mp4, -1, -1, 3.605
1147
+ Gaming_2160P-673d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.165
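Each label file added above uses one comma-separated record per line: the video filename, two placeholder fields (-1, -1) that the training script discards, and a mean opinion score. As a minimal illustration (not part of the commit), the sketch below parses a single record the same way train_test_split in train_one_dataset.py does further down in this diff; the "videos" directory is a hypothetical example.

import os.path as osp

def parse_label_line(line, dataset_path="videos"):
    # e.g. "Gaming_2160P-673d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.165" (the last record above)
    filename, _, _, label = [field.strip() for field in line.strip().split(",")]
    return dict(filename=osp.join(dataset_path, filename), label=float(label))

print(parse_label_line("Gaming_2160P-673d_crf_10_ss_00_t_20.0.mp4, -1, -1, 4.165"))
# -> {'filename': 'videos/Gaming_2160P-673d_crf_10_ss_00_t_20.0.mp4', 'label': 4.165}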
examplar_data_labels/train_labels.txt ADDED
The diff for this file is too large to render. See raw diff
 
requirements.txt ADDED
@@ -0,0 +1,15 @@
1
+ torch~=1.13
2
+ torchvision
3
+ opencv-python
4
+ decord
5
+ matplotlib
6
+ scipy
7
+ numpy
8
+ tqdm
9
+ timm
10
+ einops
11
+ wandb
12
+ scikit-video
13
+ thop==0.0.31-2005241907
14
+ onnx
15
+ ptflops
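requirements.txt mixes unpinned packages with a compatible-release pin (torch~=1.13) and an exact pin (thop==0.0.31-2005241907). As a small standard-library sketch (not part of the repo), the snippet below reports which of these requirements are already satisfied in the active environment; the parsing only handles the ~= and == specifiers that actually appear in this file.

from importlib.metadata import PackageNotFoundError, version

with open("requirements.txt") as f:
    for raw in f:
        req = raw.strip()
        if not req or req.startswith("#"):
            continue
        # strip the version specifier ("~=1.13", "==0.0.31-2005241907") to get the package name
        name = req.split("~=")[0].split("==")[0].strip()
        try:
            print(f"{name:15s} installed {version(name):15s} (requirement: {req})")
        except PackageNotFoundError:
            print(f"{name:15s} MISSING {'':17s} (requirement: {req})")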
setup.py ADDED
@@ -0,0 +1,53 @@
+ import os
+
+ from setuptools import find_packages, setup
+
+ version_file = "cover/version.py"
+
+
+ def readme():
+     with open("README.md", encoding="utf-8") as f:
+         content = f.read()
+     return content
+
+
+ def get_version():
+     with open(version_file, "r") as f:
+         exec(compile(f.read(), version_file, "exec"))
+     return locals()["__version__"]
+
+
+ def get_requirements(filename="requirements.txt"):
+     here = os.path.dirname(os.path.realpath(__file__))
+     with open(os.path.join(here, filename), "r") as f:
+         requires = [line.replace("\n", "") for line in f.readlines()]
+     return requires
+
+
+ setup(
+     name="cover",
+     version=get_version(),
+     description="Disentangled Video Quality Evaluator",
+     long_description=readme(),
+     long_description_content_type="text/markdown",
+     author="Teo (Timothy) Wu Hao Ning",
+     author_email="realtimothyhwu@gmail.com",
+     keywords="computer vision, video quality assessment",
+     url="https://github.com/twowu/cover",
+     include_package_data=True,
+     packages=find_packages(exclude=("examplar_data_labels", "figs")),
+     classifiers=[
+         "Development Status :: 4 - Beta",
+         "License :: OSI Approved :: Apache Software License",
+         "Operating System :: OS Independent",
+         "Programming Language :: Python :: 3",
+         "Programming Language :: Python :: 3.7",
+         "Programming Language :: Python :: 3.8",
+     ],
+     license="MIT License",
+     setup_requires=["numpy"],
+     install_requires=get_requirements(),
+     ext_modules=[],
+     cmdclass={},
+     zip_safe=False,
+ )
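
`get_version()` above deliberately `exec`s `cover/version.py` instead of importing the `cover` package, so the version string can be resolved before torch, decord, and the other runtime dependencies are installed. A standalone sketch of the same single-source-version pattern (the `__version__` value is only an assumed example of what `cover/version.py` defines):

```python
# Hedged sketch of the exec-based version read used in setup.py above.
version_file = "cover/version.py"  # assumed to contain a line like: __version__ = "0.0.1"


def get_version(path=version_file):
    scope = {}
    with open(path, "r") as f:
        # Run the version file in an isolated namespace and read __version__ back out.
        exec(compile(f.read(), path, "exec"), scope)
    return scope["__version__"]


if __name__ == "__main__":
    print(get_version())
```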
train_one_dataset.py ADDED
@@ -0,0 +1,616 @@
1
+ import torch
2
+
3
+ import cv2
4
+ import random
5
+ import os.path as osp
6
+ import argparse
7
+ from scipy.stats import spearmanr, pearsonr
8
+ from scipy.stats.stats import kendalltau as kendallr
9
+ import numpy as np
10
+ from time import time
11
+ from tqdm import tqdm
12
+ import pickle
13
+ import math
14
+ import wandb
15
+ import yaml
16
+ from collections import OrderedDict
17
+
18
+ from functools import reduce
19
+ from thop import profile
20
+ import copy
21
+
22
+
23
+ import cover.models as models
24
+ import cover.datasets as datasets
25
+
26
+
27
+ def train_test_split(dataset_path, ann_file, ratio=0.8, seed=42):
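+ # Parse "filename, _, _, label" annotation lines, shuffle them with the given seed, and split ratio/(1 - ratio) into train and val lists.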
28
+ random.seed(seed)
29
+ print(seed)
30
+ video_infos = []
31
+ with open(ann_file, "r") as fin:
32
+ for line in fin.readlines():
33
+ line_split = line.strip().split(",")
34
+ filename, _, _, label = line_split
35
+ label = float(label)
36
+ filename = osp.join(dataset_path, filename)
37
+ video_infos.append(dict(filename=filename, label=label))
38
+ random.shuffle(video_infos)
39
+ return (
40
+ video_infos[: int(ratio * len(video_infos))],
41
+ video_infos[int(ratio * len(video_infos)) :],
42
+ )
43
+
44
+
45
+ def rank_loss(y_pred, y):
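+ # Pairwise hinge loss: penalize every pair whose predicted ordering contradicts the label ordering, normalized by the number of pairs and the largest violation.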
46
+ ranking_loss = torch.nn.functional.relu(
47
+ (y_pred - y_pred.t()) * torch.sign((y.t() - y))
48
+ )
49
+ scale = 1 + torch.max(ranking_loss)
50
+ return (
51
+ torch.sum(ranking_loss) / y_pred.shape[0] / (y_pred.shape[0] - 1) / scale
52
+ ).float()
53
+
54
+
55
+ def gaussian(y, eps=1e-8):
56
+ return (y - y.mean()) / (y.std() + eps)
57
+
58
+
59
+ def plcc_loss(y_pred, y):
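+ # Differentiable PLCC surrogate: z-score both vectors, then average two scaled MSE terms that vanish as the Pearson correlation approaches 1.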
60
+ sigma_hat, m_hat = torch.std_mean(y_pred, unbiased=False)
61
+ y_pred = (y_pred - m_hat) / (sigma_hat + 1e-8)
62
+ sigma, m = torch.std_mean(y, unbiased=False)
63
+ y = (y - m) / (sigma + 1e-8)
64
+ loss0 = torch.nn.functional.mse_loss(y_pred, y) / 4
65
+ rho = torch.mean(y_pred * y)
66
+ loss1 = torch.nn.functional.mse_loss(rho * y_pred, y) / 4
67
+ return ((loss0 + loss1) / 2).float()
68
+
69
+
70
+ def rescaled_l2_loss(y_pred, y, eps=1e-8):
71
+ y_pred_rs = (y_pred - y_pred.mean()) / y_pred.std()
72
+ y_rs = (y - y.mean()) / (y.std() + eps)
73
+ return torch.nn.functional.mse_loss(y_pred_rs, y_rs)
74
+
75
+
76
+ def rplcc_loss(y_pred, y, eps=1e-8):
77
+ ## Literally (1 - PLCC) / 2
78
+ y_pred, y = gaussian(y_pred), gaussian(y)
79
+ cov = torch.sum(y_pred * y) / y_pred.shape[0]
80
+ # std = (torch.std(y_pred) + eps) * (torch.std(y) + eps)
81
+ return (1 - cov) / 2
82
+
83
+
84
+ def self_similarity_loss(f, f_hat, f_hat_detach=False):
85
+ if f_hat_detach:
86
+ f_hat = f_hat.detach()
87
+ return 1 - torch.nn.functional.cosine_similarity(f, f_hat, dim=1).mean()
88
+
89
+
90
+ def contrastive_similarity_loss(f, f_hat, f_hat_detach=False, eps=1e-8):
91
+ if f_hat_detach:
92
+ f_hat = f_hat.detach()
93
+ intra_similarity = torch.nn.functional.cosine_similarity(f, f_hat, dim=1).mean()
94
+ cross_similarity = torch.nn.functional.cosine_similarity(f, f_hat, dim=0).mean()
95
+ return (1 - intra_similarity) / (1 - cross_similarity + eps)
96
+
97
+
98
+ def rescale(pr, gt=None):
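+ # Z-score the raw predictions; when ground-truth scores are provided, map them onto the GT mean/std so RMSE is computed on a comparable scale.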
99
+ if gt is None:
100
+ pr = (pr - np.mean(pr)) / np.std(pr)
101
+ else:
102
+ pr = ((pr - np.mean(pr)) / np.std(pr)) * np.std(gt) + np.mean(gt)
103
+ return pr
104
+
105
+ sample_types = ["semantic", "technical", "aesthetic"]
106
+
107
+
108
+ def finetune_epoch(
109
+ ft_loader,
110
+ model,
111
+ model_ema,
112
+ optimizer,
113
+ scheduler,
114
+ device,
115
+ epoch=-1,
116
+ need_upsampled=False,
117
+ need_feat=False,
118
+ need_fused=False,
119
+ need_separate_sup=True,
120
+ ):
121
+ model.train()
122
+ for i, data in enumerate(tqdm(ft_loader, desc=f"Training in epoch {epoch}")):
123
+ optimizer.zero_grad()
124
+ video = {}
125
+ for key in sample_types:
126
+ if key in data:
127
+ video[key] = data[key].to(device)
128
+
129
+ y = data["gt_label"].float().detach().to(device).unsqueeze(-1)
130
+
131
+ scores = model(video, inference=False, reduce_scores=False)
132
+ if len(scores) > 1:
133
+ y_pred = reduce(lambda x, y: x + y, scores)
134
+ else:
135
+ y_pred = scores[0]
136
+ y_pred = y_pred.mean((-3, -2, -1))
137
+
138
+ frame_inds = data["frame_inds"]
139
+
140
+
141
+ loss = 0 # p_loss + 0.3 * r_loss
142
+
143
+ if need_separate_sup:
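+ # Supervise each of the three branch scores separately: PLCC loss plus 0.3 * rank loss per branch.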
144
+ p_loss_a = plcc_loss(scores[0].mean((-3, -2, -1)), y)
145
+ p_loss_b = plcc_loss(scores[1].mean((-3, -2, -1)), y)
146
+ p_loss_c = plcc_loss(scores[2].mean((-3, -2, -1)), y)
147
+ r_loss_a = rank_loss(scores[0].mean((-3, -2, -1)), y)
148
+ r_loss_b = rank_loss(scores[1].mean((-3, -2, -1)), y)
149
+ r_loss_c = rank_loss(scores[2].mean((-3, -2, -1)), y)
150
+ loss += (
151
+ p_loss_a + p_loss_b + p_loss_c + 0.3 * r_loss_a + 0.3 * r_loss_b + 0.3 * r_loss_c
152
+ ) # + 0.2 * o_loss
153
+ wandb.log(
154
+ {
155
+ "train/plcc_loss_a": p_loss_a.item(),
156
+ "train/plcc_loss_b": p_loss_b.item(),
157
+ "train/plcc_loss_c": p_loss_c.item(),
158
+ }
159
+ )
160
+
161
+ wandb.log(
162
+ {"train/total_loss": loss.item(),}
163
+ )
164
+
165
+ loss.backward()
166
+ optimizer.step()
167
+ scheduler.step()
168
+
169
+ # ft_loader.dataset.refresh_hypers()
170
+
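+ # Update the EMA copy after each optimizer step: ema = 0.999 * ema + 0.001 * current model weights.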
171
+ if model_ema is not None:
172
+ model_params = dict(model.named_parameters())
173
+ model_ema_params = dict(model_ema.named_parameters())
174
+ for k in model_params.keys():
175
+ model_ema_params[k].data.mul_(0.999).add_(
176
+ model_params[k].data, alpha=1 - 0.999
177
+ )
178
+ model.eval()
179
+
180
+
181
+ def profile_inference(inf_set, model, device):
182
+ video = {}
183
+ data = inf_set[0]
184
+ for key in sample_types:
185
+ if key in data:
186
+ video[key] = data[key].to(device).unsqueeze(0)
187
+ with torch.no_grad():
188
+
189
+ flops, params = profile(model, (video,))
190
+ print(
191
+ f"The FLOps of the Variant is {flops/1e9:.1f}G, with Params {params/1e6:.2f}M."
192
+ )
193
+
194
+
195
+ def inference_set(
196
+ inf_loader,
197
+ model,
198
+ device,
199
+ best_,
200
+ save_model=False,
201
+ suffix="s",
202
+ save_name="divide",
203
+ save_type="head",
204
+ ):
205
+
206
+ results = []
207
+
208
+ best_s, best_p, best_k, best_r = best_
209
+
210
+ for i, data in enumerate(tqdm(inf_loader, desc="Validating")):
211
+ result = dict()
212
+ video, video_up = {}, {}
213
+ for key in sample_types:
214
+ if key in data:
215
+ video[key] = data[key].to(device)
216
+ ## Reshape into clips
217
+ b, c, t, h, w = video[key].shape
218
+ video[key] = (
219
+ video[key]
220
+ .reshape(
221
+ b, c, data["num_clips"][key], t // data["num_clips"][key], h, w
222
+ )
223
+ .permute(0, 2, 1, 3, 4, 5)
224
+ .reshape(
225
+ b * data["num_clips"][key], c, t // data["num_clips"][key], h, w
226
+ )
227
+ )
228
+ if key + "_up" in data:
229
+ video_up[key] = data[key + "_up"].to(device)
230
+ ## Reshape into clips
231
+ b, c, t, h, w = video_up[key].shape
232
+ video_up[key] = (
233
+ video_up[key]
234
+ .reshape(b, c, data["num_clips"], t // data["num_clips"], h, w)
235
+ .permute(0, 2, 1, 3, 4, 5)
236
+ .reshape(b * data["num_clips"], c, t // data["num_clips"], h, w)
237
+ )
238
+ # .unsqueeze(0)
239
+ with torch.no_grad():
240
+ result["pr_labels"] = model(video, reduce_scores=True).cpu().numpy()
241
+ if len(list(video_up.keys())) > 0:
242
+ result["pr_labels_up"] = model(video_up).cpu().numpy()
243
+
244
+ result["gt_label"] = data["gt_label"].item()
245
+ del video, video_up
246
+ results.append(result)
247
+
248
+ ## aggregate per-video predictions and compare them against the ground-truth labels
249
+ gt_labels = [r["gt_label"] for r in results]
250
+ pr_labels = [np.mean(r["pr_labels"][:]) for r in results]
251
+ pr_labels = rescale(pr_labels, gt_labels)
252
+
253
+ s = spearmanr(gt_labels, pr_labels)[0]
254
+ p = pearsonr(gt_labels, pr_labels)[0]
255
+ k = kendallr(gt_labels, pr_labels)[0]
256
+ r = np.sqrt(((gt_labels - pr_labels) ** 2).mean())
257
+
258
+ wandb.log(
259
+ {
260
+ f"val_{suffix}/SRCC-{suffix}": s,
261
+ f"val_{suffix}/PLCC-{suffix}": p,
262
+ f"val_{suffix}/KRCC-{suffix}": k,
263
+ f"val_{suffix}/RMSE-{suffix}": r,
264
+ }
265
+ )
266
+
267
+ del results, result # , video, video_up
268
+ torch.cuda.empty_cache()
269
+
270
+ if s + p > best_s + best_p and save_model:
271
+ state_dict = model.state_dict()
272
+
273
+ if save_type == "head":
274
+ head_state_dict = OrderedDict()
275
+ for key, v in state_dict.items():
276
+ if "backbone" in key:
277
+ continue
278
+ else:
279
+ head_state_dict[key] = v
280
+ print("Following keys are saved :", head_state_dict.keys())
281
+ torch.save(
282
+ {"state_dict": head_state_dict, "validation_results": best_,},
283
+ f"pretrained_weights/{save_name}_{suffix}_finetuned.pth",
284
+ )
285
+ else:
286
+ torch.save(
287
+ {"state_dict": state_dict, "validation_results": best_,},
288
+ f"pretrained_weights/{save_name}_{suffix}_finetuned.pth",
289
+ )
290
+
291
+ best_s, best_p, best_k, best_r = (
292
+ max(best_s, s),
293
+ max(best_p, p),
294
+ max(best_k, k),
295
+ min(best_r, r),
296
+ )
297
+
298
+ wandb.log(
299
+ {
300
+ f"val_{suffix}/best_SRCC-{suffix}": best_s,
301
+ f"val_{suffix}/best_PLCC-{suffix}": best_p,
302
+ f"val_{suffix}/best_KRCC-{suffix}": best_k,
303
+ f"val_{suffix}/best_RMSE-{suffix}": best_r,
304
+ }
305
+ )
306
+
307
+ print(
308
+ f"For {len(inf_loader)} videos, \nthe accuracy of the model: [{suffix}] is as follows:\n SROCC: {s:.4f} best: {best_s:.4f} \n PLCC: {p:.4f} best: {best_p:.4f} \n KROCC: {k:.4f} best: {best_k:.4f} \n RMSE: {r:.4f} best: {best_r:.4f}."
309
+ )
310
+
311
+ return best_s, best_p, best_k, best_r
312
+
313
+ # torch.save(results, f'{args.save_dir}/results_{dataset.lower()}_s{32}*{32}_ens{args.famount}.pkl')
314
+
315
+
316
+ def main():
317
+
318
+ parser = argparse.ArgumentParser()
319
+ parser.add_argument(
320
+ "-o", "--opt", type=str, default="cover.yml", help="the option file"
321
+ )
322
+
323
+ parser.add_argument(
324
+ "-t", "--target_set", type=str, default="val-kv1k", help="target_set"
325
+ )
326
+
327
+ parser.add_argument('-n', "--name", type=str, default="COVER_TMP", help='model name to save checkpoint')
328
+ parser.add_argument('-uh', "--usehead", type=int, default=0, help='whether to load head weights from the checkpoint')
329
+
330
+ args = parser.parse_args()
331
+ with open(args.opt, "r") as f:
332
+ opt = yaml.safe_load(f)
333
+ print(opt)
334
+
335
+ ## adaptively choose the device
336
+
337
+ device = "cuda" if torch.cuda.is_available() else "cpu"
338
+
339
+ ## defining model and loading checkpoint
340
+
341
+ bests_ = []
342
+
343
+ if opt.get("split_seed", -1) > 0:
344
+ num_splits = 10
345
+ else:
346
+ num_splits = 1
347
+
348
+ print(opt["split_seed"])
349
+
350
+ for split in range(num_splits):
351
+ model = getattr(models, opt["model"]["type"])(**opt["model"]["args"]).to(device)
352
+ if opt.get("split_seed", -1) > 0:
353
+ opt["data"]["train"] = copy.deepcopy(opt["data"][args.target_set])
354
+ opt["data"]["eval"] = copy.deepcopy(opt["data"][args.target_set])
355
+
356
+ split_duo = train_test_split(
357
+ opt["data"][args.target_set]["args"]["data_prefix"],
358
+ opt["data"][args.target_set]["args"]["anno_file"],
359
+ seed=opt["split_seed"] * (split + 1),
360
+ )
361
+ (
362
+ opt["data"]["train"]["args"]["anno_file"],
363
+ opt["data"]["eval"]["args"]["anno_file"],
364
+ ) = split_duo
365
+ opt["data"]["train"]["args"]["sample_types"]["technical"]["num_clips"] = 1
366
+
367
+ train_datasets = {}
368
+ for key in opt["data"]:
369
+ if key.startswith("train"):
370
+ train_dataset = getattr(datasets, opt["data"][key]["type"])(
371
+ opt["data"][key]["args"]
372
+ )
373
+ train_datasets[key] = train_dataset
374
+ print(len(train_dataset.video_infos))
375
+
376
+ train_loaders = {}
377
+ for key, train_dataset in train_datasets.items():
378
+ train_loaders[key] = torch.utils.data.DataLoader(
379
+ train_dataset,
380
+ batch_size=opt["batch_size"],
381
+ num_workers=opt["num_workers"],
382
+ shuffle=True,
383
+ )
384
+
385
+ val_datasets = {}
386
+ for key in opt["data"]:
387
+ if key.startswith("eval"):
388
+ val_dataset = getattr(datasets, opt["data"][key]["type"])(
389
+ opt["data"][key]["args"]
390
+ )
391
+ print(len(val_dataset.video_infos))
392
+ val_datasets[key] = val_dataset
393
+
394
+ val_loaders = {}
395
+ for key, val_dataset in val_datasets.items():
396
+ val_loaders[key] = torch.utils.data.DataLoader(
397
+ val_dataset,
398
+ batch_size=1,
399
+ num_workers=opt["num_workers"],
400
+ pin_memory=True,
401
+ )
402
+
403
+ run = wandb.init(
404
+ project=opt["wandb"]["project_name"],
405
+ name=opt["name"] + f"_target_{args.target_set}_split_{split}"
406
+ if num_splits > 1
407
+ else opt["name"],
408
+ reinit=True,
409
+ settings=wandb.Settings(start_method="thread"),
410
+ )
411
+
412
+ state_dict = torch.load(opt["test_load_path"], map_location=device)
413
+
414
+ # Load fine_tuned header from checkpoint
415
+ if args.usehead:
416
+ state_dict_head = torch.load(opt["test_load_header_path"], map_location=device)
417
+ for key in state_dict_head['state_dict'].keys():
418
+ state_dict[key] = state_dict_head['state_dict'][key]
419
+
420
+ # Allowing empty head weight
421
+ model.load_state_dict(state_dict, strict=False)
422
+
423
+ if opt["ema"]:
424
+ from copy import deepcopy
425
+
426
+ model_ema = deepcopy(model)
427
+ else:
428
+ model_ema = None
429
+
430
+ # profile_inference(val_dataset, model, device)
431
+
432
+ # finetune the model
433
+
434
+ param_groups = []
435
+
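+ # Build per-module parameter groups: backbone parameters use lr * backbone_lr_mult, all other modules use the base lr.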
436
+ for key, value in dict(model.named_children()).items():
437
+ if "backbone" in key:
438
+ param_groups += [
439
+ {
440
+ "params": value.parameters(),
441
+ "lr": opt["optimizer"]["lr"]
442
+ * opt["optimizer"]["backbone_lr_mult"],
443
+ }
444
+ ]
445
+ else:
446
+ param_groups += [
447
+ {"params": value.parameters(), "lr": opt["optimizer"]["lr"]}
448
+ ]
449
+
450
+ optimizer = torch.optim.AdamW(
451
+ lr=opt["optimizer"]["lr"],
452
+ params=param_groups,
453
+ weight_decay=opt["optimizer"]["wd"],
454
+ )
455
+ warmup_iter = 0
456
+ for train_loader in train_loaders.values():
457
+ warmup_iter += int(opt["warmup_epochs"] * len(train_loader))
458
+ max_iter = int((opt["num_epochs"] + opt["l_num_epochs"]) * len(train_loader))
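+ # LR schedule: linear warmup for warmup_iter steps, then cosine decay over the remaining iterations up to max_iter.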
459
+ lr_lambda = (
460
+ lambda cur_iter: cur_iter / warmup_iter
461
+ if cur_iter <= warmup_iter
462
+ else 0.5 * (1 + math.cos(math.pi * (cur_iter - warmup_iter) / max_iter))
463
+ )
464
+
465
+ scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda,)
466
+
467
+ bests = {}
468
+ bests_n = {}
469
+ for key in val_loaders:
470
+ bests[key] = -1, -1, -1, 1000
471
+ bests_n[key] = -1, -1, -1, 1000
472
+
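+ # Stage 1 (linear probing): freeze the backbone so only the heads are trained during the l_num_epochs epochs below.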
473
+ for key, value in dict(model.named_children()).items():
474
+ if "backbone" in key:
475
+ for param in value.parameters():
476
+ param.requires_grad = False
477
+
478
+ for epoch in range(opt["l_num_epochs"]):
479
+ print(f"Linear Epoch {epoch}:")
480
+ for key, train_loader in train_loaders.items():
481
+ finetune_epoch(
482
+ train_loader,
483
+ model,
484
+ model_ema,
485
+ optimizer,
486
+ scheduler,
487
+ device,
488
+ epoch,
489
+ opt.get("need_upsampled", False),
490
+ opt.get("need_feat", False),
491
+ opt.get("need_fused", False),
492
+ )
493
+ for key in val_loaders:
494
+ bests[key] = inference_set(
495
+ val_loaders[key],
496
+ model_ema if model_ema is not None else model,
497
+ device,
498
+ bests[key],
499
+ save_model=opt["save_model"],
500
+ save_name=args.name + "_head_" + args.target_set + f"_{split}",
501
+ suffix=key + "_s",
502
+ )
503
+ if model_ema is not None:
504
+ bests_n[key] = inference_set(
505
+ val_loaders[key],
506
+ model,
507
+ device,
508
+ bests_n[key],
509
+ save_model=opt["save_model"],
510
+ save_name=args.name
511
+ + "_head_"
512
+ + args.target_set
513
+ + f"_{split}",
514
+ suffix=key + "_n",
515
+ )
516
+ else:
517
+ bests_n[key] = bests[key]
518
+
519
+ if opt["l_num_epochs"] >= 0:
520
+ for key in val_loaders:
521
+ print(
522
+ f"""For the linear transfer process on {key} with {len(val_loaders[key])} videos,
523
+ the best validation accuracy of the model-s is as follows:
524
+ SROCC: {bests[key][0]:.4f}
525
+ PLCC: {bests[key][1]:.4f}
526
+ KROCC: {bests[key][2]:.4f}
527
+ RMSE: {bests[key][3]:.4f}."""
528
+ )
529
+
530
+ print(
531
+ f"""For the linear transfer process on {key} with {len(val_loaders[key])} videos,
532
+ the best validation accuracy of the model-n is as follows:
533
+ SROCC: {bests_n[key][0]:.4f}
534
+ PLCC: {bests_n[key][1]:.4f}
535
+ KROCC: {bests_n[key][2]:.4f}
536
+ RMSE: {bests_n[key][3]:.4f}."""
537
+ )
538
+
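+ # Stage 2 (end-to-end fine-tuning): unfreeze the backbone so all parameters are trained for num_epochs epochs.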
539
+ for key, value in dict(model.named_children()).items():
540
+ if "backbone" in key:
541
+ for param in value.parameters():
542
+ param.requires_grad = True
543
+
544
+ for epoch in range(opt["num_epochs"]):
545
+ print(f"End-to-end Epoch {epoch}:")
546
+ for key, train_loader in train_loaders.items():
547
+ finetune_epoch(
548
+ train_loader,
549
+ model,
550
+ model_ema,
551
+ optimizer,
552
+ scheduler,
553
+ device,
554
+ epoch,
555
+ opt.get("need_upsampled", False),
556
+ opt.get("need_feat", False),
557
+ opt.get("need_fused", False),
558
+ )
559
+ for key in val_loaders:
560
+ bests[key] = inference_set(
561
+ val_loaders[key],
562
+ model_ema if model_ema is not None else model,
563
+ device,
564
+ bests[key],
565
+ save_model=opt["save_model"],
566
+ save_name=args.name + "_head_" + args.target_set + f"_{split}",
567
+ suffix=key + "_s",
568
+ save_type="full",
569
+ )
570
+ if model_ema is not None:
571
+ bests_n[key] = inference_set(
572
+ val_loaders[key],
573
+ model,
574
+ device,
575
+ bests_n[key],
576
+ save_model=opt["save_model"],
577
+ save_name=args.name
578
+ + "_head_"
579
+ + args.target_set
580
+ + f"_{split}",
581
+ suffix=key + "_n",
582
+ save_type="full",
583
+ )
584
+ else:
585
+ bests_n[key] = bests[key]
586
+
587
+ if opt["num_epochs"] >= 0:
588
+ for key in val_loaders:
589
+ print(
590
+ f"""For the end-to-end transfer process on {key} with {len(val_loaders[key])} videos,
591
+ the best validation accuracy of the model-s is as follows:
592
+ SROCC: {bests[key][0]:.4f}
593
+ PLCC: {bests[key][1]:.4f}
594
+ KROCC: {bests[key][2]:.4f}
595
+ RMSE: {bests[key][3]:.4f}."""
596
+ )
597
+
598
+ print(
599
+ f"""For the end-to-end transfer process on {key} with {len(val_loaders[key])} videos,
600
+ the best validation accuracy of the model-n is as follows:
601
+ SROCC: {bests_n[key][0]:.4f}
602
+ PLCC: {bests_n[key][1]:.4f}
603
+ KROCC: {bests_n[key][2]:.4f}
604
+ RMSE: {bests_n[key][3]:.4f}."""
605
+ )
606
+
607
+ for key, value in dict(model.named_children()).items():
608
+ if "backbone" in key:
609
+ for param in value.parameters():
610
+ param.requires_grad = True
611
+
612
+ run.finish()
613
+
614
+
615
+ if __name__ == "__main__":
616
+ main()
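
For reference, the validation metrics reported by `inference_set` above reduce to the following computation on the collected ground-truth and predicted scores (a self-contained sketch with made-up numbers; in practice the arrays come from the model outputs and the annotation labels):

```python
# Hedged sketch: the SROCC / PLCC / KROCC / RMSE computation used during validation,
# including the rescale() step that maps z-scored predictions onto the GT mean/std.
import numpy as np
from scipy.stats import spearmanr, pearsonr, kendalltau

gt = np.array([1.9, 2.7, 3.3, 4.1, 4.4])       # ground-truth MOS (made up)
pr = np.array([0.10, 0.35, 0.42, 0.70, 0.81])  # raw per-video predictions (made up)

pr = (pr - pr.mean()) / pr.std() * gt.std() + gt.mean()  # rescale(pr, gt)

print("SROCC:", spearmanr(gt, pr)[0])
print("PLCC :", pearsonr(gt, pr)[0])
print("KROCC:", kendalltau(gt, pr)[0])
print("RMSE :", np.sqrt(((gt - pr) ** 2).mean()))
```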