rcfeng committed
Commit 135075d · 1 Parent(s): 741a6a7

load from git

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitignore +135 -0
  2. .gitmodules +3 -0
  3. LICENSE +35 -0
  4. README.md +2 -2
  5. basicsr/VERSION +1 -0
  6. basicsr/__init__.py +11 -0
  7. basicsr/archs/__init__.py +27 -0
  8. basicsr/archs/arcface_arch.py +245 -0
  9. basicsr/archs/arch_util.py +318 -0
  10. basicsr/archs/correlation.py +426 -0
  11. basicsr/archs/gmflow/.gitignore +160 -0
  12. basicsr/archs/gmflow/LICENSE +201 -0
  13. basicsr/archs/gmflow/README.md +239 -0
  14. basicsr/archs/gmflow/data/__init__.py +7 -0
  15. basicsr/archs/gmflow/data/chairs_split.txt +22872 -0
  16. basicsr/archs/gmflow/data/datasets.py +312 -0
  17. basicsr/archs/gmflow/data/transforms.py +284 -0
  18. basicsr/archs/gmflow/environment.yml +162 -0
  19. basicsr/archs/gmflow/evaluate.py +689 -0
  20. basicsr/archs/gmflow/gmflow/__init__.py +0 -0
  21. basicsr/archs/gmflow/gmflow/backbone.py +117 -0
  22. basicsr/archs/gmflow/gmflow/geometry.py +96 -0
  23. basicsr/archs/gmflow/gmflow/gmflow.py +170 -0
  24. basicsr/archs/gmflow/gmflow/matching.py +83 -0
  25. basicsr/archs/gmflow/gmflow/position.py +46 -0
  26. basicsr/archs/gmflow/gmflow/transformer.py +409 -0
  27. basicsr/archs/gmflow/gmflow/trident_conv.py +90 -0
  28. basicsr/archs/gmflow/gmflow/utils.py +86 -0
  29. basicsr/archs/gmflow/loss.py +37 -0
  30. basicsr/archs/gmflow/main.py +557 -0
  31. basicsr/archs/gmflow/scripts/demo.sh +63 -0
  32. basicsr/archs/gmflow/scripts/evaluate.sh +83 -0
  33. basicsr/archs/gmflow/scripts/submission.sh +67 -0
  34. basicsr/archs/gmflow/scripts/train_gmflow.sh +108 -0
  35. basicsr/archs/gmflow/scripts/train_gmflow_with_refine.sh +128 -0
  36. basicsr/archs/gmflow/utils/dist_utils.py +99 -0
  37. basicsr/archs/gmflow/utils/flow_viz.py +291 -0
  38. basicsr/archs/gmflow/utils/frame_utils.py +131 -0
  39. basicsr/archs/gmflow/utils/logger.py +68 -0
  40. basicsr/archs/gmflow/utils/misc.py +42 -0
  41. basicsr/archs/gmflow/utils/utils.py +58 -0
  42. basicsr/archs/gmflow_arch.py +82 -0
  43. basicsr/archs/keep_arch.py +936 -0
  44. basicsr/archs/rrdbnet_arch.py +119 -0
  45. basicsr/archs/spectral_norm_arch.py +288 -0
  46. basicsr/archs/vgg_arch.py +161 -0
  47. basicsr/archs/vqgan_arch.py +597 -0
  48. basicsr/data/__init__.py +100 -0
  49. basicsr/data/data_sampler.py +48 -0
  50. basicsr/data/data_util.py +392 -0
.gitignore ADDED
@@ -0,0 +1,135 @@
1
+ .vscode
2
+
3
+ # ignored files
4
+ version.py
5
+
6
+ # ignored files with suffix
7
+ *.html
8
+ *.png
9
+ *.jpeg
10
+ *.jpg
11
+ *.pt
12
+ *.gif
13
+ *.pth
14
+ *.dat
15
+ *.zip
16
+ *.so
17
+
18
+ # template
19
+
20
+ # Byte-compiled / optimized / DLL files
21
+ __pycache__/
22
+ *.py[cod]
23
+ *$py.class
24
+
25
+ # C extensions
26
+ *.so
27
+
28
+ # Distribution / packaging
29
+ .Python
30
+ build/
31
+ develop-eggs/
32
+ dist/
33
+ downloads/
34
+ eggs/
35
+ .eggs/
36
+ lib/
37
+ lib64/
38
+ parts/
39
+ sdist/
40
+ var/
41
+ wheels/
42
+ *.egg-info/
43
+ .installed.cfg
44
+ *.egg
45
+ MANIFEST
46
+
47
+ # PyInstaller
48
+ # Usually these files are written by a python script from a template
49
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
50
+ *.manifest
51
+ *.spec
52
+
53
+ # Installer logs
54
+ pip-log.txt
55
+ pip-delete-this-directory.txt
56
+
57
+ # Unit test / coverage reports
58
+ htmlcov/
59
+ .tox/
60
+ .coverage
61
+ .coverage.*
62
+ .cache
63
+ nosetests.xml
64
+ coverage.xml
65
+ *.cover
66
+ .hypothesis/
67
+ .pytest_cache/
68
+
69
+ # Translations
70
+ *.mo
71
+ *.pot
72
+
73
+ # Django stuff:
74
+ *.log
75
+ local_settings.py
76
+ db.sqlite3
77
+
78
+ # Flask stuff:
79
+ instance/
80
+ .webassets-cache
81
+
82
+ # Scrapy stuff:
83
+ .scrapy
84
+
85
+ # Sphinx documentation
86
+ docs/_build/
87
+
88
+ # PyBuilder
89
+ target/
90
+
91
+ # Jupyter Notebook
92
+ .ipynb_checkpoints
93
+
94
+ # pyenv
95
+ .python-version
96
+
97
+ # celery beat schedule file
98
+ celerybeat-schedule
99
+
100
+ # SageMath parsed files
101
+ *.sage.py
102
+
103
+ # Environments
104
+ .env
105
+ .venv
106
+ env/
107
+ venv/
108
+ ENV/
109
+ env.bak/
110
+ venv.bak/
111
+
112
+ # Spyder project settings
113
+ .spyderproject
114
+ .spyproject
115
+
116
+ # Rope project settings
117
+ .ropeproject
118
+
119
+ # mkdocs documentation
120
+ /site
121
+
122
+ # mypy
123
+ .mypy_cache/
124
+
125
+ # project
126
+ results/
127
+ experiments/
128
+ tb_logger/
129
+ build/
130
+
131
+ run.sh
132
+ *debug*
133
+ *_old*
134
+
135
+ *.swp
.gitmodules ADDED
@@ -0,0 +1,3 @@
+ [submodule "basicsr/archs/gmflow"]
+     path = basicsr/archs/gmflow
+     url = https://github.com/haofeixu/gmflow
LICENSE ADDED
@@ -0,0 +1,35 @@
1
+ S-Lab License 1.0
2
+
3
+ Copyright 2024 S-Lab
4
+
5
+ Redistribution and use for non-commercial purpose in source and
6
+ binary forms, with or without modification, are permitted provided
7
+ that the following conditions are met:
8
+
9
+ 1. Redistributions of source code must retain the above copyright
10
+ notice, this list of conditions and the following disclaimer.
11
+
12
+ 2. Redistributions in binary form must reproduce the above copyright
13
+ notice, this list of conditions and the following disclaimer in
14
+ the documentation and/or other materials provided with the
15
+ distribution.
16
+
17
+ 3. Neither the name of the copyright holder nor the names of its
18
+ contributors may be used to endorse or promote products derived
19
+ from this software without specific prior written permission.
20
+
21
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
24
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
25
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
26
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
27
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
28
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
29
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32
+
33
+ In the event that redistribution and/or use for commercial purpose in
34
+ source or binary forms, with or without modification is required,
35
+ please contact the contributor(s) of the work.
README.md CHANGED
@@ -4,8 +4,8 @@ emoji: 🏢
  colorFrom: blue
  colorTo: red
  sdk: gradio
- sdk_version: 5.29.0
- app_file: app.py
+ sdk_version: 5.4.0
+ app_file: hugging_face/app.py
  pinned: false
  license: other
  short_description: Official demo of KEEP (ECCV'24) for face video SR
basicsr/VERSION ADDED
@@ -0,0 +1 @@
+ 1.3.2
basicsr/__init__.py ADDED
@@ -0,0 +1,11 @@
+ # https://github.com/xinntao/BasicSR
+ # flake8: noqa
+ from .archs import *
+ from .data import *
+ from .losses import *
+ from .metrics import *
+ from .models import *
+ from .ops import *
+ from .train import *
+ from .utils import *
+ from .version import __gitsha__, __version__
basicsr/archs/__init__.py ADDED
@@ -0,0 +1,27 @@
+ import importlib
+ from copy import deepcopy
+ from os import path as osp
+
+ from basicsr.utils import get_root_logger, scandir
+ from basicsr.utils.registry import ARCH_REGISTRY
+
+ __all__ = ['build_network']
+
+ # automatically scan and import arch modules for registry
+ # scan all the files under the 'archs' folder and collect files ending with
+ # '_arch.py'
+ arch_folder = osp.dirname(osp.abspath(__file__))
+ arch_filenames = [osp.splitext(osp.basename(v))[0]
+                   for v in scandir(arch_folder) if v.endswith('_arch.py')]
+ # import all the arch modules
+ _arch_modules = [importlib.import_module(
+     f'basicsr.archs.{file_name}') for file_name in arch_filenames]
+
+
+ def build_network(opt):
+     opt = deepcopy(opt)
+     network_type = opt.pop('type')
+     net = ARCH_REGISTRY.get(network_type)(**opt)
+     logger = get_root_logger()
+     logger.info(f'Network [{net.__class__.__name__}] is created.')
+     return net
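
Note: `build_network` resolves the `type` key against `ARCH_REGISTRY` and forwards the remaining keys as constructor arguments. A minimal sketch, assuming the package is importable and using the `ResNetArcFace` arch registered later in this commit (the layer counts below are illustrative, not values shipped in any config here):

```python
from basicsr.archs import build_network

# 'type' selects the registered class; the other keys become constructor kwargs.
# The layer counts are illustrative, not a setting from this commit.
opt = {
    'type': 'ResNetArcFace',
    'block': 'IRBlock',
    'layers': [2, 2, 2, 2],
    'use_se': True,
}
net = build_network(opt)  # logs "Network [ResNetArcFace] is created."
```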
basicsr/archs/arcface_arch.py ADDED
@@ -0,0 +1,245 @@
1
+ import torch.nn as nn
2
+ from basicsr.utils.registry import ARCH_REGISTRY
3
+
4
+
5
+ def conv3x3(inplanes, outplanes, stride=1):
6
+ """A simple wrapper for 3x3 convolution with padding.
7
+
8
+ Args:
9
+ inplanes (int): Channel number of inputs.
10
+ outplanes (int): Channel number of outputs.
11
+ stride (int): Stride in convolution. Default: 1.
12
+ """
13
+ return nn.Conv2d(inplanes, outplanes, kernel_size=3, stride=stride, padding=1, bias=False)
14
+
15
+
16
+ class BasicBlock(nn.Module):
17
+ """Basic residual block used in the ResNetArcFace architecture.
18
+
19
+ Args:
20
+ inplanes (int): Channel number of inputs.
21
+ planes (int): Channel number of outputs.
22
+ stride (int): Stride in convolution. Default: 1.
23
+ downsample (nn.Module): The downsample module. Default: None.
24
+ """
25
+ expansion = 1 # output channel expansion ratio
26
+
27
+ def __init__(self, inplanes, planes, stride=1, downsample=None):
28
+ super(BasicBlock, self).__init__()
29
+ self.conv1 = conv3x3(inplanes, planes, stride)
30
+ self.bn1 = nn.BatchNorm2d(planes)
31
+ self.relu = nn.ReLU(inplace=True)
32
+ self.conv2 = conv3x3(planes, planes)
33
+ self.bn2 = nn.BatchNorm2d(planes)
34
+ self.downsample = downsample
35
+ self.stride = stride
36
+
37
+ def forward(self, x):
38
+ residual = x
39
+
40
+ out = self.conv1(x)
41
+ out = self.bn1(out)
42
+ out = self.relu(out)
43
+
44
+ out = self.conv2(out)
45
+ out = self.bn2(out)
46
+
47
+ if self.downsample is not None:
48
+ residual = self.downsample(x)
49
+
50
+ out += residual
51
+ out = self.relu(out)
52
+
53
+ return out
54
+
55
+
56
+ class IRBlock(nn.Module):
57
+ """Improved residual block (IR Block) used in the ResNetArcFace architecture.
58
+
59
+ Args:
60
+ inplanes (int): Channel number of inputs.
61
+ planes (int): Channel number of outputs.
62
+ stride (int): Stride in convolution. Default: 1.
63
+ downsample (nn.Module): The downsample module. Default: None.
64
+ use_se (bool): Whether to use the SEBlock (squeeze and excitation block). Default: True.
65
+ """
66
+ expansion = 1 # output channel expansion ratio
67
+
68
+ def __init__(self, inplanes, planes, stride=1, downsample=None, use_se=True):
69
+ super(IRBlock, self).__init__()
70
+ self.bn0 = nn.BatchNorm2d(inplanes)
71
+ self.conv1 = conv3x3(inplanes, inplanes)
72
+ self.bn1 = nn.BatchNorm2d(inplanes)
73
+ self.prelu = nn.PReLU()
74
+ self.conv2 = conv3x3(inplanes, planes, stride)
75
+ self.bn2 = nn.BatchNorm2d(planes)
76
+ self.downsample = downsample
77
+ self.stride = stride
78
+ self.use_se = use_se
79
+ if self.use_se:
80
+ self.se = SEBlock(planes)
81
+
82
+ def forward(self, x):
83
+ residual = x
84
+ out = self.bn0(x)
85
+ out = self.conv1(out)
86
+ out = self.bn1(out)
87
+ out = self.prelu(out)
88
+
89
+ out = self.conv2(out)
90
+ out = self.bn2(out)
91
+ if self.use_se:
92
+ out = self.se(out)
93
+
94
+ if self.downsample is not None:
95
+ residual = self.downsample(x)
96
+
97
+ out += residual
98
+ out = self.prelu(out)
99
+
100
+ return out
101
+
102
+
103
+ class Bottleneck(nn.Module):
104
+ """Bottleneck block used in the ResNetArcFace architecture.
105
+
106
+ Args:
107
+ inplanes (int): Channel number of inputs.
108
+ planes (int): Channel number of outputs.
109
+ stride (int): Stride in convolution. Default: 1.
110
+ downsample (nn.Module): The downsample module. Default: None.
111
+ """
112
+ expansion = 4 # output channel expansion ratio
113
+
114
+ def __init__(self, inplanes, planes, stride=1, downsample=None):
115
+ super(Bottleneck, self).__init__()
116
+ self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
117
+ self.bn1 = nn.BatchNorm2d(planes)
118
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
119
+ self.bn2 = nn.BatchNorm2d(planes)
120
+ self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
121
+ self.bn3 = nn.BatchNorm2d(planes * self.expansion)
122
+ self.relu = nn.ReLU(inplace=True)
123
+ self.downsample = downsample
124
+ self.stride = stride
125
+
126
+ def forward(self, x):
127
+ residual = x
128
+
129
+ out = self.conv1(x)
130
+ out = self.bn1(out)
131
+ out = self.relu(out)
132
+
133
+ out = self.conv2(out)
134
+ out = self.bn2(out)
135
+ out = self.relu(out)
136
+
137
+ out = self.conv3(out)
138
+ out = self.bn3(out)
139
+
140
+ if self.downsample is not None:
141
+ residual = self.downsample(x)
142
+
143
+ out += residual
144
+ out = self.relu(out)
145
+
146
+ return out
147
+
148
+
149
+ class SEBlock(nn.Module):
150
+ """The squeeze-and-excitation block (SEBlock) used in the IRBlock.
151
+
152
+ Args:
153
+ channel (int): Channel number of inputs.
154
+ reduction (int): Channel reduction ratio. Default: 16.
155
+ """
156
+
157
+ def __init__(self, channel, reduction=16):
158
+ super(SEBlock, self).__init__()
159
+ self.avg_pool = nn.AdaptiveAvgPool2d(1) # pool to 1x1 without spatial information
160
+ self.fc = nn.Sequential(
161
+ nn.Linear(channel, channel // reduction), nn.PReLU(), nn.Linear(channel // reduction, channel),
162
+ nn.Sigmoid())
163
+
164
+ def forward(self, x):
165
+ b, c, _, _ = x.size()
166
+ y = self.avg_pool(x).view(b, c)
167
+ y = self.fc(y).view(b, c, 1, 1)
168
+ return x * y
169
+
170
+
171
+ @ARCH_REGISTRY.register()
172
+ class ResNetArcFace(nn.Module):
173
+ """ArcFace with ResNet architectures.
174
+
175
+ Ref: ArcFace: Additive Angular Margin Loss for Deep Face Recognition.
176
+
177
+ Args:
178
+ block (str): Block used in the ArcFace architecture.
179
+ layers (tuple(int)): Block numbers in each layer.
180
+ use_se (bool): Whether to use the SEBlock (squeeze and excitation block). Default: True.
181
+ """
182
+
183
+ def __init__(self, block, layers, use_se=True):
184
+ if block == 'IRBlock':
185
+ block = IRBlock
186
+ self.inplanes = 64
187
+ self.use_se = use_se
188
+ super(ResNetArcFace, self).__init__()
189
+
190
+ self.conv1 = nn.Conv2d(1, 64, kernel_size=3, padding=1, bias=False)
191
+ self.bn1 = nn.BatchNorm2d(64)
192
+ self.prelu = nn.PReLU()
193
+ self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2)
194
+ self.layer1 = self._make_layer(block, 64, layers[0])
195
+ self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
196
+ self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
197
+ self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
198
+ self.bn4 = nn.BatchNorm2d(512)
199
+ self.dropout = nn.Dropout()
200
+ self.fc5 = nn.Linear(512 * 8 * 8, 512)
201
+ self.bn5 = nn.BatchNorm1d(512)
202
+
203
+ # initialization
204
+ for m in self.modules():
205
+ if isinstance(m, nn.Conv2d):
206
+ nn.init.xavier_normal_(m.weight)
207
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
208
+ nn.init.constant_(m.weight, 1)
209
+ nn.init.constant_(m.bias, 0)
210
+ elif isinstance(m, nn.Linear):
211
+ nn.init.xavier_normal_(m.weight)
212
+ nn.init.constant_(m.bias, 0)
213
+
214
+ def _make_layer(self, block, planes, num_blocks, stride=1):
215
+ downsample = None
216
+ if stride != 1 or self.inplanes != planes * block.expansion:
217
+ downsample = nn.Sequential(
218
+ nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False),
219
+ nn.BatchNorm2d(planes * block.expansion),
220
+ )
221
+ layers = []
222
+ layers.append(block(self.inplanes, planes, stride, downsample, use_se=self.use_se))
223
+ self.inplanes = planes
224
+ for _ in range(1, num_blocks):
225
+ layers.append(block(self.inplanes, planes, use_se=self.use_se))
226
+
227
+ return nn.Sequential(*layers)
228
+
229
+ def forward(self, x):
230
+ x = self.conv1(x)
231
+ x = self.bn1(x)
232
+ x = self.prelu(x)
233
+ x = self.maxpool(x)
234
+
235
+ x = self.layer1(x)
236
+ x = self.layer2(x)
237
+ x = self.layer3(x)
238
+ x = self.layer4(x)
239
+ x = self.bn4(x)
240
+ x = self.dropout(x)
241
+ x = x.view(x.size(0), -1)
242
+ x = self.fc5(x)
243
+ x = self.bn5(x)
244
+
245
+ return x
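
For orientation, a minimal smoke-test sketch of the `ResNetArcFace` class above. The ResNet-18-style layer counts and the 1-channel 128x128 input are assumptions chosen so the flattened feature matches `fc5`'s `512 * 8 * 8` input (128 → 64 after maxpool → 32 → 16 → 8 through the stride-2 layers):

```python
import torch
from basicsr.archs.arcface_arch import ResNetArcFace

# Illustrative layer counts; eval() so BatchNorm/Dropout use inference behavior.
net = ResNetArcFace(block='IRBlock', layers=[2, 2, 2, 2], use_se=True).eval()
with torch.no_grad():
    feat = net(torch.randn(4, 1, 128, 128))
print(feat.shape)  # torch.Size([4, 512])
```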
basicsr/archs/arch_util.py ADDED
@@ -0,0 +1,318 @@
1
+ import collections.abc
2
+ import math
3
+ import torch
4
+ import torchvision
5
+ import warnings
6
+ from distutils.version import LooseVersion
7
+ from itertools import repeat
8
+ from torch import nn as nn
9
+ from torch.nn import functional as F
10
+ from torch.nn import init as init
11
+ from torch.nn.modules.batchnorm import _BatchNorm
12
+
13
+ from basicsr.ops.dcn import ModulatedDeformConvPack, modulated_deform_conv
14
+ from basicsr.utils import get_root_logger
15
+
16
+
17
+ @torch.no_grad()
18
+ def default_init_weights(module_list, scale=1, bias_fill=0, **kwargs):
19
+ """Initialize network weights.
20
+
21
+ Args:
22
+ module_list (list[nn.Module] | nn.Module): Modules to be initialized.
23
+ scale (float): Scale initialized weights, especially for residual
24
+ blocks. Default: 1.
25
+ bias_fill (float): The value to fill bias. Default: 0
26
+ kwargs (dict): Other arguments for initialization function.
27
+ """
28
+ if not isinstance(module_list, list):
29
+ module_list = [module_list]
30
+ for module in module_list:
31
+ for m in module.modules():
32
+ if isinstance(m, nn.Conv2d):
33
+ init.kaiming_normal_(m.weight, **kwargs)
34
+ m.weight.data *= scale
35
+ if m.bias is not None:
36
+ m.bias.data.fill_(bias_fill)
37
+ elif isinstance(m, nn.Linear):
38
+ init.kaiming_normal_(m.weight, **kwargs)
39
+ m.weight.data *= scale
40
+ if m.bias is not None:
41
+ m.bias.data.fill_(bias_fill)
42
+ elif isinstance(m, _BatchNorm):
43
+ init.constant_(m.weight, 1)
44
+ if m.bias is not None:
45
+ m.bias.data.fill_(bias_fill)
46
+
47
+
48
+ def make_layer(basic_block, num_basic_block, **kwarg):
49
+ """Make layers by stacking the same blocks.
50
+
51
+ Args:
52
+ basic_block (nn.module): nn.module class for basic block.
53
+ num_basic_block (int): number of blocks.
54
+
55
+ Returns:
56
+ nn.Sequential: Stacked blocks in nn.Sequential.
57
+ """
58
+ layers = []
59
+ for _ in range(num_basic_block):
60
+ layers.append(basic_block(**kwarg))
61
+ return nn.Sequential(*layers)
62
+
63
+
64
+ class ResidualBlockNoBN(nn.Module):
65
+ """Residual block without BN.
66
+
67
+ It has a style of:
68
+ ---Conv-ReLU-Conv-+-
69
+ |________________|
70
+
71
+ Args:
72
+ num_feat (int): Channel number of intermediate features.
73
+ Default: 64.
74
+ res_scale (float): Residual scale. Default: 1.
75
+ pytorch_init (bool): If set to True, use pytorch default init,
76
+ otherwise, use default_init_weights. Default: False.
77
+ """
78
+
79
+ def __init__(self, num_feat=64, res_scale=1, pytorch_init=False):
80
+ super(ResidualBlockNoBN, self).__init__()
81
+ self.res_scale = res_scale
82
+ self.conv1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1, bias=True)
83
+ self.conv2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1, bias=True)
84
+ self.relu = nn.ReLU(inplace=True)
85
+
86
+ if not pytorch_init:
87
+ default_init_weights([self.conv1, self.conv2], 0.1)
88
+
89
+ def forward(self, x):
90
+ identity = x
91
+ out = self.conv2(self.relu(self.conv1(x)))
92
+ return identity + out * self.res_scale
93
+
94
+
95
+ class Upsample(nn.Sequential):
96
+ """Upsample module.
97
+
98
+ Args:
99
+ scale (int): Scale factor. Supported scales: 2^n and 3.
100
+ num_feat (int): Channel number of intermediate features.
101
+ """
102
+
103
+ def __init__(self, scale, num_feat):
104
+ m = []
105
+ if (scale & (scale - 1)) == 0: # scale = 2^n
106
+ for _ in range(int(math.log(scale, 2))):
107
+ m.append(nn.Conv2d(num_feat, 4 * num_feat, 3, 1, 1))
108
+ m.append(nn.PixelShuffle(2))
109
+ elif scale == 3:
110
+ m.append(nn.Conv2d(num_feat, 9 * num_feat, 3, 1, 1))
111
+ m.append(nn.PixelShuffle(3))
112
+ else:
113
+ raise ValueError(f'scale {scale} is not supported. Supported scales: 2^n and 3.')
114
+ super(Upsample, self).__init__(*m)
115
+
116
+
117
+ def flow_warp(x, flow, interp_mode='bilinear', padding_mode='zeros', align_corners=True):
118
+ """Warp an image or feature map with optical flow.
119
+
120
+ Args:
121
+ x (Tensor): Tensor with size (n, c, h, w).
122
+ flow (Tensor): Tensor with size (n, h, w, 2), normal value.
123
+ interp_mode (str): 'nearest' or 'bilinear'. Default: 'bilinear'.
124
+ padding_mode (str): 'zeros' or 'border' or 'reflection'.
125
+ Default: 'zeros'.
126
+ align_corners (bool): Before pytorch 1.3, the default value is
127
+ align_corners=True. After pytorch 1.3, the default value is
128
+ align_corners=False. Here, we use the True as default.
129
+
130
+ Returns:
131
+ Tensor: Warped image or feature map.
132
+ """
133
+ assert x.size()[-2:] == flow.size()[1:3]
134
+ _, _, h, w = x.size()
135
+ # create mesh grid
136
+ grid_y, grid_x = torch.meshgrid(torch.arange(0, h).type_as(x), torch.arange(0, w).type_as(x))
137
+ grid = torch.stack((grid_x, grid_y), 2).float() # W(x), H(y), 2
138
+ grid.requires_grad = False
139
+
140
+ vgrid = grid + flow
141
+ # scale grid to [-1,1]
142
+ vgrid_x = 2.0 * vgrid[:, :, :, 0] / max(w - 1, 1) - 1.0
143
+ vgrid_y = 2.0 * vgrid[:, :, :, 1] / max(h - 1, 1) - 1.0
144
+ vgrid_scaled = torch.stack((vgrid_x, vgrid_y), dim=3)
145
+ output = F.grid_sample(x, vgrid_scaled, mode=interp_mode, padding_mode=padding_mode, align_corners=align_corners)
146
+
147
+ # TODO, what if align_corners=False
148
+ return output
149
+
150
+
151
+ def resize_flow(flow, size_type, sizes, interp_mode='bilinear', align_corners=False):
152
+ """Resize a flow according to ratio or shape.
153
+
154
+ Args:
155
+ flow (Tensor): Precomputed flow. shape [N, 2, H, W].
156
+ size_type (str): 'ratio' or 'shape'.
157
+ sizes (list[int | float]): the ratio for resizing or the final output
158
+ shape.
159
+ 1) The order of ratio should be [ratio_h, ratio_w]. For
160
+ downsampling, the ratio should be smaller than 1.0 (i.e., ratio
161
+ < 1.0). For upsampling, the ratio should be larger than 1.0 (i.e.,
162
+ ratio > 1.0).
163
+ 2) The order of output_size should be [out_h, out_w].
164
+ interp_mode (str): The mode of interpolation for resizing.
165
+ Default: 'bilinear'.
166
+ align_corners (bool): Whether align corners. Default: False.
167
+
168
+ Returns:
169
+ Tensor: Resized flow.
170
+ """
171
+ _, _, flow_h, flow_w = flow.size()
172
+ if size_type == 'ratio':
173
+ output_h, output_w = int(flow_h * sizes[0]), int(flow_w * sizes[1])
174
+ elif size_type == 'shape':
175
+ output_h, output_w = sizes[0], sizes[1]
176
+ else:
177
+ raise ValueError(f'Size type should be ratio or shape, but got type {size_type}.')
178
+
179
+ input_flow = flow.clone()
180
+ ratio_h = output_h / flow_h
181
+ ratio_w = output_w / flow_w
182
+ input_flow[:, 0, :, :] *= ratio_w
183
+ input_flow[:, 1, :, :] *= ratio_h
184
+ resized_flow = F.interpolate(
185
+ input=input_flow, size=(output_h, output_w), mode=interp_mode, align_corners=align_corners)
186
+ return resized_flow
187
+
188
+
189
+ # TODO: may write a cpp file
190
+ def pixel_unshuffle(x, scale):
191
+ """ Pixel unshuffle.
192
+
193
+ Args:
194
+ x (Tensor): Input feature with shape (b, c, hh, hw).
195
+ scale (int): Downsample ratio.
196
+
197
+ Returns:
198
+ Tensor: the pixel unshuffled feature.
199
+ """
200
+ b, c, hh, hw = x.size()
201
+ out_channel = c * (scale**2)
202
+ assert hh % scale == 0 and hw % scale == 0
203
+ h = hh // scale
204
+ w = hw // scale
205
+ x_view = x.view(b, c, h, scale, w, scale)
206
+ return x_view.permute(0, 1, 3, 5, 2, 4).reshape(b, out_channel, h, w)
207
+
208
+
209
+ class DCNv2Pack(ModulatedDeformConvPack):
210
+ """Modulated deformable conv for deformable alignment.
211
+
212
+ Different from the official DCNv2Pack, which generates offsets and masks
213
+ from the preceding features, this DCNv2Pack takes another different
214
+ features to generate offsets and masks.
215
+
216
+ Ref:
217
+ Delving Deep into Deformable Alignment in Video Super-Resolution.
218
+ """
219
+
220
+ def forward(self, x, feat):
221
+ out = self.conv_offset(feat)
222
+ o1, o2, mask = torch.chunk(out, 3, dim=1)
223
+ offset = torch.cat((o1, o2), dim=1)
224
+ mask = torch.sigmoid(mask)
225
+
226
+ offset_absmean = torch.mean(torch.abs(offset))
227
+ if offset_absmean > 50:
228
+ logger = get_root_logger()
229
+ logger.warning(f'Offset abs mean is {offset_absmean}, larger than 50.')
230
+
231
+ if LooseVersion(torchvision.__version__) >= LooseVersion('0.9.0'):
232
+ return torchvision.ops.deform_conv2d(x, offset, self.weight, self.bias, self.stride, self.padding,
233
+ self.dilation, mask)
234
+ else:
235
+ return modulated_deform_conv(x, offset, mask, self.weight, self.bias, self.stride, self.padding,
236
+ self.dilation, self.groups, self.deformable_groups)
237
+
238
+
239
+ def _no_grad_trunc_normal_(tensor, mean, std, a, b):
240
+ # From: https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/weight_init.py
241
+ # Cut & paste from PyTorch official master until it's in a few official releases - RW
242
+ # Method based on https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf
243
+ def norm_cdf(x):
244
+ # Computes standard normal cumulative distribution function
245
+ return (1. + math.erf(x / math.sqrt(2.))) / 2.
246
+
247
+ if (mean < a - 2 * std) or (mean > b + 2 * std):
248
+ warnings.warn(
249
+ 'mean is more than 2 std from [a, b] in nn.init.trunc_normal_. '
250
+ 'The distribution of values may be incorrect.',
251
+ stacklevel=2)
252
+
253
+ with torch.no_grad():
254
+ # Values are generated by using a truncated uniform distribution and
255
+ # then using the inverse CDF for the normal distribution.
256
+ # Get upper and lower cdf values
257
+ low = norm_cdf((a - mean) / std)
258
+ up = norm_cdf((b - mean) / std)
259
+
260
+ # Uniformly fill tensor with values from [low, up], then translate to
261
+ # [2l-1, 2u-1].
262
+ tensor.uniform_(2 * low - 1, 2 * up - 1)
263
+
264
+ # Use inverse cdf transform for normal distribution to get truncated
265
+ # standard normal
266
+ tensor.erfinv_()
267
+
268
+ # Transform to proper mean, std
269
+ tensor.mul_(std * math.sqrt(2.))
270
+ tensor.add_(mean)
271
+
272
+ # Clamp to ensure it's in the proper range
273
+ tensor.clamp_(min=a, max=b)
274
+ return tensor
275
+
276
+
277
+ def trunc_normal_(tensor, mean=0., std=1., a=-2., b=2.):
278
+ r"""Fills the input Tensor with values drawn from a truncated
279
+ normal distribution.
280
+
281
+ From: https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/weight_init.py
282
+
283
+ The values are effectively drawn from the
284
+ normal distribution :math:`\mathcal{N}(\text{mean}, \text{std}^2)`
285
+ with values outside :math:`[a, b]` redrawn until they are within
286
+ the bounds. The method used for generating the random values works
287
+ best when :math:`a \leq \text{mean} \leq b`.
288
+
289
+ Args:
290
+ tensor: an n-dimensional `torch.Tensor`
291
+ mean: the mean of the normal distribution
292
+ std: the standard deviation of the normal distribution
293
+ a: the minimum cutoff value
294
+ b: the maximum cutoff value
295
+
296
+ Examples:
297
+ >>> w = torch.empty(3, 5)
298
+ >>> nn.init.trunc_normal_(w)
299
+ """
300
+ return _no_grad_trunc_normal_(tensor, mean, std, a, b)
301
+
302
+
303
+ # From PyTorch
304
+ def _ntuple(n):
305
+
306
+ def parse(x):
307
+ if isinstance(x, collections.abc.Iterable):
308
+ return x
309
+ return tuple(repeat(x, n))
310
+
311
+ return parse
312
+
313
+
314
+ to_1tuple = _ntuple(1)
315
+ to_2tuple = _ntuple(2)
316
+ to_3tuple = _ntuple(3)
317
+ to_4tuple = _ntuple(4)
318
+ to_ntuple = _ntuple
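
As a quick illustration of the flow utilities above (the shapes are assumptions for the sketch): `flow_warp` expects the flow as `(n, h, w, 2)` in pixel units, while `resize_flow` works on `(n, 2, h, w)` and rescales the flow values together with the spatial size:

```python
import torch
from basicsr.archs.arch_util import flow_warp, resize_flow

x = torch.randn(1, 3, 64, 64)        # image or feature map, (n, c, h, w)
flow = torch.zeros(1, 64, 64, 2)     # zero flow -> warped output ~ equals the input
warped = flow_warp(x, flow)          # (1, 3, 64, 64)

flow_nchw = flow.permute(0, 3, 1, 2)                      # (n, 2, h, w) layout
flow_half = resize_flow(flow_nchw, 'ratio', [0.5, 0.5])   # (1, 2, 32, 32)
```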
basicsr/archs/correlation.py ADDED
@@ -0,0 +1,426 @@
1
+ import torch
2
+
3
+ import cupy
4
+ import re
5
+
6
+
7
+ class Stream:
8
+ ptr = torch.cuda.current_stream().cuda_stream
9
+ # end
10
+
11
+
12
+ kernel_Correlation_rearrange = '''
13
+ extern "C" __global__ void kernel_Correlation_rearrange(
14
+ const int n,
15
+ const float* input,
16
+ float* output
17
+ ) {
18
+ int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x;
19
+
20
+ if (intIndex >= n) {
21
+ return;
22
+ }
23
+
24
+ int intSample = blockIdx.z;
25
+ int intChannel = blockIdx.y;
26
+
27
+ float dblValue = input[(((intSample * SIZE_1(input)) + intChannel) * SIZE_2(input) * SIZE_3(input)) + intIndex];
28
+
29
+ __syncthreads();
30
+
31
+ int intPaddedY = (intIndex / SIZE_3(input)) + 4;
32
+ int intPaddedX = (intIndex % SIZE_3(input)) + 4;
33
+ int intRearrange = ((SIZE_3(input) + 8) * intPaddedY) + intPaddedX;
34
+
35
+ output[(((intSample * SIZE_1(output) * SIZE_2(output)) + intRearrange) * SIZE_1(input)) + intChannel] = dblValue;
36
+ }
37
+ '''
38
+
39
+ kernel_Correlation_updateOutput = '''
40
+ extern "C" __global__ void kernel_Correlation_updateOutput(
41
+ const int n,
42
+ const float* rbot0,
43
+ const float* rbot1,
44
+ float* top
45
+ ) {
46
+ extern __shared__ char patch_data_char[];
47
+
48
+ float *patch_data = (float *)patch_data_char;
49
+
50
+ // First (upper left) position of kernel upper-left corner in current center position of neighborhood in image 1
51
+ int x1 = blockIdx.x + 4;
52
+ int y1 = blockIdx.y + 4;
53
+ int item = blockIdx.z;
54
+ int ch_off = threadIdx.x;
55
+
56
+ // Load 3D patch into shared memory
57
+ for (int j = 0; j < 1; j++) { // HEIGHT
58
+ for (int i = 0; i < 1; i++) { // WIDTH
59
+ int ji_off = (j + i) * SIZE_3(rbot0);
60
+ for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS
61
+ int idx1 = ((item * SIZE_1(rbot0) + y1+j) * SIZE_2(rbot0) + x1+i) * SIZE_3(rbot0) + ch;
62
+ int idxPatchData = ji_off + ch;
63
+ patch_data[idxPatchData] = rbot0[idx1];
64
+ }
65
+ }
66
+ }
67
+
68
+ __syncthreads();
69
+
70
+ __shared__ float sum[32];
71
+
72
+ // Compute correlation
73
+ for (int top_channel = 0; top_channel < SIZE_1(top); top_channel++) {
74
+ sum[ch_off] = 0;
75
+
76
+ int s2o = top_channel % 9 - 4;
77
+ int s2p = top_channel / 9 - 4;
78
+
79
+ for (int j = 0; j < 1; j++) { // HEIGHT
80
+ for (int i = 0; i < 1; i++) { // WIDTH
81
+ int ji_off = (j + i) * SIZE_3(rbot0);
82
+ for (int ch = ch_off; ch < SIZE_3(rbot0); ch += 32) { // CHANNELS
83
+ int x2 = x1 + s2o;
84
+ int y2 = y1 + s2p;
85
+
86
+ int idxPatchData = ji_off + ch;
87
+ int idx2 = ((item * SIZE_1(rbot0) + y2+j) * SIZE_2(rbot0) + x2+i) * SIZE_3(rbot0) + ch;
88
+
89
+ sum[ch_off] += patch_data[idxPatchData] * rbot1[idx2];
90
+ }
91
+ }
92
+ }
93
+
94
+ __syncthreads();
95
+
96
+ if (ch_off == 0) {
97
+ float total_sum = 0;
98
+ for (int idx = 0; idx < 32; idx++) {
99
+ total_sum += sum[idx];
100
+ }
101
+ const int sumelems = SIZE_3(rbot0);
102
+ const int index = ((top_channel*SIZE_2(top) + blockIdx.y)*SIZE_3(top))+blockIdx.x;
103
+ top[index + item*SIZE_1(top)*SIZE_2(top)*SIZE_3(top)] = total_sum / (float)sumelems;
104
+ }
105
+ }
106
+ }
107
+ '''
108
+
109
+ kernel_Correlation_updateGradFirst = '''
110
+ #define ROUND_OFF 50000
111
+
112
+ extern "C" __global__ void kernel_Correlation_updateGradFirst(
113
+ const int n,
114
+ const int intSample,
115
+ const float* rbot0,
116
+ const float* rbot1,
117
+ const float* gradOutput,
118
+ float* gradFirst,
119
+ float* gradSecond
120
+ ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) {
121
+ int n = intIndex % SIZE_1(gradFirst); // channels
122
+ int l = (intIndex / SIZE_1(gradFirst)) % SIZE_3(gradFirst) + 4; // w-pos
123
+ int m = (intIndex / SIZE_1(gradFirst) / SIZE_3(gradFirst)) % SIZE_2(gradFirst) + 4; // h-pos
124
+
125
+ // round_off is a trick to enable integer division with ceil, even for negative numbers
126
+ // We use a large offset, for the inner part not to become negative.
127
+ const int round_off = ROUND_OFF;
128
+ const int round_off_s1 = round_off;
129
+
130
+ // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior:
131
+ int xmin = (l - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4)
132
+ int ymin = (m - 4 + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4)
133
+
134
+ // Same here:
135
+ int xmax = (l - 4 + round_off_s1) - round_off; // floor (l - 4)
136
+ int ymax = (m - 4 + round_off_s1) - round_off; // floor (m - 4)
137
+
138
+ float sum = 0;
139
+ if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) {
140
+ xmin = max(0,xmin);
141
+ xmax = min(SIZE_3(gradOutput)-1,xmax);
142
+
143
+ ymin = max(0,ymin);
144
+ ymax = min(SIZE_2(gradOutput)-1,ymax);
145
+
146
+ for (int p = -4; p <= 4; p++) {
147
+ for (int o = -4; o <= 4; o++) {
148
+ // Get rbot1 data:
149
+ int s2o = o;
150
+ int s2p = p;
151
+ int idxbot1 = ((intSample * SIZE_1(rbot0) + (m+s2p)) * SIZE_2(rbot0) + (l+s2o)) * SIZE_3(rbot0) + n;
152
+ float bot1tmp = rbot1[idxbot1]; // rbot1[l+s2o,m+s2p,n]
153
+
154
+ // Index offset for gradOutput in following loops:
155
+ int op = (p+4) * 9 + (o+4); // index[o,p]
156
+ int idxopoffset = (intSample * SIZE_1(gradOutput) + op);
157
+
158
+ for (int y = ymin; y <= ymax; y++) {
159
+ for (int x = xmin; x <= xmax; x++) {
160
+ int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p]
161
+ sum += gradOutput[idxgradOutput] * bot1tmp;
162
+ }
163
+ }
164
+ }
165
+ }
166
+ }
167
+ const int sumelems = SIZE_1(gradFirst);
168
+ const int bot0index = ((n * SIZE_2(gradFirst)) + (m-4)) * SIZE_3(gradFirst) + (l-4);
169
+ gradFirst[bot0index + intSample*SIZE_1(gradFirst)*SIZE_2(gradFirst)*SIZE_3(gradFirst)] = sum / (float)sumelems;
170
+ } }
171
+ '''
172
+
173
+ kernel_Correlation_updateGradSecond = '''
174
+ #define ROUND_OFF 50000
175
+
176
+ extern "C" __global__ void kernel_Correlation_updateGradSecond(
177
+ const int n,
178
+ const int intSample,
179
+ const float* rbot0,
180
+ const float* rbot1,
181
+ const float* gradOutput,
182
+ float* gradFirst,
183
+ float* gradSecond
184
+ ) { for (int intIndex = (blockIdx.x * blockDim.x) + threadIdx.x; intIndex < n; intIndex += blockDim.x * gridDim.x) {
185
+ int n = intIndex % SIZE_1(gradSecond); // channels
186
+ int l = (intIndex / SIZE_1(gradSecond)) % SIZE_3(gradSecond) + 4; // w-pos
187
+ int m = (intIndex / SIZE_1(gradSecond) / SIZE_3(gradSecond)) % SIZE_2(gradSecond) + 4; // h-pos
188
+
189
+ // round_off is a trick to enable integer division with ceil, even for negative numbers
190
+ // We use a large offset, for the inner part not to become negative.
191
+ const int round_off = ROUND_OFF;
192
+ const int round_off_s1 = round_off;
193
+
194
+ float sum = 0;
195
+ for (int p = -4; p <= 4; p++) {
196
+ for (int o = -4; o <= 4; o++) {
197
+ int s2o = o;
198
+ int s2p = p;
199
+
200
+ //Get X,Y ranges and clamp
201
+ // We add round_off before_s1 the int division and subtract round_off after it, to ensure the formula matches ceil behavior:
202
+ int xmin = (l - 4 - s2o + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o)
203
+ int ymin = (m - 4 - s2p + round_off_s1 - 1) + 1 - round_off; // ceil (l - 4 - s2o)
204
+
205
+ // Same here:
206
+ int xmax = (l - 4 - s2o + round_off_s1) - round_off; // floor (l - 4 - s2o)
207
+ int ymax = (m - 4 - s2p + round_off_s1) - round_off; // floor (m - 4 - s2p)
208
+
209
+ if (xmax>=0 && ymax>=0 && (xmin<=SIZE_3(gradOutput)-1) && (ymin<=SIZE_2(gradOutput)-1)) {
210
+ xmin = max(0,xmin);
211
+ xmax = min(SIZE_3(gradOutput)-1,xmax);
212
+
213
+ ymin = max(0,ymin);
214
+ ymax = min(SIZE_2(gradOutput)-1,ymax);
215
+
216
+ // Get rbot0 data:
217
+ int idxbot0 = ((intSample * SIZE_1(rbot0) + (m-s2p)) * SIZE_2(rbot0) + (l-s2o)) * SIZE_3(rbot0) + n;
218
+ float bot0tmp = rbot0[idxbot0]; // rbot1[l+s2o,m+s2p,n]
219
+
220
+ // Index offset for gradOutput in following loops:
221
+ int op = (p+4) * 9 + (o+4); // index[o,p]
222
+ int idxopoffset = (intSample * SIZE_1(gradOutput) + op);
223
+
224
+ for (int y = ymin; y <= ymax; y++) {
225
+ for (int x = xmin; x <= xmax; x++) {
226
+ int idxgradOutput = (idxopoffset * SIZE_2(gradOutput) + y) * SIZE_3(gradOutput) + x; // gradOutput[x,y,o,p]
227
+ sum += gradOutput[idxgradOutput] * bot0tmp;
228
+ }
229
+ }
230
+ }
231
+ }
232
+ }
233
+ const int sumelems = SIZE_1(gradSecond);
234
+ const int bot1index = ((n * SIZE_2(gradSecond)) + (m-4)) * SIZE_3(gradSecond) + (l-4);
235
+ gradSecond[bot1index + intSample*SIZE_1(gradSecond)*SIZE_2(gradSecond)*SIZE_3(gradSecond)] = sum / (float)sumelems;
236
+ } }
237
+ '''
238
+
239
+
240
+ def cupy_kernel(strFunction, objectVariables):
241
+ strKernel = globals()[strFunction]
242
+
243
+ while True:
244
+ objectMatch = re.search('(SIZE_)([0-4])(\()([^\)]*)(\))', strKernel)
245
+
246
+ if objectMatch is None:
247
+ break
248
+ # end
249
+
250
+ intArg = int(objectMatch.group(2))
251
+
252
+ strTensor = objectMatch.group(4)
253
+ intSizes = objectVariables[strTensor].size()
254
+
255
+ strKernel = strKernel.replace(
256
+ objectMatch.group(), str(intSizes[intArg]))
257
+ # end
258
+
259
+ while True:
260
+ objectMatch = re.search('(VALUE_)([0-4])(\()([^\)]+)(\))', strKernel)
261
+
262
+ if objectMatch is None:
263
+ break
264
+ # end
265
+
266
+ intArgs = int(objectMatch.group(2))
267
+ strArgs = objectMatch.group(4).split(',')
268
+
269
+ strTensor = strArgs[0]
270
+ intStrides = objectVariables[strTensor].stride()
271
+ strIndex = ['((' + strArgs[intArg + 1].replace('{', '(').replace('}', ')').strip(
272
+ ) + ')*' + str(intStrides[intArg]) + ')' for intArg in range(intArgs)]
273
+
274
+ strKernel = strKernel.replace(objectMatch.group(
275
+ 0), strTensor + '[' + str.join('+', strIndex) + ']')
276
+ # end
277
+
278
+ return strKernel
279
+ # end
280
+
281
+ # @cupy.util.memoize(for_each_device=True)
282
+
283
+
284
+ @cupy.memoize(for_each_device=True)
285
+ def cupy_launch(strFunction, strKernel):
286
+ return cupy.cuda.compile_with_cache(strKernel).get_function(strFunction)
287
+ # end
288
+
289
+
290
+ class _FunctionCorrelation(torch.autograd.Function):
291
+ @staticmethod
292
+ def forward(self, first, second):
293
+ rbot0 = first.new_zeros([first.size(0), first.size(
294
+ 2) + 8, first.size(3) + 8, first.size(1)])
295
+ rbot1 = first.new_zeros([first.size(0), first.size(
296
+ 2) + 8, first.size(3) + 8, first.size(1)])
297
+
298
+ self.save_for_backward(first, second, rbot0, rbot1)
299
+
300
+ assert(first.is_contiguous() == True)
301
+ assert(second.is_contiguous() == True)
302
+
303
+ output = first.new_zeros(
304
+ [first.size(0), 81, first.size(2), first.size(3)])
305
+
306
+ if first.is_cuda == True:
307
+ n = first.size(2) * first.size(3)
308
+ cupy_launch('kernel_Correlation_rearrange', cupy_kernel('kernel_Correlation_rearrange', {
309
+ 'input': first,
310
+ 'output': rbot0
311
+ }))(
312
+ grid=tuple([int((n + 16 - 1) / 16),
313
+ first.size(1), first.size(0)]),
314
+ block=tuple([16, 1, 1]),
315
+ args=[n, first.data_ptr(), rbot0.data_ptr()],
316
+ stream=Stream
317
+ )
318
+
319
+ n = second.size(2) * second.size(3)
320
+ cupy_launch('kernel_Correlation_rearrange', cupy_kernel('kernel_Correlation_rearrange', {
321
+ 'input': second,
322
+ 'output': rbot1
323
+ }))(
324
+ grid=tuple([int((n + 16 - 1) / 16),
325
+ second.size(1), second.size(0)]),
326
+ block=tuple([16, 1, 1]),
327
+ args=[n, second.data_ptr(), rbot1.data_ptr()],
328
+ stream=Stream
329
+ )
330
+
331
+ n = output.size(1) * output.size(2) * output.size(3)
332
+ cupy_launch('kernel_Correlation_updateOutput', cupy_kernel('kernel_Correlation_updateOutput', {
333
+ 'rbot0': rbot0,
334
+ 'rbot1': rbot1,
335
+ 'top': output
336
+ }))(
337
+ grid=tuple([output.size(3), output.size(2), output.size(0)]),
338
+ block=tuple([32, 1, 1]),
339
+ shared_mem=first.size(1) * 4,
340
+ args=[n, rbot0.data_ptr(), rbot1.data_ptr(),
341
+ output.data_ptr()],
342
+ stream=Stream
343
+ )
344
+
345
+ elif first.is_cuda == False:
346
+ raise NotImplementedError()
347
+
348
+ # end
349
+
350
+ return output
351
+ # end
352
+
353
+ @staticmethod
354
+ def backward(self, gradOutput):
355
+ first, second, rbot0, rbot1 = self.saved_tensors
356
+
357
+ assert(gradOutput.is_contiguous() == True)
358
+
359
+ gradFirst = first.new_zeros([first.size(0), first.size(1), first.size(
360
+ 2), first.size(3)]) if self.needs_input_grad[0] == True else None
361
+ gradSecond = first.new_zeros([first.size(0), first.size(1), first.size(
362
+ 2), first.size(3)]) if self.needs_input_grad[1] == True else None
363
+
364
+ if first.is_cuda == True:
365
+ if gradFirst is not None:
366
+ for intSample in range(first.size(0)):
367
+ n = first.size(1) * first.size(2) * first.size(3)
368
+ cupy_launch('kernel_Correlation_updateGradFirst', cupy_kernel('kernel_Correlation_updateGradFirst', {
369
+ 'rbot0': rbot0,
370
+ 'rbot1': rbot1,
371
+ 'gradOutput': gradOutput,
372
+ 'gradFirst': gradFirst,
373
+ 'gradSecond': None
374
+ }))(
375
+ grid=tuple([int((n + 512 - 1) / 512), 1, 1]),
376
+ block=tuple([512, 1, 1]),
377
+ args=[n, intSample, rbot0.data_ptr(), rbot1.data_ptr(
378
+ ), gradOutput.data_ptr(), gradFirst.data_ptr(), None],
379
+ stream=Stream
380
+ )
381
+ # end
382
+ # end
383
+
384
+ if gradSecond is not None:
385
+ for intSample in range(first.size(0)):
386
+ n = first.size(1) * first.size(2) * first.size(3)
387
+ cupy_launch('kernel_Correlation_updateGradSecond', cupy_kernel('kernel_Correlation_updateGradSecond', {
388
+ 'rbot0': rbot0,
389
+ 'rbot1': rbot1,
390
+ 'gradOutput': gradOutput,
391
+ 'gradFirst': None,
392
+ 'gradSecond': gradSecond
393
+ }))(
394
+ grid=tuple([int((n + 512 - 1) / 512), 1, 1]),
395
+ block=tuple([512, 1, 1]),
396
+ args=[n, intSample, rbot0.data_ptr(), rbot1.data_ptr(
397
+ ), gradOutput.data_ptr(), None, gradSecond.data_ptr()],
398
+ stream=Stream
399
+ )
400
+ # end
401
+ # end
402
+
403
+ elif first.is_cuda == False:
404
+ raise NotImplementedError()
405
+
406
+ # end
407
+
408
+ return gradFirst, gradSecond
409
+ # end
410
+ # end
411
+
412
+
413
+ def FunctionCorrelation(tensorFirst, tensorSecond):
414
+ return _FunctionCorrelation.apply(tensorFirst, tensorSecond)
415
+ # end
416
+
417
+
418
+ class ModuleCorrelation(torch.nn.Module):
419
+ def __init__(self):
420
+ super(ModuleCorrelation, self).__init__()
421
+ # end
422
+
423
+ def forward(self, tensorFirst, tensorSecond):
424
+ return _FunctionCorrelation.apply(tensorFirst, tensorSecond)
425
+ # end
426
+ # end
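
Usage sketch for the CuPy-backed cost volume above (assumes a CUDA device and CuPy installed; the kernels are compiled on first call, have no CPU path, and use a fixed 9x9 search window, so the output always has 81 channels):

```python
import torch
from basicsr.archs.correlation import FunctionCorrelation

if torch.cuda.is_available():
    # Contiguous float tensors on the GPU, (n, c, h, w); shapes are illustrative.
    feat0 = torch.randn(1, 64, 32, 32, device='cuda').contiguous()
    feat1 = torch.randn(1, 64, 32, 32, device='cuda').contiguous()
    cost_volume = FunctionCorrelation(feat0, feat1)
    print(cost_volume.shape)  # torch.Size([1, 81, 32, 32])
```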
basicsr/archs/gmflow/.gitignore ADDED
@@ -0,0 +1,160 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ #.idea/
basicsr/archs/gmflow/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2022, Haofei Xu
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
basicsr/archs/gmflow/README.md ADDED
@@ -0,0 +1,239 @@
+ # GMFlow
+
+ Official PyTorch implementation of paper:
+
+ [**GMFlow: Learning Optical Flow via Global Matching**](https://arxiv.org/abs/2111.13680), **CVPR 2022, Oral**
+
+ Authors: [Haofei Xu](https://haofeixu.github.io/), [Jing Zhang](https://scholar.google.com.hk/citations?user=9jH5v74AAAAJ), [Jianfei Cai](https://jianfei-cai.github.io/), [Hamid Rezatofighi](https://scholar.google.com/citations?user=VxAuxMwAAAAJ), [Dacheng Tao](https://scholar.google.com/citations?user=RwlJNLcAAAAJ)
+
+ **11/15/2022 Update: Check out our new work: [Unifying Flow, Stereo and Depth Estimation](https://haofeixu.github.io/unimatch/) and code: [unimatch](https://github.com/autonomousvision/unimatch) for extending GMFlow to stereo and depth tasks. [More pretrained GMFlow models](https://github.com/autonomousvision/unimatch/blob/master/MODEL_ZOO.md) with different speed-accuracy trade-offs are also released. Check out our [Colab](https://colab.research.google.com/drive/1r5m-xVy3Kw60U-m5VB-aQ98oqqg_6cab?usp=sharing) and [HuggingFace](https://huggingface.co/spaces/haofeixu/unimatch) demo to play with GMFlow in your browser!**
+
+ **A [video introduction](https://www.bilibili.com/video/BV18A4y1R7PL) (in Chinese) of GMFlow is available at bilibili!**
+
+ https://user-images.githubusercontent.com/19343475/174446408-520b8a6c-9714-4ff3-978c-98e23ab29c1f.mp4
+
+ We streamline the optical flow estimation pipeline by reformulating optical flow as a **global matching** problem.
+
+ <p align="center"><img width=90% src="assets/gmflow.png"></p>
+
+ ## Highlights
+
+ - **Flexible & Modular design**
+
+   We decompose the end-to-end optical flow framework into five components: feature extraction, feature enhancement, feature matching, flow propagation and flow refinement. One can easily construct a customized optical flow model by combining different components.
+
+ - **High accuracy**
+
+   With only one refinement, GMFlow outperforms RAFT with 31 refinements on the challenging Sintel benchmark.
+
+ - **High efficiency**
+
+   A basic GMFlow model (without refinement) runs at 57ms (V100) or 26ms (A100) for Sintel data (436x1024).
+
+   GMFlow gains more speedup than RAFT on high-end GPUs (e.g., A100) since GMFlow doesn't require a large number of sequential computations.
+
+   GMFlow also simplifies backward flow computation without requiring a second forward pass of the network. The bidirectional flow can be used for occlusion detection with a forward-backward consistency check (a sketch is given right after this section).
+
+ <p align="center"><img width=90% src="assets/bidir_flow_occ.png"></p>
+
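The forward-backward consistency check mentioned in the Highlights is simple enough to sketch in a few lines. The snippet below is an illustrative PyTorch implementation, not the code shipped in this repository: it assumes flows are given as `(B, 2, H, W)` tensors in pixel units, and the `alpha`/`beta` thresholds are placeholder values rather than the ones GMFlow uses.

```python
import torch
import torch.nn.functional as F


def backward_warp(x, flow):
    """Sample x (B, C, H, W) at locations displaced by flow (B, 2, H, W), in pixels."""
    b, _, h, w = flow.shape
    xs = torch.arange(w, device=x.device, dtype=x.dtype).view(1, 1, w).expand(b, h, w)
    ys = torch.arange(h, device=x.device, dtype=x.dtype).view(1, h, 1).expand(b, h, w)
    # normalize sample positions to [-1, 1] for grid_sample, x coordinate first
    grid_x = 2.0 * (xs + flow[:, 0]) / max(w - 1, 1) - 1.0
    grid_y = 2.0 * (ys + flow[:, 1]) / max(h - 1, 1) - 1.0
    grid = torch.stack((grid_x, grid_y), dim=-1)  # (B, H, W, 2)
    return F.grid_sample(x, grid, mode="bilinear", padding_mode="zeros", align_corners=True)


def occlusion_mask(flow_fw, flow_bw, alpha=0.01, beta=0.5):
    """True where forward and (warped) backward flow disagree, i.e. likely occluded."""
    flow_bw_warped = backward_warp(flow_bw, flow_fw)   # backward flow seen from frame 1
    diff = flow_fw + flow_bw_warped                    # ~0 for consistent pixels
    mag = (flow_fw ** 2 + flow_bw_warped ** 2).sum(1, keepdim=True)
    return (diff ** 2).sum(1, keepdim=True) > alpha * mag + beta  # illustrative thresholds
```

In the released code this idea corresponds to the `--fwd_bwd_consistency_check` demo option described below.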
+ ## Installation
+
+ Our code is based on pytorch 1.9.0, CUDA 10.2 and python 3.8. Higher version pytorch should also work well.
+
+ We recommend using [conda](https://www.anaconda.com/distribution/) for installation:
+
+ ```
+ conda env create -f environment.yml
+ conda activate gmflow
+ ```
+
+ ## Demos
+
+ All pretrained models can be downloaded from [google drive](https://drive.google.com/file/d/1d5C5cgHIxWGsFR1vYs5XrQbbUiZl9TX2/view?usp=sharing).
+
+ You can run a trained model on a sequence of images and visualize the results:
+
+ ```
+ CUDA_VISIBLE_DEVICES=0 python main.py \
+ --inference_dir demo/sintel_market_1 \
+ --output_path output/gmflow-norefine-sintel_market_1 \
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
+ ```
+
+ You can also predict bidirectional flow with `--pred_bidir_flow` enabled and use `--fwd_bwd_consistency_check` for forward-backward consistency check. More examples can be found in [scripts/demo.sh](scripts/demo.sh).
+
+ ## Datasets
+
+ The datasets used to train and evaluate GMFlow are as follows:
+
+ * [FlyingChairs](https://lmb.informatik.uni-freiburg.de/resources/datasets/FlyingChairs.en.html#flyingchairs)
+ * [FlyingThings3D](https://lmb.informatik.uni-freiburg.de/resources/datasets/SceneFlowDatasets.en.html)
+ * [Sintel](http://sintel.is.tue.mpg.de/)
+ * [KITTI](http://www.cvlibs.net/datasets/kitti/eval_scene_flow.php?benchmark=flow)
+ * [HD1K](http://hci-benchmark.iwr.uni-heidelberg.de/)
+
+ By default the dataloader [datasets.py](data/datasets.py) assumes the datasets are located in folder `datasets` and are organized as follows:
+
+ ```
+ datasets
+ ├── FlyingChairs_release
+ │   └── data
+ ├── FlyingThings3D
+ │   ├── frames_cleanpass
+ │   ├── frames_finalpass
+ │   └── optical_flow
+ ├── HD1K
+ │   ├── hd1k_challenge
+ │   ├── hd1k_flow_gt
+ │   ├── hd1k_flow_uncertainty
+ │   └── hd1k_input
+ ├── KITTI
+ │   ├── testing
+ │   └── training
+ ├── Sintel
+ │   ├── test
+ │   └── training
+ ```
+
+ It is recommended to symlink your dataset root to `datasets`:
+
+ ```shell
+ ln -s $YOUR_DATASET_ROOT datasets
+ ```
+
+ Otherwise, you may need to change the corresponding paths in [datasets.py](data/datasets.py).
+
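As an optional sanity check, the small helper below verifies that the layout shown above is in place before training or evaluation. It is not part of the repository; the directory names are copied from the tree in this README, and only the datasets you actually plan to use need to exist.

```python
import os

# Directory names copied from the tree above; trim this to the datasets you use.
EXPECTED = {
    "FlyingChairs_release": ["data"],
    "FlyingThings3D": ["frames_cleanpass", "frames_finalpass", "optical_flow"],
    "HD1K": ["hd1k_challenge", "hd1k_flow_gt", "hd1k_flow_uncertainty", "hd1k_input"],
    "KITTI": ["training", "testing"],
    "Sintel": ["training", "test"],
}


def check_datasets(root="datasets"):
    """Print which of the expected dataset folders exist under `root`."""
    for name, subdirs in EXPECTED.items():
        for sub in subdirs:
            path = os.path.join(root, name, sub)
            print(f"[{'ok' if os.path.isdir(path) else 'missing'}] {path}")


if __name__ == "__main__":
    check_datasets()
```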
+ ## Evaluation
+
+ You can evaluate a trained GMFlow model by running:
+
+ ```
+ CUDA_VISIBLE_DEVICES=0 python main.py --eval --val_dataset things sintel --resume pretrained/gmflow_things-e9887eda.pth
+ ```
+
+ More evaluation scripts can be found in [scripts/evaluate.sh](scripts/evaluate.sh).
+
+ For submission to Sintel and KITTI online test sets, you can run [scripts/submission.sh](scripts/submission.sh).
+
+ ## Training
+
+ All training scripts on FlyingChairs, FlyingThings3D, Sintel and KITTI datasets can be found in [scripts/train_gmflow.sh](scripts/train_gmflow.sh) and [scripts/train_gmflow_with_refine.sh](scripts/train_gmflow_with_refine.sh).
+
+ Note that the basic GMFlow model (without refinement) can be trained on 4x 16GB V100 GPUs. For training GMFlow with refinement, 8x 16GB V100 or 4x 32GB V100 or 4x 40GB A100 GPUs are required by default. You may need to tune the batch size and training iterations according to your hardware.
+
+ We support using tensorboard to monitor and visualize the training process. You can first start a tensorboard session with
+
+ ```shell
+ tensorboard --logdir checkpoints
+ ```
+
+ and then access [http://localhost:6006](http://localhost:6006) in your browser.
+
+ ## Citation
+
+ If you find our work useful in your research, please consider citing our paper:
+
+ ```
+ @inproceedings{xu2022gmflow,
+   title={GMFlow: Learning Optical Flow via Global Matching},
+   author={Xu, Haofei and Zhang, Jing and Cai, Jianfei and Rezatofighi, Hamid and Tao, Dacheng},
+   booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
+   pages={8121-8130},
+   year={2022}
+ }
+ ```
+
+ ## Acknowledgements
+
+ This project would not have been possible without relying on some awesome repos: [RAFT](https://github.com/princeton-vl/RAFT), [LoFTR](https://github.com/zju3dv/LoFTR), [DETR](https://github.com/facebookresearch/detr), [Swin](https://github.com/microsoft/Swin-Transformer), [mmdetection](https://github.com/open-mmlab/mmdetection) and [Detectron2](https://github.com/facebookresearch/detectron2/blob/main/projects/TridentNet/tridentnet/trident_conv.py). We thank the original authors for their excellent work.
basicsr/archs/gmflow/data/__init__.py ADDED
@@ -0,0 +1,7 @@
+ from .datasets import build_train_dataset
+ from .datasets import (FlyingChairs,
+                        FlyingThings3D,
+                        MpiSintel,
+                        KITTI,
+                        HD1K,
+                        )
basicsr/archs/gmflow/data/chairs_split.txt ADDED
@@ -0,0 +1,22872 @@
[diff output truncated: an added plain-text data file of thousands of lines, each a single split flag value of 1 or 2; the interleaved numbers were line-number gutters from the diff viewer and carry no content, so the full listing is omitted]
+ 1
6636
+ 1
6637
+ 1
6638
+ 1
6639
+ 1
6640
+ 1
6641
+ 1
6642
+ 1
6643
+ 1
6644
+ 1
6645
+ 2
6646
+ 1
6647
+ 1
6648
+ 1
6649
+ 1
6650
+ 1
6651
+ 1
6652
+ 1
6653
+ 1
6654
+ 1
6655
+ 1
6656
+ 1
6657
+ 1
6658
+ 1
6659
+ 2
6660
+ 1
6661
+ 1
6662
+ 1
6663
+ 1
6664
+ 1
6665
+ 1
6666
+ 1
6667
+ 1
6668
+ 1
6669
+ 1
6670
+ 1
6671
+ 1
6672
+ 1
6673
+ 1
6674
+ 2
6675
+ 1
6676
+ 1
6677
+ 1
6678
+ 1
6679
+ 1
6680
+ 1
6681
+ 2
6682
+ 1
6683
+ 1
6684
+ 1
6685
+ 1
6686
+ 1
6687
+ 1
6688
+ 1
6689
+ 1
6690
+ 1
6691
+ 1
6692
+ 1
6693
+ 1
6694
+ 1
6695
+ 1
6696
+ 1
6697
+ 1
6698
+ 1
6699
+ 2
6700
+ 2
6701
+ 1
6702
+ 1
6703
+ 2
6704
+ 1
6705
+ 1
6706
+ 2
6707
+ 1
6708
+ 1
6709
+ 1
6710
+ 1
6711
+ 1
6712
+ 1
6713
+ 1
6714
+ 1
6715
+ 1
6716
+ 1
6717
+ 1
6718
+ 1
6719
+ 1
6720
+ 1
6721
+ 1
6722
+ 1
6723
+ 1
6724
+ 1
6725
+ 1
6726
+ 1
6727
+ 1
6728
+ 1
6729
+ 1
6730
+ 1
6731
+ 1
6732
+ 1
6733
+ 1
6734
+ 1
6735
+ 1
6736
+ 1
6737
+ 1
6738
+ 1
6739
+ 1
6740
+ 1
6741
+ 1
6742
+ 2
6743
+ 1
6744
+ 1
6745
+ 1
6746
+ 1
6747
+ 1
6748
+ 1
6749
+ 1
6750
+ 1
6751
+ 1
6752
+ 1
6753
+ 1
6754
+ 1
6755
+ 1
6756
+ 1
6757
+ 1
6758
+ 1
6759
+ 1
6760
+ 2
6761
+ 1
6762
+ 1
6763
+ 1
6764
+ 1
6765
+ 1
6766
+ 1
6767
+ 1
6768
+ 1
6769
+ 1
6770
+ 1
6771
+ 1
6772
+ 1
6773
+ 1
6774
+ 1
6775
+ 1
6776
+ 1
6777
+ 1
6778
+ 1
6779
+ 1
6780
+ 1
6781
+ 1
6782
+ 1
6783
+ 1
6784
+ 1
6785
+ 1
6786
+ 2
6787
+ 1
6788
+ 1
6789
+ 1
6790
+ 1
6791
+ 1
6792
+ 1
6793
+ 2
6794
+ 1
6795
+ 2
6796
+ 1
6797
+ 1
6798
+ 1
6799
+ 1
6800
+ 1
6801
+ 1
6802
+ 1
6803
+ 1
6804
+ 1
6805
+ 1
6806
+ 1
6807
+ 1
6808
+ 1
6809
+ 1
6810
+ 2
6811
+ 2
6812
+ 1
6813
+ 1
6814
+ 1
6815
+ 1
6816
+ 1
6817
+ 1
6818
+ 1
6819
+ 1
6820
+ 1
6821
+ 1
6822
+ 1
6823
+ 1
6824
+ 1
6825
+ 1
6826
+ 1
6827
+ 1
6828
+ 1
6829
+ 1
6830
+ 1
6831
+ 2
6832
+ 1
6833
+ 1
6834
+ 1
6835
+ 1
6836
+ 1
6837
+ 1
6838
+ 1
6839
+ 2
6840
+ 1
6841
+ 1
6842
+ 1
6843
+ 1
6844
+ 1
6845
+ 1
6846
+ 1
6847
+ 1
6848
+ 1
6849
+ 1
6850
+ 1
6851
+ 1
6852
+ 1
6853
+ 1
6854
+ 1
6855
+ 1
6856
+ 1
6857
+ 1
6858
+ 1
6859
+ 1
6860
+ 1
6861
+ 1
6862
+ 1
6863
+ 1
6864
+ 1
6865
+ 1
6866
+ 1
6867
+ 1
6868
+ 1
6869
+ 1
6870
+ 2
6871
+ 1
6872
+ 2
6873
+ 1
6874
+ 1
6875
+ 1
6876
+ 1
6877
+ 1
6878
+ 1
6879
+ 1
6880
+ 1
6881
+ 1
6882
+ 1
6883
+ 1
6884
+ 1
6885
+ 1
6886
+ 1
6887
+ 1
6888
+ 1
6889
+ 1
6890
+ 2
6891
+ 1
6892
+ 1
6893
+ 1
6894
+ 1
6895
+ 1
6896
+ 1
6897
+ 1
6898
+ 1
6899
+ 1
6900
+ 1
6901
+ 1
6902
+ 1
6903
+ 1
6904
+ 1
6905
+ 1
6906
+ 1
6907
+ 1
6908
+ 1
6909
+ 1
6910
+ 1
6911
+ 1
6912
+ 1
6913
+ 1
6914
+ 1
6915
+ 1
6916
+ 1
6917
+ 1
6918
+ 1
6919
+ 1
6920
+ 1
6921
+ 1
6922
+ 1
6923
+ 1
6924
+ 1
6925
+ 1
6926
+ 2
6927
+ 1
6928
+ 1
6929
+ 1
6930
+ 1
6931
+ 1
6932
+ 1
6933
+ 1
6934
+ 1
6935
+ 1
6936
+ 1
6937
+ 1
6938
+ 1
6939
+ 1
6940
+ 1
6941
+ 1
6942
+ 1
6943
+ 1
6944
+ 1
6945
+ 1
6946
+ 1
6947
+ 1
6948
+ 1
6949
+ 1
6950
+ 1
6951
+ 1
6952
+ 1
6953
+ 1
6954
+ 1
6955
+ 1
6956
+ 1
6957
+ 1
6958
+ 1
6959
+ 1
6960
+ 1
6961
+ 1
6962
+ 1
6963
+ 1
6964
+ 1
6965
+ 1
6966
+ 1
6967
+ 1
6968
+ 1
6969
+ 1
6970
+ 1
6971
+ 1
6972
+ 1
6973
+ 1
6974
+ 1
6975
+ 1
6976
+ 1
6977
+ 1
6978
+ 1
6979
+ 1
6980
+ 1
6981
+ 1
6982
+ 1
6983
+ 1
6984
+ 1
6985
+ 1
6986
+ 1
6987
+ 1
6988
+ 1
6989
+ 1
6990
+ 1
6991
+ 1
6992
+ 1
6993
+ 1
6994
+ 1
6995
+ 1
6996
+ 2
6997
+ 1
6998
+ 1
6999
+ 1
7000
+ 1
7001
+ 1
7002
+ 1
7003
+ 1
7004
+ 2
7005
+ 1
7006
+ 1
7007
+ 1
7008
+ 1
7009
+ 1
7010
+ 1
7011
+ 1
7012
+ 1
7013
+ 1
7014
+ 1
7015
+ 1
7016
+ 1
7017
+ 1
7018
+ 1
7019
+ 1
7020
+ 1
7021
+ 1
7022
+ 1
7023
+ 1
7024
+ 1
7025
+ 1
7026
+ 1
7027
+ 2
7028
+ 1
7029
+ 1
7030
+ 2
7031
+ 1
7032
+ 1
7033
+ 1
7034
+ 1
7035
+ 1
7036
+ 1
7037
+ 1
7038
+ 1
7039
+ 1
7040
+ 1
7041
+ 1
7042
+ 1
7043
+ 1
7044
+ 1
7045
+ 1
7046
+ 1
7047
+ 1
7048
+ 1
7049
+ 1
7050
+ 1
7051
+ 1
7052
+ 1
7053
+ 1
7054
+ 1
7055
+ 1
7056
+ 1
7057
+ 1
7058
+ 1
7059
+ 1
7060
+ 1
7061
+ 1
7062
+ 1
7063
+ 1
7064
+ 1
7065
+ 1
7066
+ 1
7067
+ 1
7068
+ 1
7069
+ 1
7070
+ 1
7071
+ 1
7072
+ 1
7073
+ 1
7074
+ 1
7075
+ 1
7076
+ 1
7077
+ 1
7078
+ 1
7079
+ 1
7080
+ 1
7081
+ 2
7082
+ 1
7083
+ 2
7084
+ 1
7085
+ 1
7086
+ 1
7087
+ 1
7088
+ 1
7089
+ 1
7090
+ 1
7091
+ 1
7092
+ 1
7093
+ 1
7094
+ 1
7095
+ 1
7096
+ 1
7097
+ 1
7098
+ 2
7099
+ 1
7100
+ 1
7101
+ 1
7102
+ 1
7103
+ 2
7104
+ 1
7105
+ 1
7106
+ 1
7107
+ 1
7108
+ 1
7109
+ 1
7110
+ 1
7111
+ 1
7112
+ 1
7113
+ 1
7114
+ 1
7115
+ 1
7116
+ 1
7117
+ 2
7118
+ 1
7119
+ 1
7120
+ 1
7121
+ 1
7122
+ 1
7123
+ 1
7124
+ 1
7125
+ 1
7126
+ 1
7127
+ 1
7128
+ 1
7129
+ 1
7130
+ 1
7131
+ 1
7132
+ 1
7133
+ 1
7134
+ 1
7135
+ 1
7136
+ 1
7137
+ 1
7138
+ 1
7139
+ 1
7140
+ 1
7141
+ 1
7142
+ 1
7143
+ 1
7144
+ 1
7145
+ 1
7146
+ 1
7147
+ 1
7148
+ 1
7149
+ 1
7150
+ 1
7151
+ 1
7152
+ 1
7153
+ 1
7154
+ 1
7155
+ 1
7156
+ 1
7157
+ 1
7158
+ 1
7159
+ 1
7160
+ 1
7161
+ 1
7162
+ 1
7163
+ 1
7164
+ 1
7165
+ 1
7166
+ 2
7167
+ 1
7168
+ 1
7169
+ 1
7170
+ 1
7171
+ 1
7172
+ 1
7173
+ 1
7174
+ 1
7175
+ 1
7176
+ 1
7177
+ 1
7178
+ 1
7179
+ 1
7180
+ 1
7181
+ 1
7182
+ 1
7183
+ 1
7184
+ 1
7185
+ 1
7186
+ 1
7187
+ 1
7188
+ 1
7189
+ 1
7190
+ 1
7191
+ 1
7192
+ 1
7193
+ 1
7194
+ 1
7195
+ 1
7196
+ 1
7197
+ 1
7198
+ 1
7199
+ 1
7200
+ 1
7201
+ 2
7202
+ 1
7203
+ 1
7204
+ 1
7205
+ 1
7206
+ 1
7207
+ 1
7208
+ 1
7209
+ 1
7210
+ 1
7211
+ 1
7212
+ 1
7213
+ 1
7214
+ 1
7215
+ 1
7216
+ 1
7217
+ 1
7218
+ 1
7219
+ 1
7220
+ 1
7221
+ 1
7222
+ 1
7223
+ 1
7224
+ 1
7225
+ 1
7226
+ 1
7227
+ 1
7228
+ 1
7229
+ 1
7230
+ 1
7231
+ 1
7232
+ 1
7233
+ 2
7234
+ 1
7235
+ 1
7236
+ 1
7237
+ 1
7238
+ 1
7239
+ 1
7240
+ 1
7241
+ 1
7242
+ 1
7243
+ 1
7244
+ 1
7245
+ 1
7246
+ 1
7247
+ 1
7248
+ 1
7249
+ 1
7250
+ 1
7251
+ 1
7252
+ 1
7253
+ 1
7254
+ 1
7255
+ 1
7256
+ 1
7257
+ 1
7258
+ 1
7259
+ 1
7260
+ 1
7261
+ 1
7262
+ 1
7263
+ 1
7264
+ 1
7265
+ 1
7266
+ 1
7267
+ 1
7268
+ 1
7269
+ 1
7270
+ 1
7271
+ 1
7272
+ 2
7273
+ 1
7274
+ 1
7275
+ 1
7276
+ 1
7277
+ 1
7278
+ 1
7279
+ 1
7280
+ 1
7281
+ 1
7282
+ 1
7283
+ 2
7284
+ 1
7285
+ 1
7286
+ 1
7287
+ 1
7288
+ 1
7289
+ 1
7290
+ 1
7291
+ 1
7292
+ 1
7293
+ 1
7294
+ 1
7295
+ 1
7296
+ 1
7297
+ 1
7298
+ 1
7299
+ 1
7300
+ 1
7301
+ 1
7302
+ 1
7303
+ 1
7304
+ 1
7305
+ 1
7306
+ 1
7307
+ 1
7308
+ 1
7309
+ 1
7310
+ 1
7311
+ 1
7312
+ 1
7313
+ 1
7314
+ 1
7315
+ 1
7316
+ 1
7317
+ 1
7318
+ 1
7319
+ 1
7320
+ 1
7321
+ 1
7322
+ 1
7323
+ 1
7324
+ 1
7325
+ 2
7326
+ 1
7327
+ 1
7328
+ 1
7329
+ 1
7330
+ 1
7331
+ 1
7332
+ 1
7333
+ 1
7334
+ 2
7335
+ 1
7336
+ 2
7337
+ 1
7338
+ 1
7339
+ 1
7340
+ 1
7341
+ 1
7342
+ 1
7343
+ 1
7344
+ 1
7345
+ 1
7346
+ 1
7347
+ 1
7348
+ 1
7349
+ 1
7350
+ 1
7351
+ 1
7352
+ 1
7353
+ 1
7354
+ 1
7355
+ 1
7356
+ 1
7357
+ 1
7358
+ 1
7359
+ 1
7360
+ 1
7361
+ 1
7362
+ 1
7363
+ 1
7364
+ 1
7365
+ 1
7366
+ 1
7367
+ 1
7368
+ 1
7369
+ 1
7370
+ 1
7371
+ 1
7372
+ 1
7373
+ 2
7374
+ 1
7375
+ 1
7376
+ 1
7377
+ 1
7378
+ 1
7379
+ 1
7380
+ 1
7381
+ 1
7382
+ 1
7383
+ 1
7384
+ 1
7385
+ 1
7386
+ 1
7387
+ 1
7388
+ 2
7389
+ 1
7390
+ 1
7391
+ 1
7392
+ 1
7393
+ 1
7394
+ 1
7395
+ 1
7396
+ 1
7397
+ 1
7398
+ 1
7399
+ 1
7400
+ 1
7401
+ 1
7402
+ 1
7403
+ 1
7404
+ 1
7405
+ 1
7406
+ 1
7407
+ 1
7408
+ 2
7409
+ 1
7410
+ 1
7411
+ 1
7412
+ 1
7413
+ 1
7414
+ 1
7415
+ 1
7416
+ 1
7417
+ 1
7418
+ 1
7419
+ 1
7420
+ 1
7421
+ 1
7422
+ 1
7423
+ 1
7424
+ 1
7425
+ 1
7426
+ 1
7427
+ 1
7428
+ 1
7429
+ 1
7430
+ 1
7431
+ 1
7432
+ 1
7433
+ 1
7434
+ 1
7435
+ 1
7436
+ 1
7437
+ 1
7438
+ 1
7439
+ 1
7440
+ 1
7441
+ 1
7442
+ 1
7443
+ 1
7444
+ 1
7445
+ 1
7446
+ 1
7447
+ 1
7448
+ 1
7449
+ 1
7450
+ 1
7451
+ 1
7452
+ 1
7453
+ 1
7454
+ 1
7455
+ 1
7456
+ 1
7457
+ 1
7458
+ 1
7459
+ 1
7460
+ 1
7461
+ 1
7462
+ 1
7463
+ 1
7464
+ 1
7465
+ 1
7466
+ 1
7467
+ 1
7468
+ 1
7469
+ 1
7470
+ 1
7471
+ 1
7472
+ 1
7473
+ 2
7474
+ 1
7475
+ 2
7476
+ 1
7477
+ 1
7478
+ 1
7479
+ 1
7480
+ 1
7481
+ 1
7482
+ 1
7483
+ 2
7484
+ 1
7485
+ 1
7486
+ 1
7487
+ 1
7488
+ 1
7489
+ 1
7490
+ 2
7491
+ 1
7492
+ 1
7493
+ 1
7494
+ 1
7495
+ 1
7496
+ 1
7497
+ 1
7498
+ 1
7499
+ 1
7500
+ 2
7501
+ 1
7502
+ 1
7503
+ 1
7504
+ 1
7505
+ 1
7506
+ 1
7507
+ 1
7508
+ 1
7509
+ 1
7510
+ 1
7511
+ 1
7512
+ 1
7513
+ 1
7514
+ 1
7515
+ 1
7516
+ 1
7517
+ 2
7518
+ 1
7519
+ 1
7520
+ 1
7521
+ 1
7522
+ 1
7523
+ 1
7524
+ 1
7525
+ 1
7526
+ 1
7527
+ 1
7528
+ 1
7529
+ 1
7530
+ 1
7531
+ 1
7532
+ 1
7533
+ 1
7534
+ 2
7535
+ 1
7536
+ 1
7537
+ 2
7538
+ 1
7539
+ 1
7540
+ 1
7541
+ 1
7542
+ 1
7543
+ 1
7544
+ 1
7545
+ 1
7546
+ 1
7547
+ 1
7548
+ 1
7549
+ 1
7550
+ 1
7551
+ 1
7552
+ 1
7553
+ 1
7554
+ 1
7555
+ 1
7556
+ 1
7557
+ 1
7558
+ 1
7559
+ 1
7560
+ 1
7561
+ 1
7562
+ 1
7563
+ 1
7564
+ 1
7565
+ 1
7566
+ 1
7567
+ 2
7568
+ 1
7569
+ 1
7570
+ 1
7571
+ 1
7572
+ 1
7573
+ 1
7574
+ 1
7575
+ 1
7576
+ 1
7577
+ 1
7578
+ 1
7579
+ 1
7580
+ 1
7581
+ 1
7582
+ 1
7583
+ 1
7584
+ 1
7585
+ 1
7586
+ 1
7587
+ 1
7588
+ 1
7589
+ 1
7590
+ 1
7591
+ 1
7592
+ 1
7593
+ 1
7594
+ 1
7595
+ 1
7596
+ 1
7597
+ 1
7598
+ 1
7599
+ 1
7600
+ 1
7601
+ 1
7602
+ 1
7603
+ 1
7604
+ 1
7605
+ 1
7606
+ 1
7607
+ 1
7608
+ 1
7609
+ 1
7610
+ 1
7611
+ 1
7612
+ 1
7613
+ 1
7614
+ 1
7615
+ 1
7616
+ 1
7617
+ 1
7618
+ 1
7619
+ 1
7620
+ 1
7621
+ 2
7622
+ 1
7623
+ 1
7624
+ 1
7625
+ 1
7626
+ 1
7627
+ 1
7628
+ 1
7629
+ 1
7630
+ 1
7631
+ 1
7632
+ 1
7633
+ 1
7634
+ 1
7635
+ 1
7636
+ 1
7637
+ 1
7638
+ 1
7639
+ 1
7640
+ 1
7641
+ 1
7642
+ 1
7643
+ 1
7644
+ 1
7645
+ 1
7646
+ 1
7647
+ 1
7648
+ 1
7649
+ 1
7650
+ 1
7651
+ 1
7652
+ 1
7653
+ 1
7654
+ 1
7655
+ 2
7656
+ 1
7657
+ 1
7658
+ 1
7659
+ 1
7660
+ 1
7661
+ 1
7662
+ 1
7663
+ 1
7664
+ 1
7665
+ 1
7666
+ 1
7667
+ 1
7668
+ 1
7669
+ 1
7670
+ 1
7671
+ 1
7672
+ 1
7673
+ 1
7674
+ 1
7675
+ 1
7676
+ 1
7677
+ 1
7678
+ 1
7679
+ 1
7680
+ 1
7681
+ 1
7682
+ 1
7683
+ 1
7684
+ 1
7685
+ 1
7686
+ 1
7687
+ 1
7688
+ 1
7689
+ 1
7690
+ 1
7691
+ 1
7692
+ 2
7693
+ 1
7694
+ 1
7695
+ 1
7696
+ 1
7697
+ 1
7698
+ 1
7699
+ 1
7700
+ 1
7701
+ 1
7702
+ 1
7703
+ 1
7704
+ 1
7705
+ 2
7706
+ 1
7707
+ 1
7708
+ 1
7709
+ 1
7710
+ 1
7711
+ 1
7712
+ 1
7713
+ 1
7714
+ 1
7715
+ 1
7716
+ 1
7717
+ 1
7718
+ 1
7719
+ 1
7720
+ 1
7721
+ 1
7722
+ 1
7723
+ 2
7724
+ 1
7725
+ 1
7726
+ 1
7727
+ 1
7728
+ 1
7729
+ 1
7730
+ 1
7731
+ 1
7732
+ 1
7733
+ 1
7734
+ 1
7735
+ 1
7736
+ 1
7737
+ 1
7738
+ 1
7739
+ 1
7740
+ 1
7741
+ 1
7742
+ 1
7743
+ 1
7744
+ 1
7745
+ 1
7746
+ 1
7747
+ 2
7748
+ 1
7749
+ 1
7750
+ 1
7751
+ 2
7752
+ 1
7753
+ 1
7754
+ 1
7755
+ 1
7756
+ 1
7757
+ 1
7758
+ 1
7759
+ 1
7760
+ 1
7761
+ 1
7762
+ 1
7763
+ 1
7764
+ 1
7765
+ 1
7766
+ 1
7767
+ 1
7768
+ 1
7769
+ 1
7770
+ 1
7771
+ 1
7772
+ 1
7773
+ 1
7774
+ 2
7775
+ 1
7776
+ 1
7777
+ 1
7778
+ 1
7779
+ 1
7780
+ 1
7781
+ 1
7782
+ 1
7783
+ 1
7784
+ 1
7785
+ 1
7786
+ 1
7787
+ 1
7788
+ 1
7789
+ 1
7790
+ 1
7791
+ 1
7792
+ 1
7793
+ 1
7794
+ 1
7795
+ 1
7796
+ 1
7797
+ 1
7798
+ 1
7799
+ 1
7800
+ 1
7801
+ 1
7802
+ 1
7803
+ 1
7804
+ 1
7805
+ 1
7806
+ 1
7807
+ 2
7808
+ 1
7809
+ 1
7810
+ 1
7811
+ 1
7812
+ 1
7813
+ 1
7814
+ 1
7815
+ 1
7816
+ 1
7817
+ 1
7818
+ 1
7819
+ 1
7820
+ 1
7821
+ 1
7822
+ 2
7823
+ 1
7824
+ 1
7825
+ 1
7826
+ 1
7827
+ 1
7828
+ 2
7829
+ 1
7830
+ 1
7831
+ 1
7832
+ 1
7833
+ 1
7834
+ 1
7835
+ 1
7836
+ 1
7837
+ 1
7838
+ 1
7839
+ 1
7840
+ 1
7841
+ 1
7842
+ 1
7843
+ 1
7844
+ 1
7845
+ 1
7846
+ 1
7847
+ 1
7848
+ 1
7849
+ 1
7850
+ 1
7851
+ 1
7852
+ 2
7853
+ 1
7854
+ 1
7855
+ 1
7856
+ 1
7857
+ 1
7858
+ 1
7859
+ 1
7860
+ 1
7861
+ 1
7862
+ 1
7863
+ 1
7864
+ 1
7865
+ 1
7866
+ 1
7867
+ 1
7868
+ 1
7869
+ 1
7870
+ 1
7871
+ 1
7872
+ 1
7873
+ 1
7874
+ 2
7875
+ 1
7876
+ 1
7877
+ 1
7878
+ 1
7879
+ 1
7880
+ 1
7881
+ 2
7882
+ 1
7883
+ 1
7884
+ 1
7885
+ 2
7886
+ 1
7887
+ 1
7888
+ 1
7889
+ 1
7890
+ 1
7891
+ 1
7892
+ 1
7893
+ 1
7894
+ 1
7895
+ 1
7896
+ 1
7897
+ 1
7898
+ 1
7899
+ 1
7900
+ 1
7901
+ 1
7902
+ 1
7903
+ 1
7904
+ 1
7905
+ 2
7906
+ 1
7907
+ 1
7908
+ 1
7909
+ 1
7910
+ 1
7911
+ 1
7912
+ 1
7913
+ 2
7914
+ 1
7915
+ 1
7916
+ 1
7917
+ 1
7918
+ 1
7919
+ 1
7920
+ 1
7921
+ 1
7922
+ 1
7923
+ 1
7924
+ 1
7925
+ 1
7926
+ 1
7927
+ 1
7928
+ 1
7929
+ 1
7930
+ 1
7931
+ 1
7932
+ 1
7933
+ 1
7934
+ 1
7935
+ 1
7936
+ 1
7937
+ 1
7938
+ 1
7939
+ 1
7940
+ 1
7941
+ 1
7942
+ 1
7943
+ 1
7944
+ 1
7945
+ 1
7946
+ 1
7947
+ 1
7948
+ 1
7949
+ 2
7950
+ 1
7951
+ 1
7952
+ 1
7953
+ 1
7954
+ 1
7955
+ 1
7956
+ 1
7957
+ 1
7958
+ 1
7959
+ 1
7960
+ 1
7961
+ 1
7962
+ 1
7963
+ 1
7964
+ 1
7965
+ 2
7966
+ 2
7967
+ 1
7968
+ 1
7969
+ 1
7970
+ 1
7971
+ 1
7972
+ 1
7973
+ 1
7974
+ 1
7975
+ 1
7976
+ 1
7977
+ 1
7978
+ 1
7979
+ 1
7980
+ 1
7981
+ 1
7982
+ 1
7983
+ 1
7984
+ 1
7985
+ 2
7986
+ 1
7987
+ 1
7988
+ 1
7989
+ 1
7990
+ 2
7991
+ 1
7992
+ 1
7993
+ 2
7994
+ 1
7995
+ 1
7996
+ 1
7997
+ 1
7998
+ 1
7999
+ 1
8000
+ 1
8001
+ 1
8002
+ 1
8003
+ 1
8004
+ 1
8005
+ 1
8006
+ 1
8007
+ 1
8008
+ 1
8009
+ 1
8010
+ 1
8011
+ 1
8012
+ 1
8013
+ 1
8014
+ 1
8015
+ 1
8016
+ 1
8017
+ 1
8018
+ 1
8019
+ 1
8020
+ 1
8021
+ 1
8022
+ 1
8023
+ 1
8024
+ 1
8025
+ 1
8026
+ 1
8027
+ 1
8028
+ 1
8029
+ 1
8030
+ 1
8031
+ 1
8032
+ 1
8033
+ 1
8034
+ 1
8035
+ 1
8036
+ 2
8037
+ 1
8038
+ 1
8039
+ 1
8040
+ 1
8041
+ 1
8042
+ 1
8043
+ 1
8044
+ 1
8045
+ 1
8046
+ 1
8047
+ 1
8048
+ 1
8049
+ 1
8050
+ 1
8051
+ 2
8052
+ 1
8053
+ 1
8054
+ 1
8055
+ 1
8056
+ 1
8057
+ 1
8058
+ 1
8059
+ 1
8060
+ 1
8061
+ 1
8062
+ 1
8063
+ 1
8064
+ 1
8065
+ 1
8066
+ 1
8067
+ 1
8068
+ 1
8069
+ 1
8070
+ 1
8071
+ 1
8072
+ 1
8073
+ 1
8074
+ 1
8075
+ 2
8076
+ 1
8077
+ 1
8078
+ 1
8079
+ 1
8080
+ 1
8081
+ 1
8082
+ 1
8083
+ 1
8084
+ 1
8085
+ 1
8086
+ 1
8087
+ 1
8088
+ 1
8089
+ 1
8090
+ 1
8091
+ 1
8092
+ 2
8093
+ 1
8094
+ 1
8095
+ 2
8096
+ 1
8097
+ 1
8098
+ 1
8099
+ 1
8100
+ 1
8101
+ 1
8102
+ 1
8103
+ 1
8104
+ 1
8105
+ 1
8106
+ 1
8107
+ 1
8108
+ 1
8109
+ 1
8110
+ 1
8111
+ 1
8112
+ 1
8113
+ 1
8114
+ 2
8115
+ 1
8116
+ 1
8117
+ 2
8118
+ 1
8119
+ 1
8120
+ 1
8121
+ 1
8122
+ 1
8123
+ 1
8124
+ 1
8125
+ 1
8126
+ 1
8127
+ 1
8128
+ 1
8129
+ 1
8130
+ 1
8131
+ 1
8132
+ 1
8133
+ 1
8134
+ 1
8135
+ 1
8136
+ 1
8137
+ 1
8138
+ 1
8139
+ 1
8140
+ 1
8141
+ 1
8142
+ 1
8143
+ 1
8144
+ 1
8145
+ 1
8146
+ 1
8147
+ 1
8148
+ 1
8149
+ 1
8150
+ 1
8151
+ 1
8152
+ 2
8153
+ 1
8154
+ 1
8155
+ 1
8156
+ 1
8157
+ 1
8158
+ 1
8159
+ 1
8160
+ 2
8161
+ 1
8162
+ 1
8163
+ 1
8164
+ 1
8165
+ 1
8166
+ 1
8167
+ 1
8168
+ 1
8169
+ 1
8170
+ 1
8171
+ 1
8172
+ 2
8173
+ 1
8174
+ 1
8175
+ 1
8176
+ 1
8177
+ 1
8178
+ 1
8179
+ 1
8180
+ 2
8181
+ 1
8182
+ 1
8183
+ 1
8184
+ 1
8185
+ 1
8186
+ 1
8187
+ 1
8188
+ 1
8189
+ 1
8190
+ 1
8191
+ 1
8192
+ 1
8193
+ 1
8194
+ 1
8195
+ 2
8196
+ 2
8197
+ 1
8198
+ 1
8199
+ 1
8200
+ 1
8201
+ 1
8202
+ 1
8203
+ 1
8204
+ 1
8205
+ 1
8206
+ 1
8207
+ 1
8208
+ 1
8209
+ 1
8210
+ 1
8211
+ 1
8212
+ 1
8213
+ 1
8214
+ 1
8215
+ 1
8216
+ 1
8217
+ 1
8218
+ 1
8219
+ 1
8220
+ 1
8221
+ 1
8222
+ 1
8223
+ 1
8224
+ 1
8225
+ 1
8226
+ 1
8227
+ 1
8228
+ 1
8229
+ 1
8230
+ 1
8231
+ 1
8232
+ 1
8233
+ 1
8234
+ 1
8235
+ 1
8236
+ 1
8237
+ 1
8238
+ 1
8239
+ 1
8240
+ 2
8241
+ 1
8242
+ 1
8243
+ 1
8244
+ 1
8245
+ 1
8246
+ 1
8247
+ 1
8248
+ 1
8249
+ 1
8250
+ 1
8251
+ 1
8252
+ 1
8253
+ 1
8254
+ 1
8255
+ 1
8256
+ 1
8257
+ 1
8258
+ 1
8259
+ 1
8260
+ 1
8261
+ 1
8262
+ 1
8263
+ 1
8264
+ 2
8265
+ 1
8266
+ 1
8267
+ 1
8268
+ 1
8269
+ 1
8270
+ 1
8271
+ 1
8272
+ 1
8273
+ 1
8274
+ 1
8275
+ 1
8276
+ 1
8277
+ 1
8278
+ 1
8279
+ 1
8280
+ 1
8281
+ 1
8282
+ 1
8283
+ 1
8284
+ 1
8285
+ 1
8286
+ 1
8287
+ 1
8288
+ 1
8289
+ 1
8290
+ 1
8291
+ 2
8292
+ 1
8293
+ 1
8294
+ 1
8295
+ 1
8296
+ 2
8297
+ 1
8298
+ 1
8299
+ 1
8300
+ 1
8301
+ 1
8302
+ 1
8303
+ 1
8304
+ 1
8305
+ 1
8306
+ 1
8307
+ 1
8308
+ 1
8309
+ 1
8310
+ 1
8311
+ 1
8312
+ 1
8313
+ 2
8314
+ 1
8315
+ 1
8316
+ 1
8317
+ 1
8318
+ 1
8319
+ 1
8320
+ 1
8321
+ 1
8322
+ 1
8323
+ 1
8324
+ 1
8325
+ 1
8326
+ 1
8327
+ 1
8328
+ 1
8329
+ 1
8330
+ 1
8331
+ 1
8332
+ 1
8333
+ 1
8334
+ 1
8335
+ 1
8336
+ 1
8337
+ 1
8338
+ 1
8339
+ 1
8340
+ 1
8341
+ 1
8342
+ 1
8343
+ 1
8344
+ 1
8345
+ 1
8346
+ 1
8347
+ 1
8348
+ 1
8349
+ 1
8350
+ 1
8351
+ 1
8352
+ 1
8353
+ 1
8354
+ 1
8355
+ 1
8356
+ 1
8357
+ 1
8358
+ 1
8359
+ 1
8360
+ 1
8361
+ 1
8362
+ 1
8363
+ 1
8364
+ 1
8365
+ 1
8366
+ 1
8367
+ 1
8368
+ 2
8369
+ 1
8370
+ 1
8371
+ 1
8372
+ 1
8373
+ 1
8374
+ 1
8375
+ 2
8376
+ 1
8377
+ 1
8378
+ 1
8379
+ 1
8380
+ 1
8381
+ 1
8382
+ 1
8383
+ 1
8384
+ 1
8385
+ 1
8386
+ 1
8387
+ 1
8388
+ 2
8389
+ 1
8390
+ 1
8391
+ 1
8392
+ 1
8393
+ 1
8394
+ 1
8395
+ 1
8396
+ 1
8397
+ 1
8398
+ 1
8399
+ 1
8400
+ 1
8401
+ 1
8402
+ 1
8403
+ 1
8404
+ 1
8405
+ 1
8406
+ 1
8407
+ 1
8408
+ 2
8409
+ 1
8410
+ 1
8411
+ 1
8412
+ 1
8413
+ 1
8414
+ 1
8415
+ 1
8416
+ 1
8417
+ 1
8418
+ 1
8419
+ 1
8420
+ 1
8421
+ 1
8422
+ 1
8423
+ 1
8424
+ 1
8425
+ 1
8426
+ 1
8427
+ 1
8428
+ 1
8429
+ 1
8430
+ 1
8431
+ 1
8432
+ 1
8433
+ 1
8434
+ 1
8435
+ 1
8436
+ 1
8437
+ 1
8438
+ 2
8439
+ 1
8440
+ 2
8441
+ 1
8442
+ 1
8443
+ 1
8444
+ 1
8445
+ 1
8446
+ 1
8447
+ 1
8448
+ 1
8449
+ 1
8450
+ 1
8451
+ 1
8452
+ 1
8453
+ 1
8454
+ 1
8455
+ 1
8456
+ 1
8457
+ 1
8458
+ 1
8459
+ 1
8460
+ 1
8461
+ 1
8462
+ 1
8463
+ 1
8464
+ 1
8465
+ 1
8466
+ 1
8467
+ 1
8468
+ 1
8469
+ 1
8470
+ 1
8471
+ 1
8472
+ 1
8473
+ 1
8474
+ 1
8475
+ 1
8476
+ 1
8477
+ 1
8478
+ 1
8479
+ 1
8480
+ 1
8481
+ 1
8482
+ 1
8483
+ 1
8484
+ 1
8485
+ 1
8486
+ 1
8487
+ 1
8488
+ 1
8489
+ 1
8490
+ 1
8491
+ 1
8492
+ 1
8493
+ 1
8494
+ 1
8495
+ 1
8496
+ 1
8497
+ 1
8498
+ 1
8499
+ 1
8500
+ 1
8501
+ 1
8502
+ 1
8503
+ 1
8504
+ 1
8505
+ 1
8506
+ 1
8507
+ 1
8508
+ 1
8509
+ 1
8510
+ 1
8511
+ 1
8512
+ 1
8513
+ 1
8514
+ 1
8515
+ 1
8516
+ 1
8517
+ 1
8518
+ 1
8519
+ 2
8520
+ 1
8521
+ 1
8522
+ 1
8523
+ 1
8524
+ 1
8525
+ 1
8526
+ 1
8527
+ 1
8528
+ 1
8529
+ 1
8530
+ 1
8531
+ 1
8532
+ 1
8533
+ 1
8534
+ 1
8535
+ 1
8536
+ 1
8537
+ 1
8538
+ 1
8539
+ 1
8540
+ 1
8541
+ 1
8542
+ 1
8543
+ 1
8544
+ 1
8545
+ 1
8546
+ 1
8547
+ 1
8548
+ 1
8549
+ 1
8550
+ 1
8551
+ 1
8552
+ 1
8553
+ 1
8554
+ 1
8555
+ 1
8556
+ 1
8557
+ 2
8558
+ 1
8559
+ 1
8560
+ 1
8561
+ 1
8562
+ 1
8563
+ 1
8564
+ 1
8565
+ 1
8566
+ 1
8567
+ 1
8568
+ 1
8569
+ 1
8570
+ 1
8571
+ 1
8572
+ 1
8573
+ 1
8574
+ 1
8575
+ 1
8576
+ 1
8577
+ 1
8578
+ 1
8579
+ 1
8580
+ 1
8581
+ 1
8582
+ 1
8583
+ 1
8584
+ 1
8585
+ 1
8586
+ 1
8587
+ 1
8588
+ 1
8589
+ 2
8590
+ 1
8591
+ 1
8592
+ 1
8593
+ 1
8594
+ 1
8595
+ 1
8596
+ 1
8597
+ 1
8598
+ 2
8599
+ 1
8600
+ 1
8601
+ 1
8602
+ 2
8603
+ 1
8604
+ 1
8605
+ 1
8606
+ 1
8607
+ 1
8608
+ 1
8609
+ 1
8610
+ 1
8611
+ 1
8612
+ 1
8613
+ 1
8614
+ 1
8615
+ 1
8616
+ 1
8617
+ 1
8618
+ 1
8619
+ 1
8620
+ 1
8621
+ 1
8622
+ 1
8623
+ 1
8624
+ 1
8625
+ 1
8626
+ 1
8627
+ 1
8628
+ 1
8629
+ 1
8630
+ 1
8631
+ 1
8632
+ 1
8633
+ 1
8634
+ 1
8635
+ 1
8636
+ 1
8637
+ 1
8638
+ 1
8639
+ 1
8640
+ 1
8641
+ 1
8642
+ 1
8643
+ 1
8644
+ 1
8645
+ 1
8646
+ 1
8647
+ 1
8648
+ 1
8649
+ 1
8650
+ 1
8651
+ 1
8652
+ 2
8653
+ 1
8654
+ 1
8655
+ 1
8656
+ 1
8657
+ 1
8658
+ 2
8659
+ 1
8660
+ 1
8661
+ 1
8662
+ 1
8663
+ 1
8664
+ 1
8665
+ 1
8666
+ 1
8667
+ 1
8668
+ 1
8669
+ 1
8670
+ 1
8671
+ 1
8672
+ 1
8673
+ 1
8674
+ 1
8675
+ 1
8676
+ 1
8677
+ 1
8678
+ 1
8679
+ 1
8680
+ 1
8681
+ 1
8682
+ 1
8683
+ 1
8684
+ 1
8685
+ 1
8686
+ 1
8687
+ 1
8688
+ 1
8689
+ 1
8690
+ 1
8691
+ 1
8692
+ 1
8693
+ 1
8694
+ 1
8695
+ 1
8696
+ 1
8697
+ 1
8698
+ 1
8699
+ 1
8700
+ 1
8701
+ 1
8702
+ 1
8703
+ 1
8704
+ 1
8705
+ 1
8706
+ 1
8707
+ 1
8708
+ 1
8709
+ 1
8710
+ 1
8711
+ 1
8712
+ 1
8713
+ 1
8714
+ 1
8715
+ 1
8716
+ 1
8717
+ 1
8718
+ 1
8719
+ 1
8720
+ 1
8721
+ 1
8722
+ 1
8723
+ 1
8724
+ 2
8725
+ 1
8726
+ 1
8727
+ 1
8728
+ 1
8729
+ 1
8730
+ 1
8731
+ 1
8732
+ 1
8733
+ 1
8734
+ 1
8735
+ 1
8736
+ 1
8737
+ 1
8738
+ 1
8739
+ 1
8740
+ 1
8741
+ 1
8742
+ 1
8743
+ 1
8744
+ 1
8745
+ 1
8746
+ 1
8747
+ 1
8748
+ 1
8749
+ 1
8750
+ 1
8751
+ 1
8752
+ 1
8753
+ 1
8754
+ 1
8755
+ 1
8756
+ 1
8757
+ 1
8758
+ 1
8759
+ 1
8760
+ 2
8761
+ 1
8762
+ 1
8763
+ 1
8764
+ 2
8765
+ 1
8766
+ 1
8767
+ 1
8768
+ 1
8769
+ 1
8770
+ 1
8771
+ 1
8772
+ 1
8773
+ 1
8774
+ 1
8775
+ 1
8776
+ 1
8777
+ 1
8778
+ 1
8779
+ 1
8780
+ 1
8781
+ 1
8782
+ 1
8783
+ 1
8784
+ 1
8785
+ 1
8786
+ 2
8787
+ 1
8788
+ 1
8789
+ 1
8790
+ 1
8791
+ 1
8792
+ 1
8793
+ 1
8794
+ 1
8795
+ 1
8796
+ 1
8797
+ 1
8798
+ 1
8799
+ 1
8800
+ 1
8801
+ 1
8802
+ 1
8803
+ 2
8804
+ 1
8805
+ 1
8806
+ 1
8807
+ 1
8808
+ 1
8809
+ 1
8810
+ 1
8811
+ 1
8812
+ 1
8813
+ 1
8814
+ 2
8815
+ 1
8816
+ 1
8817
+ 1
8818
+ 1
8819
+ 1
8820
+ 1
8821
+ 1
8822
+ 1
8823
+ 1
8824
+ 1
8825
+ 1
8826
+ 1
8827
+ 2
8828
+ 1
8829
+ 1
8830
+ 1
8831
+ 1
8832
+ 1
8833
+ 1
8834
+ 1
8835
+ 1
8836
+ 1
8837
+ 1
8838
+ 1
8839
+ 1
8840
+ 1
8841
+ 1
8842
+ 1
8843
+ 1
8844
+ 1
8845
+ 1
8846
+ 1
8847
+ 1
8848
+ 1
8849
+ 1
8850
+ 1
8851
+ 1
8852
+ 1
8853
+ 1
8854
+ 1
8855
+ 2
8856
+ 1
8857
+ 2
8858
+ 1
8859
+ 1
8860
+ 1
8861
+ 1
8862
+ 1
8863
+ 1
8864
+ 1
8865
+ 1
8866
+ 1
8867
+ 2
8868
+ 1
8869
+ 1
8870
+ 1
8871
+ 1
8872
+ 1
8873
+ 1
8874
+ 1
8875
+ 1
8876
+ 1
8877
+ 1
8878
+ 1
8879
+ 1
8880
+ 1
8881
+ 1
8882
+ 1
8883
+ 1
8884
+ 1
8885
+ 1
8886
+ 1
8887
+ 1
8888
+ 1
8889
+ 1
8890
+ 1
8891
+ 1
8892
+ 1
8893
+ 1
8894
+ 1
8895
+ 1
8896
+ 1
8897
+ 1
8898
+ 1
8899
+ 1
8900
+ 1
8901
+ 1
8902
+ 1
8903
+ 1
8904
+ 1
8905
+ 1
8906
+ 1
8907
+ 1
8908
+ 1
8909
+ 1
8910
+ 1
8911
+ 1
8912
+ 1
8913
+ 1
8914
+ 1
8915
+ 1
8916
+ 1
8917
+ 1
8918
+ 1
8919
+ 2
8920
+ 1
8921
+ 1
8922
+ 1
8923
+ 2
8924
+ 2
8925
+ 1
8926
+ 1
8927
+ 1
8928
+ 1
8929
+ 1
8930
+ 1
8931
+ 1
8932
+ 1
8933
+ 2
8934
+ 1
8935
+ 1
8936
+ 1
8937
+ 1
8938
+ 1
8939
+ 1
8940
+ 1
8941
+ 1
8942
+ 1
8943
+ 1
8944
+ 1
8945
+ 1
8946
+ 1
8947
+ 1
8948
+ 1
8949
+ 1
8950
+ 1
8951
+ 1
8952
+ 1
8953
+ 1
8954
+ 1
8955
+ 1
8956
+ 1
8957
+ 1
8958
+ 1
8959
+ 2
8960
+ 1
8961
+ 1
8962
+ 1
8963
+ 1
8964
+ 1
8965
+ 1
8966
+ 1
8967
+ 1
8968
+ 2
8969
+ 1
8970
+ 1
8971
+ 1
8972
+ 1
8973
+ 1
8974
+ 1
8975
+ 1
8976
+ 1
8977
+ 1
8978
+ 1
8979
+ 1
8980
+ 1
8981
+ 1
8982
+ 1
8983
+ 1
8984
+ 1
8985
+ 1
8986
+ 1
8987
+ 1
8988
+ 1
8989
+ 1
8990
+ 1
8991
+ 1
8992
+ 1
8993
+ 1
8994
+ 1
8995
+ 1
8996
+ 1
8997
+ 1
8998
+ 1
8999
+ 1
9000
+ 1
9001
+ 1
9002
+ 1
9003
+ 1
9004
+ 2
9005
+ 1
9006
+ 1
9007
+ 1
9008
+ 1
9009
+ 1
9010
+ 1
9011
+ 1
9012
+ 1
9013
+ 1
9014
+ 1
9015
+ 1
9016
+ 1
9017
+ 1
9018
+ 1
9019
+ 2
9020
+ 1
9021
+ 1
9022
+ 1
9023
+ 1
9024
+ 1
9025
+ 1
9026
+ 1
9027
+ 1
9028
+ 1
9029
+ 1
9030
+ 1
9031
+ 1
9032
+ 1
9033
+ 1
9034
+ 1
9035
+ 1
9036
+ 1
9037
+ 1
9038
+ 1
9039
+ 1
9040
+ 1
9041
+ 1
9042
+ 1
9043
+ 1
9044
+ 1
9045
+ 1
9046
+ 1
9047
+ 1
9048
+ 1
9049
+ 1
9050
+ 1
9051
+ 1
9052
+ 1
9053
+ 1
9054
+ 1
9055
+ 1
9056
+ 1
9057
+ 1
9058
+ 1
9059
+ 1
9060
+ 1
9061
+ 1
9062
+ 1
9063
+ 1
9064
+ 1
9065
+ 1
9066
+ 1
9067
+ 1
9068
+ 1
9069
+ 1
9070
+ 1
9071
+ 1
9072
+ 1
9073
+ 1
9074
+ 1
9075
+ 1
9076
+ 1
9077
+ 1
9078
+ 1
9079
+ 2
9080
+ 1
9081
+ 1
9082
+ 1
9083
+ 1
9084
+ 1
9085
+ 1
9086
+ 1
9087
+ 1
9088
+ 1
9089
+ 1
9090
+ 1
9091
+ 1
9092
+ 1
9093
+ 1
9094
+ 1
9095
+ 1
9096
+ 2
9097
+ 1
9098
+ 1
9099
+ 1
9100
+ 1
9101
+ 1
9102
+ 1
9103
+ 1
9104
+ 1
9105
+ 2
9106
+ 1
9107
+ 1
9108
+ 1
9109
+ 1
9110
+ 1
9111
+ 1
9112
+ 1
9113
+ 2
9114
+ 1
9115
+ 1
9116
+ 1
9117
+ 1
9118
+ 1
9119
+ 1
9120
+ 1
9121
+ 1
9122
+ 1
9123
+ 1
9124
+ 1
9125
+ 1
9126
+ 1
9127
+ 1
9128
+ 1
9129
+ 1
9130
+ 2
9131
+ 1
9132
+ 1
9133
+ 1
9134
+ 1
9135
+ 1
9136
+ 1
9137
+ 1
9138
+ 1
9139
+ 1
9140
+ 1
9141
+ 1
9142
+ 1
9143
+ 1
9144
+ 1
9145
+ 1
9146
+ 1
9147
+ 1
9148
+ 2
9149
+ 1
9150
+ 1
9151
+ 1
9152
+ 1
9153
+ 1
9154
+ 1
9155
+ 1
9156
+ 1
9157
+ 1
9158
+ 1
9159
+ 1
9160
+ 1
9161
+ 1
9162
+ 1
9163
+ 1
9164
+ 1
9165
+ 1
9166
+ 1
9167
+ 1
9168
+ 1
9169
+ 1
9170
+ 1
9171
+ 2
9172
+ 2
9173
+ 1
9174
+ 1
9175
+ 1
9176
+ 1
9177
+ 1
9178
+ 1
9179
+ 1
9180
+ 1
9181
+ 1
9182
+ 1
9183
+ 1
9184
+ 1
9185
+ 1
9186
+ 1
9187
+ 1
9188
+ 1
9189
+ 1
9190
+ 1
9191
+ 1
9192
+ 1
9193
+ 1
9194
+ 1
9195
+ 1
9196
+ 1
9197
+ 1
9198
+ 2
9199
+ 1
9200
+ 1
9201
+ 2
9202
+ 1
9203
+ 1
9204
+ 1
9205
+ 1
9206
+ 1
9207
+ 1
9208
+ 1
9209
+ 1
9210
+ 1
9211
+ 1
9212
+ 1
9213
+ 1
9214
+ 1
9215
+ 1
9216
+ 1
9217
+ 1
9218
+ 1
9219
+ 1
9220
+ 1
9221
+ 1
9222
+ 1
9223
+ 1
9224
+ 1
9225
+ 1
9226
+ 1
9227
+ 1
9228
+ 1
9229
+ 1
9230
+ 1
9231
+ 1
9232
+ 1
9233
+ 1
9234
+ 1
9235
+ 1
9236
+ 1
9237
+ 1
9238
+ 1
9239
+ 1
9240
+ 1
9241
+ 1
9242
+ 1
9243
+ 1
9244
+ 1
9245
+ 1
9246
+ 1
9247
+ 1
9248
+ 1
9249
+ 1
9250
+ 2
9251
+ 1
9252
+ 1
9253
+ 1
9254
+ 2
9255
+ 1
9256
+ 1
9257
+ 1
9258
+ 1
9259
+ 1
9260
+ 1
9261
+ 1
9262
+ 1
9263
+ 1
9264
+ 1
9265
+ 1
9266
+ 1
9267
+ 1
9268
+ 1
9269
+ 1
9270
+ 1
9271
+ 2
9272
+ 1
9273
+ 1
9274
+ 1
9275
+ 1
9276
+ 1
9277
+ 1
9278
+ 1
9279
+ 1
9280
+ 1
9281
+ 1
9282
+ 1
9283
+ 2
9284
+ 1
9285
+ 1
9286
+ 1
9287
+ 1
9288
+ 1
9289
+ 2
9290
+ 1
9291
+ 1
9292
+ 1
9293
+ 1
9294
+ 1
9295
+ 1
9296
+ 2
9297
+ 1
9298
+ 1
9299
+ 1
9300
+ 1
9301
+ 1
9302
+ 1
9303
+ 1
9304
+ 1
9305
+ 1
9306
+ 1
9307
+ 1
9308
+ 1
9309
+ 1
9310
+ 1
9311
+ 1
9312
+ 1
9313
+ 1
9314
+ 1
9315
+ 1
9316
+ 1
9317
+ 1
9318
+ 1
9319
+ 1
9320
+ 1
9321
+ 1
9322
+ 2
9323
+ 1
9324
+ 2
9325
+ 2
9326
+ 1
9327
+ 1
9328
+ 1
9329
+ 1
9330
+ 1
9331
+ 1
9332
+ 1
9333
+ 1
9334
+ 1
9335
+ 1
9336
+ 1
9337
+ 1
9338
+ 1
9339
+ 1
9340
+ 1
9341
+ 1
9342
+ 1
9343
+ 1
9344
+ 1
9345
+ 1
9346
+ 1
9347
+ 1
9348
+ 2
9349
+ 1
9350
+ 1
9351
+ 1
9352
+ 1
9353
+ 1
9354
+ 1
9355
+ 1
9356
+ 1
9357
+ 1
9358
+ 1
9359
+ 1
9360
+ 1
9361
+ 1
9362
+ 1
9363
+ 1
9364
+ 1
9365
+ 1
9366
+ 1
9367
+ 1
9368
+ 1
9369
+ 1
9370
+ 1
9371
+ 1
9372
+ 1
9373
+ 1
9374
+ 1
9375
+ 1
9376
+ 1
9377
+ 1
9378
+ 1
9379
+ 1
9380
+ 1
9381
+ 1
9382
+ 1
9383
+ 1
9384
+ 1
9385
+ 1
9386
+ 1
9387
+ 1
9388
+ 1
9389
+ 1
9390
+ 1
9391
+ 1
9392
+ 1
9393
+ 1
9394
+ 1
9395
+ 1
9396
+ 1
9397
+ 1
9398
+ 1
9399
+ 1
9400
+ 2
9401
+ 1
9402
+ 1
9403
+ 1
9404
+ 2
9405
+ 1
9406
+ 1
9407
+ 1
9408
+ 1
9409
+ 1
9410
+ 1
9411
+ 1
9412
+ 1
9413
+ 1
9414
+ 1
9415
+ 1
9416
+ 1
9417
+ 1
9418
+ 2
9419
+ 1
9420
+ 1
9421
+ 1
9422
+ 1
9423
+ 1
9424
+ 1
9425
+ 1
9426
+ 1
9427
+ 2
9428
+ 2
9429
+ 1
9430
+ 1
9431
+ 1
9432
+ 1
9433
+ 1
9434
+ 1
9435
+ 1
9436
+ 1
9437
+ 1
9438
+ 1
9439
+ 1
9440
+ 2
9441
+ 1
9442
+ 1
9443
+ 1
9444
+ 1
9445
+ 1
9446
+ 1
9447
+ 1
9448
+ 1
9449
+ 1
9450
+ 1
9451
+ 1
9452
+ 1
9453
+ 1
9454
+ 1
9455
+ 1
9456
+ 1
9457
+ 1
9458
+ 1
9459
+ 1
9460
+ 1
9461
+ 1
9462
+ 1
9463
+ 1
9464
+ 1
9465
+ 1
9466
+ 1
9467
+ 1
9468
+ 1
9469
+ 2
9470
+ 1
9471
+ 1
9472
+ 1
9473
+ 1
9474
+ 1
9475
+ 1
9476
+ 1
9477
+ 1
9478
+ 1
9479
+ 1
9480
+ 1
9481
+ 1
9482
+ 1
9483
+ 1
9484
+ 1
9485
+ 1
9486
+ 1
9487
+ 2
9488
+ 1
9489
+ 1
9490
+ 1
9491
+ 1
9492
+ 1
9493
+ 1
9494
+ 1
9495
+ 1
9496
+ 1
9497
+ 2
9498
+ 1
9499
+ 1
9500
+ 1
9501
+ 1
9502
+ 1
9503
+ 1
9504
+ 1
9505
+ 1
9506
+ 1
9507
+ 1
9508
+ 1
9509
+ 1
9510
+ 1
9511
+ 1
9512
+ 2
9513
+ 1
9514
+ 1
9515
+ 1
9516
+ 1
9517
+ 2
9518
+ 1
9519
+ 2
9520
+ 1
9521
+ 1
9522
+ 1
9523
+ 1
9524
+ 1
9525
+ 1
9526
+ 1
9527
+ 1
9528
+ 1
9529
+ 1
9530
+ 2
9531
+ 1
9532
+ 1
9533
+ 1
9534
+ 1
9535
+ 1
9536
+ 1
9537
+ 1
9538
+ 1
9539
+ 1
9540
+ 1
9541
+ 1
9542
+ 1
9543
+ 1
9544
+ 1
9545
+ 1
9546
+ 1
9547
+ 1
9548
+ 1
9549
+ 1
9550
+ 1
9551
+ 1
9552
+ 1
9553
+ 1
9554
+ 1
9555
+ 1
9556
+ 1
9557
+ 1
9558
+ 2
9559
+ 1
9560
+ 1
9561
+ 1
9562
+ 1
9563
+ 1
9564
+ 2
9565
+ 2
9566
+ 1
9567
+ 1
9568
+ 1
9569
+ 1
9570
+ 1
9571
+ 1
9572
+ 1
9573
+ 1
9574
+ 1
9575
+ 1
9576
+ 1
9577
+ 1
9578
+ 1
9579
+ 1
9580
+ 1
9581
+ 1
9582
+ 1
9583
+ 1
9584
+ 1
9585
+ 2
9586
+ 1
9587
+ 2
9588
+ 1
9589
+ 1
9590
+ 1
9591
+ 1
9592
+ 2
9593
+ 1
9594
+ 1
9595
+ 1
9596
+ 1
9597
+ 1
9598
+ 1
9599
+ 1
9600
+ 2
9601
+ 2
9602
+ 2
9603
+ 1
9604
+ 1
9605
+ 1
9606
+ 1
9607
+ 1
9608
+ 1
9609
+ 1
9610
+ 1
9611
+ 1
9612
+ 1
9613
+ 1
9614
+ 1
9615
+ 1
9616
+ 1
9617
+ 1
9618
+ 1
9619
+ 1
9620
+ 1
9621
+ 1
9622
+ 1
9623
+ 1
9624
+ 1
9625
+ 1
9626
+ 1
9627
+ 1
9628
+ 1
9629
+ 1
9630
+ 1
9631
+ 1
9632
+ 1
9633
+ 2
9634
+ 1
9635
+ 1
9636
+ 1
9637
+ 1
9638
+ 1
9639
+ 1
9640
+ 1
9641
+ 1
9642
+ 1
9643
+ 1
9644
+ 1
9645
+ 1
9646
+ 1
9647
+ 1
9648
+ 1
9649
+ 1
9650
+ 1
9651
+ 1
9652
+ 1
9653
+ 1
9654
+ 1
9655
+ 2
9656
+ 1
9657
+ 1
9658
+ 1
9659
+ 1
9660
+ 1
9661
+ 1
9662
+ 1
9663
+ 1
9664
+ 1
9665
+ 1
9666
+ 1
9667
+ 1
9668
+ 2
9669
+ 1
9670
+ 1
9671
+ 1
9672
+ 1
9673
+ 1
9674
+ 1
9675
+ 1
9676
+ 1
9677
+ 1
9678
+ 1
9679
+ 2
9680
+ 1
9681
+ 1
9682
+ 1
9683
+ 1
9684
+ 1
9685
+ 1
9686
+ 1
9687
+ 1
9688
+ 1
9689
+ 1
9690
+ 1
9691
+ 1
9692
+ 1
9693
+ 1
9694
+ 1
9695
+ 1
9696
+ 1
9697
+ 2
9698
+ 1
9699
+ 1
9700
+ 1
9701
+ 1
9702
+ 1
9703
+ 1
9704
+ 1
9705
+ 1
9706
+ 1
9707
+ 1
9708
+ 1
9709
+ 1
9710
+ 1
9711
+ 1
9712
+ 1
9713
+ 1
9714
+ 1
9715
+ 1
9716
+ 1
9717
+ 2
9718
+ 1
9719
+ 1
9720
+ 1
9721
+ 1
9722
+ 1
9723
+ 1
9724
+ 2
9725
+ 1
9726
+ 1
9727
+ 1
9728
+ 1
9729
+ 1
9730
+ 1
9731
+ 1
9732
+ 1
9733
+ 1
9734
+ 1
9735
+ 1
9736
+ 1
9737
+ 1
9738
+ 1
9739
+ 1
9740
+ 1
9741
+ 2
9742
+ 1
9743
+ 1
9744
+ 1
9745
+ 1
9746
+ 1
9747
+ 1
9748
+ 1
9749
+ 1
9750
+ 1
9751
+ 1
9752
+ 1
9753
+ 1
9754
+ 1
9755
+ 1
9756
+ 1
9757
+ 1
9758
+ 1
9759
+ 1
9760
+ 1
9761
+ 1
9762
+ 1
9763
+ 1
9764
+ 1
9765
+ 1
9766
+ 1
9767
+ 1
9768
+ 1
9769
+ 1
9770
+ 1
9771
+ 1
9772
+ 1
9773
+ 1
9774
+ 1
9775
+ 1
9776
+ 1
9777
+ 1
9778
+ 1
9779
+ 1
9780
+ 1
9781
+ 1
9782
+ 1
9783
+ 1
9784
+ 1
9785
+ 1
9786
+ 1
9787
+ 1
9788
+ 1
9789
+ 1
9790
+ 1
9791
+ 1
9792
+ 1
9793
+ 1
9794
+ 1
9795
+ 1
9796
+ 1
9797
+ 1
9798
+ 1
9799
+ 1
9800
+ 1
9801
+ 1
9802
+ 1
9803
+ 1
9804
+ 1
9805
+ 1
9806
+ 1
9807
+ 1
9808
+ 1
9809
+ 1
9810
+ 1
9811
+ 1
9812
+ 1
9813
+ 1
9814
+ 1
9815
+ 1
9816
+ 1
9817
+ 1
9818
+ 1
9819
+ 1
9820
+ 1
9821
+ 2
9822
+ 1
9823
+ 1
9824
+ 1
9825
+ 2
9826
+ 2
9827
+ 1
9828
+ 1
9829
+ 2
9830
+ 1
9831
+ 1
9832
+ 1
9833
+ 1
9834
+ 1
9835
+ 1
9836
+ 1
9837
+ 1
9838
+ 1
9839
+ 1
9840
+ 1
9841
+ 1
9842
+ 1
9843
+ 1
9844
+ 1
9845
+ 1
9846
+ 1
9847
+ 1
9848
+ 1
9849
+ 1
9850
+ 1
9851
+ 1
9852
+ 1
9853
+ 1
9854
+ 1
9855
+ 1
9856
+ 1
9857
+ 1
9858
+ 1
9859
+ 1
9860
+ 1
9861
+ 1
9862
+ 1
9863
+ 1
9864
+ 2
9865
+ 1
9866
+ 1
9867
+ 2
9868
+ 1
9869
+ 2
9870
+ 1
9871
+ 1
9872
+ 1
9873
+ 1
9874
+ 1
9875
+ 1
9876
+ 1
9877
+ 1
9878
+ 1
9879
+ 1
9880
+ 1
9881
+ 1
9882
+ 1
9883
+ 1
9884
+ 1
9885
+ 1
9886
+ 1
9887
+ 1
9888
+ 1
9889
+ 1
9890
+ 2
9891
+ 1
9892
+ 1
9893
+ 1
9894
+ 1
9895
+ 1
9896
+ 1
9897
+ 1
9898
+ 1
9899
+ 1
9900
+ 1
9901
+ 1
9902
+ 1
9903
+ 1
9904
+ 1
9905
+ 1
9906
+ 1
9907
+ 1
9908
+ 1
9909
+ 1
9910
+ 1
9911
+ 1
9912
+ 1
9913
+ 1
9914
+ 1
9915
+ 1
9916
+ 1
9917
+ 1
9918
+ 1
9919
+ 1
9920
+ 1
9921
+ 1
9922
+ 1
9923
+ 1
9924
+ 1
9925
+ 1
9926
+ 1
9927
+ 1
9928
+ 1
9929
+ 1
9930
+ 2
9931
+ 1
9932
+ 1
9933
+ 1
9934
+ 1
9935
+ 1
9936
+ 1
9937
+ 1
9938
+ 1
9939
+ 2
9940
+ 1
9941
+ 1
9942
+ 1
9943
+ 1
9944
+ 1
9945
+ 1
9946
+ 1
9947
+ 1
9948
+ 1
9949
+ 1
9950
+ 1
9951
+ 1
9952
+ 1
9953
+ 1
9954
+ 2
9955
+ 1
9956
+ 1
9957
+ 1
9958
+ 1
9959
+ 1
9960
+ 1
9961
+ 1
9962
+ 1
9963
+ 1
9964
+ 1
9965
+ 1
9966
+ 1
9967
+ 1
9968
+ 2
9969
+ 1
9970
+ 1
9971
+ 1
9972
+ 1
9973
+ 1
9974
+ 1
9975
+ 1
9976
+ 1
9977
+ 1
9978
+ 1
9979
+ 1
9980
+ 1
9981
+ 1
9982
+ 1
9983
+ 1
9984
+ 1
9985
+ 1
9986
+ 1
9987
+ 1
9988
+ 1
9989
+ 1
9990
+ 1
9991
+ 1
9992
+ 1
9993
+ 1
9994
+ 1
9995
+ 1
9996
+ 1
9997
+ 1
9998
+ 1
9999
+ 1
10000
+ 1
10001
+ 1
10002
+ 1
10003
+ 1
10004
+ 1
10005
+ 1
10006
+ 1
10007
+ 1
10008
+ 1
10009
+ 1
10010
+ 1
10011
+ 1
10012
+ 1
10013
+ 1
10014
+ 1
10015
+ 1
10016
+ 1
10017
+ 1
10018
+ 1
10019
+ 1
10020
+ 2
10021
+ 2
10022
+ 1
10023
+ 1
10024
+ 1
10025
+ 1
10026
+ 2
10027
+ 1
10028
+ 1
10029
+ 1
10030
+ 1
10031
+ 1
10032
+ 1
10033
+ 1
10034
+ 1
10035
+ 1
10036
+ 1
10037
+ 1
10038
+ 1
10039
+ 1
10040
+ 1
10041
+ 1
10042
+ 1
10043
+ 1
10044
+ 1
10045
+ 1
10046
+ 1
10047
+ 1
10048
+ 1
10049
+ 1
10050
+ 1
10051
+ 1
10052
+ 1
10053
+ 1
10054
+ 1
10055
+ 1
10056
+ 1
10057
+ 1
10058
+ 1
10059
+ 1
10060
+ 2
10061
+ 1
10062
+ 1
10063
+ 1
10064
+ 1
10065
+ 1
10066
+ 1
10067
+ 1
10068
+ 1
10069
+ 1
10070
+ 1
10071
+ 1
10072
+ 1
10073
+ 1
10074
+ 1
10075
+ 1
10076
+ 1
10077
+ 1
10078
+ 1
10079
+ 1
10080
+ 1
10081
+ 1
10082
+ 1
10083
+ 1
10084
+ 1
10085
+ 1
10086
+ 1
10087
+ 1
10088
+ 1
10089
+ 1
10090
+ 1
10091
+ 1
10092
+ 1
10093
+ 1
10094
+ 1
10095
+ 1
10096
+ 1
10097
+ 1
10098
+ 1
10099
+ 1
10100
+ 1
10101
+ 1
10102
+ 1
10103
+ 1
10104
+ 1
10105
+ 1
10106
+ 1
10107
+ 1
10108
+ 1
10109
+ 1
10110
+ 1
10111
+ 1
10112
+ 2
10113
+ 1
10114
+ 1
10115
+ 1
10116
+ 1
10117
+ 1
10118
+ 1
10119
+ 2
10120
+ 1
10121
+ 1
10122
+ 1
10123
+ 1
10124
+ 1
10125
+ 1
10126
+ 2
10127
+ 1
10128
+ 1
10129
+ 1
10130
+ 1
10131
+ 1
10132
+ 1
10133
+ 1
10134
+ 1
10135
+ 1
10136
+ 1
10137
+ 1
10138
+ 1
10139
+ 1
10140
+ 1
10141
+ 1
10142
+ 1
10143
+ 1
10144
+ 1
10145
+ 1
10146
+ 1
10147
+ 1
10148
+ 1
10149
+ 1
10150
+ 1
10151
+ 1
10152
+ 1
10153
+ 1
10154
+ 1
10155
+ 1
10156
+ 1
10157
+ 1
10158
+ 1
10159
+ 1
10160
+ 1
10161
+ 1
10162
+ 1
10163
+ 1
10164
+ 1
10165
+ 1
10166
+ 1
10167
+ 1
10168
+ 1
10169
+ 1
10170
+ 1
10171
+ 1
10172
+ 1
10173
+ 1
10174
+ 1
10175
+ 2
10176
+ 1
10177
+ 1
10178
+ 1
10179
+ 1
10180
+ 1
10181
+ 1
10182
+ 1
10183
+ 1
10184
+ 1
10185
+ 1
10186
+ 1
10187
+ 1
10188
+ 1
10189
+ 1
10190
+ 1
10191
+ 1
10192
+ 1
10193
+ 1
10194
+ 1
10195
+ 2
10196
+ 1
10197
+ 1
10198
+ 1
10199
+ 1
10200
+ 1
10201
+ 1
10202
+ 2
10203
+ 2
10204
+ 1
10205
+ 1
10206
+ 1
10207
+ 1
10208
+ 1
10209
+ 1
10210
+ 1
10211
+ 1
10212
+ 1
10213
+ 1
10214
+ 1
10215
+ 1
10216
+ 1
10217
+ 1
10218
+ 1
10219
+ 1
10220
+ 1
10221
+ 2
10222
+ 2
10223
+ 1
10224
+ 1
10225
+ 1
10226
+ 1
10227
+ 2
10228
+ 1
10229
+ 1
10230
+ 1
10231
+ 1
10232
+ 1
10233
+ 1
10234
+ 1
10235
+ 1
10236
+ 1
10237
+ 1
10238
+ 1
10239
+ 1
10240
+ 1
10241
+ 1
10242
+ 1
10243
+ 2
10244
+ 1
10245
+ 1
10246
+ 1
10247
+ 1
10248
+ 1
10249
+ 1
10250
+ 1
10251
+ 2
10252
+ 1
10253
+ 1
10254
+ 1
10255
+ 1
10256
+ 1
10257
+ 1
10258
+ 1
10259
+ 1
10260
+ 1
10261
+ 1
10262
+ 1
10263
+ 1
10264
+ 1
10265
+ 1
10266
+ 1
10267
+ 1
10268
+ 1
10269
+ 1
10270
+ 1
10271
+ 1
10272
+ 1
10273
+ 1
10274
+ 1
10275
+ 1
10276
+ 1
10277
+ 2
10278
+ 1
10279
+ 1
10280
+ 1
10281
+ 1
10282
+ 1
10283
+ 1
10284
+ 1
10285
+ 1
10286
+ 1
10287
+ 1
10288
+ 1
10289
+ 1
10290
+ 1
10291
+ 1
10292
+ 1
10293
+ 1
10294
+ 1
10295
+ 1
10296
+ 2
10297
+ 1
10298
+ 1
10299
+ 1
10300
+ 1
10301
+ 1
10302
+ 1
10303
+ 2
10304
+ 1
10305
+ 1
10306
+ 2
10307
+ 1
10308
+ 1
10309
+ 1
10310
+ 1
10311
+ 1
10312
+ 1
10313
+ 1
10314
+ 1
10315
+ 1
10316
+ 1
10317
+ 1
10318
+ 1
10319
+ 1
10320
+ 1
10321
+ 1
10322
+ 1
10323
+ 1
10324
+ 1
10325
+ 1
10326
+ 1
10327
+ 1
10328
+ 2
10329
+ 1
10330
+ 1
10331
+ 1
10332
+ 1
10333
+ 1
10334
+ 1
10335
+ 1
10336
+ 1
10337
+ 1
10338
+ 1
10339
+ 1
10340
+ 1
10341
+ 1
10342
+ 1
10343
+ 1
10344
+ 1
10345
+ 1
10346
+ 1
10347
+ 1
10348
+ 1
10349
+ 1
10350
+ 1
10351
+ 1
10352
+ 2
10353
+ 1
10354
+ 1
10355
+ 1
10356
+ 1
10357
+ 1
10358
+ 1
10359
+ 1
10360
+ 1
10361
+ 2
10362
+ 1
10363
+ 1
10364
+ 1
10365
+ 1
10366
+ 1
10367
+ 1
10368
+ 1
10369
+ 1
10370
+ 2
10371
+ 1
10372
+ 1
10373
+ 1
10374
+ 1
10375
+ 1
10376
+ 1
10377
+ 1
10378
+ 1
10379
+ 1
10380
+ 1
10381
+ 1
10382
+ 1
10383
+ 1
10384
+ 1
10385
+ 1
10386
+ 1
10387
+ 1
10388
+ 1
10389
+ 1
10390
+ 1
10391
+ 1
10392
+ 1
10393
+ 1
10394
+ 2
10395
+ 1
10396
+ 1
10397
+ 1
10398
+ 1
10399
+ 1
10400
+ 1
10401
+ 1
10402
+ 1
10403
+ 1
10404
+ 1
10405
+ 1
10406
+ 1
10407
+ 1
10408
+ 2
10409
+ 1
10410
+ 1
10411
+ 1
10412
+ 1
10413
+ 1
10414
+ 1
10415
+ 1
10416
+ 1
10417
+ 1
10418
+ 1
10419
+ 1
10420
+ 1
10421
+ 1
10422
+ 1
10423
+ 1
10424
+ 1
10425
+ 1
10426
+ 1
10427
+ 1
10428
+ 1
10429
+ 1
10430
+ 1
10431
+ 1
10432
+ 1
10433
+ 1
10434
+ 1
10435
+ 1
10436
+ 1
10437
+ 1
10438
+ 1
10439
+ 2
10440
+ 1
10441
+ 1
10442
+ 1
10443
+ 1
10444
+ 1
10445
+ 1
10446
+ 1
10447
+ 1
10448
+ 1
10449
+ 1
10450
+ 1
10451
+ 1
10452
+ 1
10453
+ 1
10454
+ 1
10455
+ 1
10456
+ 2
10457
+ 1
10458
+ 1
10459
+ 1
10460
+ 1
10461
+ 1
10462
+ 1
10463
+ 1
10464
+ 2
10465
+ 1
10466
+ 2
10467
+ 1
10468
+ 1
10469
+ 1
10470
+ 1
10471
+ 2
10472
+ 1
10473
+ 1
10474
+ 1
10475
+ 1
10476
+ 1
10477
+ 1
10478
+ 1
10479
+ 2
10480
+ 1
10481
+ 1
10482
+ 1
10483
+ 1
10484
+ 1
10485
+ 1
10486
+ 1
10487
+ 1
10488
+ 1
10489
+ 1
10490
+ 1
10491
+ 1
10492
+ 1
10493
+ 1
10494
+ 1
10495
+ 1
10496
+ 1
10497
+ 1
10498
+ 1
10499
+ 1
10500
+ 1
10501
+ 1
10502
+ 1
10503
+ 1
10504
+ 2
10505
+ 1
10506
+ 1
10507
+ 1
10508
+ 1
10509
+ 2
10510
+ 2
10511
+ 1
10512
+ 1
10513
+ 1
10514
+ 1
10515
+ 1
10516
+ 1
10517
+ 1
10518
+ 1
10519
+ 1
10520
+ 1
10521
+ 1
10522
+ 1
10523
+ 1
10524
+ 1
10525
+ 1
10526
+ 1
10527
+ 1
10528
+ 1
10529
+ 1
10530
+ 1
10531
+ 1
10532
+ 1
10533
+ 1
10534
+ 1
10535
+ 1
10536
+ 1
10537
+ 1
10538
+ 1
10539
+ 1
10540
+ 1
10541
+ 1
10542
+ 1
10543
+ 1
10544
+ 1
10545
+ 1
10546
+ 1
10547
+ 1
10548
+ 1
10549
+ 1
10550
+ 1
10551
+ 1
10552
+ 1
10553
+ 1
10554
+ 1
10555
+ 1
10556
+ 1
10557
+ 1
10558
+ 1
10559
+ 1
10560
+ 1
10561
+ 1
10562
+ 1
10563
+ 1
10564
+ 1
10565
+ 1
10566
+ 1
10567
+ 1
10568
+ 1
10569
+ 1
10570
+ 1
10571
+ 1
10572
+ 1
10573
+ 1
10574
+ 1
10575
+ 1
10576
+ 1
10577
+ 1
10578
+ 1
10579
+ 1
10580
+ 1
10581
+ 1
10582
+ 1
10583
+ 1
10584
+ 1
10585
+ 1
10586
+ 1
10587
+ 1
10588
+ 1
10589
+ 1
10590
+ 1
10591
+ 1
10592
+ 1
10593
+ 1
10594
+ 1
10595
+ 1
10596
+ 1
10597
+ 1
10598
+ 1
10599
+ 1
10600
+ 1
10601
+ 1
10602
+ 1
10603
+ 1
10604
+ 1
10605
+ 1
10606
+ 1
10607
+ 1
10608
+ 1
10609
+ 1
10610
+ 1
10611
+ 1
10612
+ 1
10613
+ 1
10614
+ 1
10615
+ 1
10616
+ 1
10617
+ 1
10618
+ 1
10619
+ 1
10620
+ 1
10621
+ 1
10622
+ 1
10623
+ 1
10624
+ 1
10625
+ 1
10626
+ 1
10627
+ 1
10628
+ 1
10629
+ 1
10630
+ 1
10631
+ 1
10632
+ 1
10633
+ 1
10634
+ 1
10635
+ 1
10636
+ 1
10637
+ 1
10638
+ 1
10639
+ 1
10640
+ 1
10641
+ 1
10642
+ 1
10643
+ 1
10644
+ 1
10645
+ 1
10646
+ 1
10647
+ 1
10648
+ 1
10649
+ 1
10650
+ 1
10651
+ 1
10652
+ 1
10653
+ 1
10654
+ 1
10655
+ 1
10656
+ 1
10657
+ 1
10658
+ 1
10659
+ 1
10660
+ 1
10661
+ 1
10662
+ 1
10663
+ 1
10664
+ 1
10665
+ 1
10666
+ 1
10667
+ 1
10668
+ 1
10669
+ 1
10670
+ 1
10671
+ 1
10672
+ 1
10673
+ 1
10674
+ 1
10675
+ 1
10676
+ 1
10677
+ 1
10678
+ 1
10679
+ 1
10680
+ 1
10681
+ 1
10682
+ 1
10683
+ 1
10684
+ 1
10685
+ 1
10686
+ 1
10687
+ 1
10688
+ 1
10689
+ 1
10690
+ 1
10691
+ 1
10692
+ 1
10693
+ 1
10694
+ 1
10695
+ 1
10696
+ 1
10697
+ 1
10698
+ 1
10699
+ 1
10700
+ 1
10701
+ 1
10702
+ 1
10703
+ 1
10704
+ 1
10705
+ 1
10706
+ 1
10707
+ 1
10708
+ 1
10709
+ 1
10710
+ 1
10711
+ 1
10712
+ 1
10713
+ 1
10714
+ 1
10715
+ 1
10716
+ 1
10717
+ 1
10718
+ 1
10719
+ 1
10720
+ 1
10721
+ 1
10722
+ 1
10723
+ 1
10724
+ 1
10725
+ 1
10726
+ 1
10727
+ 1
10728
+ 1
10729
+ 1
10730
+ 1
10731
+ 1
10732
+ 1
10733
+ 1
10734
+ 1
10735
+ 1
10736
+ 1
10737
+ 1
10738
+ 1
10739
+ 1
10740
+ 1
10741
+ 1
10742
+ 1
10743
+ 1
10744
+ 1
10745
+ 1
10746
+ 1
10747
+ 1
10748
+ 1
10749
+ 1
10750
+ 1
10751
+ 1
10752
+ 1
10753
+ 1
10754
+ 1
10755
+ 1
10756
+ 1
10757
+ 1
10758
+ 1
10759
+ 1
10760
+ 1
10761
+ 1
10762
+ 1
10763
+ 1
10764
+ 1
10765
+ 1
10766
+ 1
10767
+ 1
10768
+ 1
10769
+ 1
10770
+ 1
10771
+ 1
10772
+ 1
10773
+ 1
10774
+ 1
10775
+ 1
10776
+ 1
10777
+ 1
10778
+ 1
10779
+ 1
10780
+ 1
10781
+ 1
10782
+ 1
10783
+ 1
10784
+ 1
10785
+ 1
10786
+ 1
10787
+ 1
10788
+ 1
10789
+ 1
10790
+ 1
10791
+ 1
10792
+ 1
10793
+ 1
10794
+ 1
10795
+ 1
10796
+ 1
10797
+ 1
10798
+ 1
10799
+ 1
10800
+ 1
10801
+ 1
10802
+ 1
10803
+ 1
10804
+ 1
10805
+ 1
10806
+ 1
10807
+ 1
10808
+ 1
10809
+ 1
10810
+ 2
10811
+ 1
10812
+ 1
10813
+ 1
10814
+ 1
10815
+ 1
10816
+ 1
10817
+ 1
10818
+ 1
10819
+ 1
10820
+ 1
10821
+ 1
10822
+ 1
10823
+ 1
10824
+ 1
10825
+ 1
10826
+ 1
10827
+ 1
10828
+ 1
10829
+ 1
10830
+ 1
10831
+ 1
10832
+ 1
10833
+ 1
10834
+ 1
10835
+ 1
10836
+ 1
10837
+ 1
10838
+ 1
10839
+ 1
10840
+ 1
10841
+ 1
10842
+ 1
10843
+ 1
10844
+ 1
10845
+ 1
10846
+ 1
10847
+ 1
10848
+ 1
10849
+ 1
10850
+ 1
10851
+ 1
10852
+ 1
10853
+ 1
10854
+ 1
10855
+ 1
10856
+ 1
10857
+ 1
10858
+ 1
10859
+ 1
10860
+ 1
10861
+ 1
10862
+ 1
10863
+ 1
10864
+ 1
10865
+ 1
10866
+ 1
10867
+ 1
10868
+ 1
10869
+ 1
10870
+ 1
10871
+ 1
10872
+ 1
10873
+ 1
10874
+ 1
10875
+ 1
10876
+ 1
10877
+ 1
10878
+ 1
10879
+ 1
10880
+ 1
10881
+ 1
10882
+ 1
10883
+ 1
10884
+ 1
10885
+ 1
10886
+ 1
10887
+ 1
10888
+ 1
10889
+ 1
10890
+ 1
10891
+ 1
10892
+ 1
10893
+ 1
10894
+ 1
10895
+ 1
10896
+ 1
10897
+ 1
10898
+ 1
10899
+ 1
10900
+ 1
10901
+ 1
10902
+ 1
10903
+ 1
10904
+ 1
10905
+ 1
10906
+ 1
10907
+ 1
10908
+ 1
10909
+ 1
10910
+ 1
10911
+ 1
10912
+ 1
10913
+ 1
10914
+ 1
10915
+ 1
10916
+ 1
10917
+ 1
10918
+ 1
10919
+ 1
10920
+ 1
10921
+ 1
10922
+ 1
10923
+ 1
10924
+ 1
10925
+ 1
10926
+ 1
10927
+ 1
10928
+ 1
10929
+ 1
10930
+ 1
10931
+ 1
10932
+ 1
10933
+ 1
10934
+ 1
10935
+ 1
10936
+ 1
10937
+ 1
10938
+ 1
10939
+ 1
10940
+ 1
10941
+ 1
10942
+ 1
10943
+ 1
10944
+ 1
10945
+ 1
10946
+ 1
10947
+ 1
10948
+ 1
10949
+ 1
10950
+ 1
10951
+ 1
10952
+ 1
10953
+ 1
10954
+ 1
10955
+ 1
10956
+ 1
10957
+ 1
10958
+ 1
10959
+ 1
10960
+ 1
10961
+ 1
10962
+ 1
10963
+ 1
10964
+ 1
10965
+ 1
10966
+ 1
10967
+ 1
10968
+ 1
10969
+ 1
10970
+ 1
10971
+ 1
10972
+ 1
10973
+ 1
10974
+ 1
10975
+ 1
10976
+ 1
10977
+ 1
10978
+ 1
10979
+ 1
10980
+ 1
10981
+ 1
10982
+ 1
10983
+ 1
10984
+ 1
10985
+ 1
10986
+ 1
10987
+ 1
10988
+ 1
10989
+ 1
10990
+ 1
10991
+ 1
10992
+ 1
10993
+ 1
10994
+ 1
10995
+ 1
10996
+ 1
10997
+ 1
10998
+ 1
10999
+ 1
11000
+ 1
11001
+ 1
11002
+ 1
11003
+ 1
11004
+ 1
11005
+ 1
11006
+ 1
11007
+ 1
11008
+ 1
11009
+ 1
11010
+ 1
11011
+ 1
11012
+ 1
11013
+ 1
11014
+ 1
11015
+ 1
11016
+ 1
11017
+ 1
11018
+ 1
11019
+ 1
11020
+ 1
11021
+ 1
11022
+ 1
11023
+ 1
11024
+ 1
11025
+ 1
11026
+ 1
11027
+ 1
11028
+ 1
11029
+ 1
11030
+ 1
11031
+ 1
11032
+ 1
11033
+ 1
11034
+ 1
11035
+ 1
11036
+ 1
11037
+ 1
11038
+ 1
11039
+ 1
11040
+ 1
11041
+ 1
11042
+ 1
11043
+ 1
11044
+ 1
11045
+ 1
11046
+ 1
11047
+ 1
11048
+ 1
11049
+ 1
11050
+ 1
11051
+ 1
11052
+ 1
11053
+ 1
11054
+ 1
11055
+ 1
11056
+ 1
11057
+ 1
11058
+ 1
11059
+ 1
11060
+ 1
11061
+ 1
11062
+ 1
11063
+ 1
11064
+ 1
11065
+ 1
11066
+ 1
11067
+ 1
11068
+ 1
11069
+ 1
11070
+ 1
11071
+ 1
11072
+ 1
11073
+ 1
11074
+ 1
11075
+ 1
11076
+ 1
11077
+ 1
11078
+ 1
11079
+ 1
11080
+ 1
11081
+ 2
11082
+ 1
11083
+ 1
11084
+ 1
11085
+ 1
11086
+ 1
11087
+ 1
11088
+ 1
11089
+ 1
11090
+ 1
11091
+ 1
11092
+ 1
11093
+ 1
11094
+ 1
11095
+ 1
11096
+ 1
11097
+ 1
11098
+ 1
11099
+ 1
11100
+ 1
11101
+ 1
11102
+ 1
11103
+ 1
11104
+ 1
11105
+ 1
11106
+ 1
11107
+ 1
11108
+ 1
11109
+ 1
11110
+ 1
11111
+ 1
11112
+ 1
11113
+ 1
11114
+ 1
11115
+ 1
11116
+ 1
11117
+ 1
11118
+ 1
11119
+ 1
11120
+ 1
11121
+ 1
11122
+ 1
11123
+ 1
11124
+ 1
11125
+ 1
11126
+ 1
11127
+ 1
11128
+ 1
11129
+ 1
11130
+ 1
11131
+ 1
11132
+ 1
11133
+ 1
11134
+ 1
11135
+ 1
11136
+ 1
11137
+ 1
11138
+ 1
11139
+ 1
11140
+ 1
11141
+ 1
11142
+ 1
11143
+ 1
11144
+ 1
11145
+ 1
11146
+ 1
11147
+ 1
11148
+ 1
11149
+ 1
11150
+ 1
11151
+ 1
11152
+ 1
11153
+ 1
11154
+ 1
11155
+ 1
11156
+ 1
11157
+ 1
11158
+ 1
11159
+ 1
11160
+ 1
11161
+ 1
11162
+ 1
11163
+ 1
11164
+ 1
11165
+ 1
11166
+ 1
11167
+ 1
11168
+ 1
11169
+ 1
11170
+ 1
11171
+ 1
11172
+ 1
11173
+ 1
11174
+ 1
11175
+ 1
11176
+ 1
11177
+ 1
11178
+ 1
11179
+ 1
11180
+ 1
11181
+ 1
11182
+ 1
11183
+ 1
11184
+ 1
11185
+ 1
11186
+ 1
11187
+ 1
11188
+ 1
11189
+ 1
11190
+ 1
11191
+ 1
11192
+ 1
11193
+ 1
11194
+ 1
11195
+ 1
11196
+ 1
11197
+ 1
11198
+ 1
11199
+ 1
11200
+ 1
11201
+ 1
11202
+ 1
11203
+ 1
11204
+ 1
11205
+ 1
11206
+ 1
11207
+ 1
11208
+ 1
11209
+ 1
11210
+ 1
11211
+ 1
11212
+ 1
11213
+ 1
11214
+ 1
11215
+ 1
11216
+ 1
11217
+ 1
11218
+ 1
11219
+ 1
11220
+ 1
11221
+ 1
11222
+ 1
11223
+ 1
11224
+ 1
11225
+ 1
11226
+ 1
11227
+ 1
11228
+ 1
11229
+ 1
11230
+ 1
11231
+ 1
11232
+ 1
11233
+ 1
11234
+ 1
11235
+ 1
11236
+ 1
11237
+ 1
11238
+ 1
11239
+ 1
11240
+ 1
11241
+ 1
11242
+ 1
11243
+ 1
11244
+ 1
11245
+ 1
11246
+ 1
11247
+ 1
11248
+ 1
11249
+ 1
11250
+ 1
11251
+ 1
11252
+ 1
11253
+ 1
11254
+ 1
11255
+ 1
11256
+ 1
11257
+ 1
11258
+ 1
11259
+ 1
11260
+ 1
11261
+ 1
11262
+ 1
11263
+ 1
11264
+ 1
11265
+ 1
11266
+ 1
11267
+ 1
11268
+ 1
11269
+ 1
11270
+ 1
11271
+ 1
11272
+ 1
11273
+ 1
11274
+ 1
11275
+ 1
11276
+ 1
11277
+ 1
11278
+ 1
11279
+ 1
11280
+ 1
11281
+ 1
11282
+ 1
11283
+ 1
11284
+ 1
11285
+ 1
11286
+ 1
11287
+ 1
11288
+ 1
11289
+ 1
11290
+ 1
11291
+ 1
11292
+ 1
11293
+ 1
11294
+ 1
11295
+ 1
11296
+ 1
11297
+ 1
11298
+ 1
11299
+ 1
11300
+ 1
11301
+ 1
11302
+ 1
11303
+ 1
11304
+ 1
11305
+ 1
11306
+ 1
11307
+ 1
11308
+ 1
11309
+ 1
11310
+ 1
11311
+ 1
11312
+ 1
11313
+ 1
11314
+ 1
11315
+ 1
11316
+ 1
11317
+ 1
11318
+ 1
11319
+ 1
11320
+ 1
11321
+ 1
11322
+ 1
11323
+ 1
11324
+ 1
11325
+ 1
11326
+ 1
11327
+ 1
11328
+ 1
11329
+ 1
11330
+ 1
11331
+ 1
11332
+ 2
11333
+ 1
11334
+ 1
11335
+ 1
11336
+ 1
11337
+ 1
11338
+ 1
11339
+ 1
11340
+ 1
11341
+ 1
11342
+ 1
11343
+ 1
11344
+ 1
11345
+ 1
11346
+ 1
11347
+ 1
11348
+ 1
11349
+ 1
11350
+ 1
11351
+ 1
11352
+ 1
11353
+ 1
11354
+ 1
11355
+ 1
11356
+ 1
11357
+ 1
11358
+ 1
11359
+ 1
11360
+ 1
11361
+ 1
11362
+ 1
11363
+ 1
11364
+ 1
11365
+ 1
11366
+ 1
11367
+ 1
11368
+ 1
11369
+ 1
11370
+ 1
11371
+ 1
11372
+ 1
11373
+ 1
11374
+ 1
11375
+ 1
11376
+ 1
11377
+ 1
11378
+ 1
11379
+ 1
11380
+ 1
11381
+ 1
11382
+ 1
11383
+ 1
11384
+ 1
11385
+ 1
11386
+ 1
11387
+ 1
11388
+ 1
11389
+ 1
11390
+ 1
11391
+ 1
11392
+ 1
11393
+ 1
11394
+ 1
11395
+ 1
11396
+ 1
11397
+ 1
11398
+ 1
11399
+ 1
11400
+ 1
11401
+ 1
11402
+ 1
11403
+ 1
11404
+ 1
11405
+ 1
11406
+ 1
11407
+ 1
11408
+ 1
11409
+ 1
11410
+ 1
11411
+ 1
11412
+ 1
11413
+ 1
11414
+ 1
11415
+ 1
11416
+ 1
11417
+ 1
11418
+ 1
11419
+ 1
11420
+ 1
11421
+ 1
11422
+ 1
11423
+ 1
11424
+ 1
11425
+ 1
11426
+ 1
11427
+ 1
11428
+ 1
11429
+ 1
11430
+ 1
11431
+ 1
11432
+ 1
11433
+ 1
11434
+ 1
11435
+ 1
11436
+ 1
11437
+ 1
11438
+ 1
11439
+ 1
11440
+ 1
11441
+ 1
11442
+ 1
11443
+ 1
11444
+ 1
11445
+ 1
11446
+ 1
11447
+ 1
11448
+ 1
11449
+ 1
11450
+ 1
11451
+ 1
11452
+ 1
11453
+ 1
11454
+ 1
11455
+ 1
11456
+ 1
11457
+ 1
11458
+ 1
11459
+ 1
11460
+ 1
11461
+ 1
11462
+ 1
11463
+ 1
11464
+ 1
11465
+ 1
11466
+ 1
11467
+ 1
11468
+ 1
11469
+ 1
11470
+ 1
11471
+ 1
11472
+ 1
11473
+ 1
11474
+ 1
11475
+ 1
11476
+ 1
11477
+ 1
11478
+ 1
11479
+ 1
11480
+ 1
11481
+ 1
11482
+ 1
11483
+ 1
11484
+ 1
11485
+ 1
11486
+ 1
11487
+ 1
11488
+ 1
11489
+ 1
11490
+ 1
11491
+ 1
11492
+ 1
11493
+ 1
11494
+ 1
11495
+ 1
11496
+ 1
11497
+ 1
11498
+ 1
11499
+ 1
11500
+ 1
11501
+ 1
11502
+ 1
11503
+ 1
11504
+ 1
11505
+ 1
11506
+ 1
11507
+ 1
11508
+ 1
11509
+ 1
11510
+ 1
11511
+ 1
11512
+ 1
11513
+ 1
11514
+ 1
11515
+ 1
11516
+ 1
11517
+ 1
11518
+ 1
11519
+ 1
11520
+ 1
11521
+ 1
11522
+ 1
11523
+ 1
11524
+ 1
11525
+ 1
11526
+ 1
11527
+ 1
11528
+ 1
11529
+ 1
11530
+ 1
11531
+ 1
11532
+ 1
11533
+ 1
11534
+ 1
11535
+ 1
11536
+ 1
11537
+ 1
11538
+ 1
11539
+ 1
11540
+ 1
11541
+ 1
11542
+ 1
11543
+ 1
11544
+ 1
11545
+ 1
11546
+ 1
11547
+ 1
11548
+ 1
11549
+ 1
11550
+ 1
11551
+ 1
11552
+ 1
11553
+ 1
11554
+ 1
11555
+ 1
11556
+ 1
11557
+ 1
11558
+ 1
11559
+ 1
11560
+ 1
11561
+ 1
11562
+ 1
11563
+ 1
11564
+ 1
11565
+ 1
11566
+ 1
11567
+ 1
11568
+ 1
11569
+ 1
11570
+ 1
11571
+ 1
11572
+ 1
11573
+ 1
11574
+ 1
11575
+ 1
11576
+ 1
11577
+ 1
11578
+ 1
11579
+ 1
11580
+ 1
11581
+ 1
11582
+ 1
11583
+ 1
11584
+ 1
11585
+ 1
11586
+ 1
11587
+ 1
11588
+ 1
11589
+ 1
11590
+ 1
11591
+ 1
11592
+ 1
11593
+ 1
11594
+ 1
11595
+ 1
11596
+ 1
11597
+ 1
11598
+ 1
11599
+ 1
11600
+ 1
11601
+ 1
11602
+ 1
11603
+ 1
11604
+ 1
11605
+ 1
11606
+ 1
11607
+ 1
11608
+ 2
11609
+ 1
11610
+ 1
11611
+ 2
11612
+ 1
11613
+ 1
11614
+ 1
11615
+ 1
11616
+ 1
11617
+ 1
11618
+ 1
11619
+ 1
11620
+ 1
11621
+ 1
11622
+ 1
11623
+ 1
11624
+ 1
11625
+ 1
11626
+ 1
11627
+ 1
11628
+ 1
11629
+ 1
11630
+ 1
11631
+ 1
11632
+ 1
11633
+ 1
11634
+ 1
11635
+ 1
11636
+ 1
11637
+ 1
11638
+ 1
11639
+ 1
11640
+ 1
11641
+ 1
11642
+ 1
11643
+ 1
11644
+ 1
11645
+ 1
11646
+ 1
11647
+ 1
11648
+ 1
11649
+ 1
11650
+ 1
11651
+ 1
11652
+ 1
11653
+ 1
11654
+ 1
11655
+ 1
11656
+ 1
11657
+ 1
11658
+ 1
11659
+ 1
11660
+ 1
11661
+ 1
11662
+ 1
11663
+ 1
11664
+ 1
11665
+ 1
11666
+ 1
11667
+ 1
11668
+ 1
11669
+ 1
11670
+ 1
11671
+ 1
11672
+ 1
11673
+ 1
11674
+ 1
11675
+ 1
11676
+ 1
11677
+ 1
11678
+ 1
11679
+ 1
11680
+ 1
11681
+ 1
11682
+ 1
11683
+ 1
11684
+ 1
11685
+ 1
11686
+ 1
11687
+ 1
11688
+ 1
11689
+ 1
11690
+ 1
11691
+ 1
11692
+ 1
11693
+ 1
11694
+ 1
11695
+ 1
11696
+ 1
11697
+ 1
11698
+ 1
11699
+ 1
11700
+ 1
11701
+ 1
11702
+ 1
11703
+ 1
11704
+ 1
11705
+ 1
11706
+ 1
11707
+ 1
11708
+ 1
11709
+ 1
11710
+ 1
11711
+ 1
11712
+ 1
11713
+ 1
11714
+ 1
11715
+ 1
11716
+ 1
11717
+ 1
11718
+ 1
11719
+ 1
11720
+ 1
11721
+ 1
11722
+ 1
11723
+ 1
11724
+ 1
11725
+ 1
11726
+ 1
11727
+ 1
11728
+ 1
11729
+ 1
11730
+ 1
11731
+ 1
11732
+ 1
11733
+ 1
11734
+ 1
11735
+ 1
11736
+ 1
11737
+ 1
11738
+ 1
11739
+ 1
11740
+ 1
11741
+ 1
11742
+ 1
11743
+ 1
11744
+ 1
11745
+ 1
11746
+ 1
11747
+ 1
11748
+ 1
11749
+ 1
11750
+ 1
11751
+ 1
11752
+ 1
11753
+ 1
11754
+ 1
11755
+ 1
11756
+ 1
11757
+ 1
11758
+ 1
11759
+ 1
11760
+ 1
11761
+ 1
11762
+ 1
11763
+ 1
11764
+ 1
11765
+ 1
11766
+ 1
11767
+ 1
11768
+ 1
11769
+ 1
11770
+ 1
11771
+ 1
11772
+ 1
11773
+ 1
11774
+ 1
11775
+ 1
11776
+ 1
11777
+ 1
11778
+ 1
11779
+ 1
11780
+ 1
11781
+ 1
11782
+ 1
11783
+ 1
11784
+ 1
11785
+ 1
11786
+ 1
11787
+ 1
11788
+ 1
11789
+ 1
11790
+ 1
11791
+ 1
11792
+ 1
11793
+ 1
11794
+ 1
11795
+ 1
11796
+ 1
11797
+ 1
11798
+ 1
11799
+ 1
11800
+ 1
11801
+ 1
11802
+ 1
11803
+ 1
11804
+ 1
11805
+ 1
11806
+ 1
11807
+ 1
11808
+ 1
11809
+ 1
11810
+ 1
11811
+ 1
11812
+ 1
11813
+ 1
11814
+ 1
11815
+ 1
11816
+ 1
11817
+ 1
11818
+ 1
11819
+ 1
11820
+ 1
11821
+ 1
11822
+ 1
11823
+ 1
11824
+ 1
11825
+ 1
11826
+ 1
11827
+ 1
11828
+ 1
11829
+ 1
11830
+ 1
11831
+ 1
11832
+ 1
11833
+ 1
11834
+ 1
11835
+ 1
11836
+ 1
11837
+ 1
11838
+ 1
11839
+ 1
11840
+ 1
11841
+ 1
11842
+ 1
11843
+ 1
11844
+ 1
11845
+ 1
11846
+ 1
11847
+ 1
11848
+ 1
11849
+ 1
11850
+ 1
11851
+ 1
11852
+ 1
11853
+ 1
11854
+ 1
11855
+ 1
11856
+ 1
11857
+ 1
11858
+ 1
11859
+ 1
11860
+ 1
11861
+ 1
11862
+ 1
11863
+ 1
11864
+ 1
11865
+ 2
11866
+ 1
11867
+ 1
11868
+ 1
11869
+ 1
11870
+ 1
11871
+ 1
11872
+ 1
11873
+ 1
11874
+ 1
11875
+ 1
11876
+ 1
11877
+ 1
11878
+ 1
11879
+ 1
11880
+ 1
11881
+ 1
11882
+ 1
11883
+ 1
11884
+ 1
11885
+ 1
11886
+ 1
11887
+ 1
11888
+ 1
11889
+ 1
11890
+ 1
11891
+ 1
11892
+ 1
11893
+ 1
11894
+ 1
11895
+ 1
11896
+ 1
11897
+ 1
11898
+ 1
11899
+ 1
11900
+ 1
11901
+ 1
11902
+ 1
11903
+ 1
11904
+ 1
11905
+ 1
11906
+ 1
11907
+ 1
11908
+ 1
11909
+ 1
11910
+ 1
11911
+ 1
11912
+ 1
11913
+ 1
11914
+ 1
11915
+ 1
11916
+ 1
11917
+ 1
11918
+ 1
11919
+ 1
11920
+ 1
11921
+ 1
11922
+ 1
11923
+ 1
11924
+ 1
11925
+ 1
11926
+ 1
11927
+ 1
11928
+ 1
11929
+ 1
11930
+ 1
11931
+ 1
11932
+ 1
11933
+ 1
11934
+ 1
11935
+ 1
11936
+ 1
11937
+ 1
11938
+ 1
11939
+ 1
11940
+ 1
11941
+ 1
11942
+ 1
11943
+ 1
11944
+ 1
11945
+ 1
11946
+ 1
11947
+ 1
11948
+ 1
11949
+ 1
[Diff viewer residue: added lines 11950–18350 of a train/val split data file (each line a single value, 1 or 2), consistent with basicsr/archs/gmflow/data/chairs_split.txt in this commit; repetitive raw data omitted.]
+ 1
18351
+ 1
18352
+ 1
18353
+ 1
18354
+ 1
18355
+ 1
18356
+ 1
18357
+ 1
18358
+ 1
18359
+ 1
18360
+ 1
18361
+ 1
18362
+ 1
18363
+ 1
18364
+ 1
18365
+ 1
18366
+ 1
18367
+ 1
18368
+ 1
18369
+ 1
18370
+ 1
18371
+ 1
18372
+ 1
18373
+ 1
18374
+ 1
18375
+ 1
18376
+ 1
18377
+ 1
18378
+ 1
18379
+ 1
18380
+ 1
18381
+ 1
18382
+ 1
18383
+ 1
18384
+ 1
18385
+ 1
18386
+ 1
18387
+ 1
18388
+ 1
18389
+ 1
18390
+ 1
18391
+ 1
18392
+ 1
18393
+ 1
18394
+ 1
18395
+ 1
18396
+ 1
18397
+ 1
18398
+ 1
18399
+ 1
18400
+ 1
18401
+ 1
18402
+ 1
18403
+ 1
18404
+ 1
18405
+ 1
18406
+ 1
18407
+ 1
18408
+ 1
18409
+ 1
18410
+ 1
18411
+ 1
18412
+ 1
18413
+ 1
18414
+ 1
18415
+ 1
18416
+ 1
18417
+ 1
18418
+ 1
18419
+ 1
18420
+ 1
18421
+ 1
18422
+ 1
18423
+ 1
18424
+ 1
18425
+ 1
18426
+ 1
18427
+ 1
18428
+ 1
18429
+ 1
18430
+ 1
18431
+ 1
18432
+ 1
18433
+ 1
18434
+ 1
18435
+ 1
18436
+ 1
18437
+ 1
18438
+ 1
18439
+ 1
18440
+ 1
18441
+ 1
18442
+ 1
18443
+ 1
18444
+ 1
18445
+ 1
18446
+ 1
18447
+ 1
18448
+ 1
18449
+ 1
18450
+ 1
18451
+ 1
18452
+ 1
18453
+ 1
18454
+ 1
18455
+ 1
18456
+ 1
18457
+ 1
18458
+ 1
18459
+ 1
18460
+ 1
18461
+ 1
18462
+ 1
18463
+ 1
18464
+ 1
18465
+ 1
18466
+ 1
18467
+ 1
18468
+ 1
18469
+ 1
18470
+ 1
18471
+ 1
18472
+ 1
18473
+ 1
18474
+ 1
18475
+ 1
18476
+ 1
18477
+ 1
18478
+ 1
18479
+ 1
18480
+ 1
18481
+ 1
18482
+ 1
18483
+ 1
18484
+ 1
18485
+ 1
18486
+ 1
18487
+ 1
18488
+ 1
18489
+ 1
18490
+ 1
18491
+ 1
18492
+ 1
18493
+ 1
18494
+ 1
18495
+ 1
18496
+ 1
18497
+ 1
18498
+ 1
18499
+ 1
18500
+ 1
18501
+ 1
18502
+ 1
18503
+ 1
18504
+ 1
18505
+ 1
18506
+ 1
18507
+ 1
18508
+ 1
18509
+ 1
18510
+ 1
18511
+ 1
18512
+ 1
18513
+ 1
18514
+ 1
18515
+ 1
18516
+ 1
18517
+ 1
18518
+ 1
18519
+ 1
18520
+ 1
18521
+ 1
18522
+ 1
18523
+ 1
18524
+ 1
18525
+ 1
18526
+ 1
18527
+ 1
18528
+ 1
18529
+ 1
18530
+ 1
18531
+ 1
18532
+ 1
18533
+ 1
18534
+ 1
18535
+ 1
18536
+ 1
18537
+ 1
18538
+ 1
18539
+ 1
18540
+ 1
18541
+ 1
18542
+ 1
18543
+ 1
18544
+ 1
18545
+ 1
18546
+ 1
18547
+ 1
18548
+ 1
18549
+ 1
18550
+ 1
18551
+ 1
18552
+ 1
18553
+ 1
18554
+ 1
18555
+ 1
18556
+ 1
18557
+ 1
18558
+ 1
18559
+ 1
18560
+ 1
18561
+ 1
18562
+ 1
18563
+ 1
18564
+ 1
18565
+ 1
18566
+ 1
18567
+ 1
18568
+ 1
18569
+ 1
18570
+ 1
18571
+ 1
18572
+ 1
18573
+ 1
18574
+ 1
18575
+ 1
18576
+ 1
18577
+ 1
18578
+ 1
18579
+ 1
18580
+ 1
18581
+ 1
18582
+ 1
18583
+ 1
18584
+ 1
18585
+ 1
18586
+ 1
18587
+ 1
18588
+ 1
18589
+ 1
18590
+ 1
18591
+ 1
18592
+ 1
18593
+ 1
18594
+ 1
18595
+ 1
18596
+ 1
18597
+ 1
18598
+ 1
18599
+ 1
18600
+ 1
18601
+ 1
18602
+ 1
18603
+ 1
18604
+ 1
18605
+ 1
18606
+ 1
18607
+ 1
18608
+ 1
18609
+ 1
18610
+ 1
18611
+ 1
18612
+ 1
18613
+ 1
18614
+ 1
18615
+ 1
18616
+ 1
18617
+ 1
18618
+ 1
18619
+ 1
18620
+ 1
18621
+ 1
18622
+ 1
18623
+ 1
18624
+ 1
18625
+ 1
18626
+ 1
18627
+ 1
18628
+ 1
18629
+ 1
18630
+ 1
18631
+ 1
18632
+ 1
18633
+ 1
18634
+ 1
18635
+ 1
18636
+ 1
18637
+ 1
18638
+ 1
18639
+ 1
18640
+ 1
18641
+ 1
18642
+ 1
18643
+ 1
18644
+ 1
18645
+ 1
18646
+ 1
18647
+ 1
18648
+ 1
18649
+ 1
18650
+ 1
18651
+ 1
18652
+ 1
18653
+ 1
18654
+ 1
18655
+ 1
18656
+ 1
18657
+ 1
18658
+ 1
18659
+ 1
18660
+ 1
18661
+ 1
18662
+ 1
18663
+ 1
18664
+ 1
18665
+ 1
18666
+ 1
18667
+ 1
18668
+ 1
18669
+ 1
18670
+ 1
18671
+ 1
18672
+ 1
18673
+ 1
18674
+ 1
18675
+ 1
18676
+ 1
18677
+ 1
18678
+ 1
18679
+ 1
18680
+ 1
18681
+ 1
18682
+ 1
18683
+ 1
18684
+ 1
18685
+ 1
18686
+ 1
18687
+ 1
18688
+ 1
18689
+ 1
18690
+ 1
18691
+ 1
18692
+ 1
18693
+ 1
18694
+ 1
18695
+ 1
18696
+ 1
18697
+ 1
18698
+ 1
18699
+ 1
18700
+ 1
18701
+ 1
18702
+ 1
18703
+ 1
18704
+ 1
18705
+ 1
18706
+ 1
18707
+ 1
18708
+ 1
18709
+ 1
18710
+ 1
18711
+ 1
18712
+ 1
18713
+ 1
18714
+ 1
18715
+ 1
18716
+ 1
18717
+ 1
18718
+ 1
18719
+ 1
18720
+ 1
18721
+ 1
18722
+ 1
18723
+ 1
18724
+ 1
18725
+ 1
18726
+ 1
18727
+ 1
18728
+ 1
18729
+ 1
18730
+ 1
18731
+ 1
18732
+ 1
18733
+ 1
18734
+ 1
18735
+ 1
18736
+ 1
18737
+ 1
18738
+ 1
18739
+ 1
18740
+ 1
18741
+ 1
18742
+ 1
18743
+ 1
18744
+ 1
18745
+ 1
18746
+ 1
18747
+ 1
18748
+ 1
18749
+ 1
18750
+ 1
18751
+ 1
18752
+ 1
18753
+ 1
18754
+ 1
18755
+ 1
18756
+ 1
18757
+ 1
18758
+ 1
18759
+ 1
18760
+ 1
18761
+ 1
18762
+ 1
18763
+ 1
18764
+ 1
18765
+ 1
18766
+ 1
18767
+ 1
18768
+ 1
18769
+ 1
18770
+ 1
18771
+ 1
18772
+ 1
18773
+ 1
18774
+ 1
18775
+ 1
18776
+ 1
18777
+ 1
18778
+ 1
18779
+ 1
18780
+ 1
18781
+ 1
18782
+ 1
18783
+ 1
18784
+ 1
18785
+ 1
18786
+ 1
18787
+ 1
18788
+ 1
18789
+ 1
18790
+ 1
18791
+ 1
18792
+ 1
18793
+ 1
18794
+ 1
18795
+ 1
18796
+ 1
18797
+ 1
18798
+ 1
18799
+ 1
18800
+ 1
18801
+ 1
18802
+ 1
18803
+ 1
18804
+ 1
18805
+ 1
18806
+ 1
18807
+ 1
18808
+ 1
18809
+ 1
18810
+ 1
18811
+ 1
18812
+ 1
18813
+ 1
18814
+ 1
18815
+ 1
18816
+ 1
18817
+ 1
18818
+ 1
18819
+ 1
18820
+ 1
18821
+ 1
18822
+ 1
18823
+ 1
18824
+ 1
18825
+ 1
18826
+ 1
18827
+ 1
18828
+ 1
18829
+ 1
18830
+ 1
18831
+ 1
18832
+ 1
18833
+ 1
18834
+ 1
18835
+ 1
18836
+ 1
18837
+ 1
18838
+ 1
18839
+ 1
18840
+ 1
18841
+ 1
18842
+ 1
18843
+ 1
18844
+ 1
18845
+ 1
18846
+ 1
18847
+ 1
18848
+ 1
18849
+ 1
18850
+ 1
18851
+ 1
18852
+ 1
18853
+ 1
18854
+ 1
18855
+ 1
18856
+ 1
18857
+ 1
18858
+ 1
18859
+ 1
18860
+ 1
18861
+ 1
18862
+ 1
18863
+ 1
18864
+ 1
18865
+ 1
18866
+ 1
18867
+ 1
18868
+ 1
18869
+ 1
18870
+ 1
18871
+ 1
18872
+ 1
18873
+ 1
18874
+ 1
18875
+ 1
18876
+ 1
18877
+ 1
18878
+ 1
18879
+ 1
18880
+ 1
18881
+ 1
18882
+ 1
18883
+ 1
18884
+ 1
18885
+ 1
18886
+ 1
18887
+ 1
18888
+ 1
18889
+ 1
18890
+ 1
18891
+ 1
18892
+ 1
18893
+ 1
18894
+ 1
18895
+ 1
18896
+ 1
18897
+ 1
18898
+ 1
18899
+ 1
18900
+ 1
18901
+ 1
18902
+ 1
18903
+ 1
18904
+ 1
18905
+ 1
18906
+ 1
18907
+ 1
18908
+ 1
18909
+ 1
18910
+ 1
18911
+ 1
18912
+ 1
18913
+ 1
18914
+ 1
18915
+ 1
18916
+ 1
18917
+ 1
18918
+ 1
18919
+ 1
18920
+ 1
18921
+ 1
18922
+ 1
18923
+ 1
18924
+ 1
18925
+ 1
18926
+ 1
18927
+ 1
18928
+ 1
18929
+ 1
18930
+ 1
18931
+ 1
18932
+ 1
18933
+ 1
18934
+ 1
18935
+ 1
18936
+ 1
18937
+ 1
18938
+ 1
18939
+ 1
18940
+ 1
18941
+ 1
18942
+ 1
18943
+ 1
18944
+ 1
18945
+ 1
18946
+ 1
18947
+ 1
18948
+ 1
18949
+ 1
18950
+ 1
18951
+ 1
18952
+ 1
18953
+ 1
18954
+ 1
18955
+ 1
18956
+ 1
18957
+ 1
18958
+ 1
18959
+ 1
18960
+ 1
18961
+ 1
18962
+ 1
18963
+ 1
18964
+ 1
18965
+ 1
18966
+ 1
18967
+ 1
18968
+ 1
18969
+ 1
18970
+ 1
18971
+ 1
18972
+ 1
18973
+ 1
18974
+ 1
18975
+ 1
18976
+ 1
18977
+ 1
18978
+ 1
18979
+ 1
18980
+ 1
18981
+ 1
18982
+ 1
18983
+ 1
18984
+ 1
18985
+ 1
18986
+ 1
18987
+ 1
18988
+ 1
18989
+ 1
18990
+ 1
18991
+ 1
18992
+ 1
18993
+ 1
18994
+ 1
18995
+ 1
18996
+ 1
18997
+ 1
18998
+ 1
18999
+ 1
19000
+ 1
19001
+ 1
19002
+ 1
19003
+ 1
19004
+ 1
19005
+ 1
19006
+ 1
19007
+ 1
19008
+ 1
19009
+ 1
19010
+ 1
19011
+ 1
19012
+ 1
19013
+ 1
19014
+ 1
19015
+ 1
19016
+ 1
19017
+ 1
19018
+ 1
19019
+ 1
19020
+ 1
19021
+ 1
19022
+ 1
19023
+ 1
19024
+ 1
19025
+ 1
19026
+ 1
19027
+ 1
19028
+ 1
19029
+ 1
19030
+ 1
19031
+ 1
19032
+ 1
19033
+ 1
19034
+ 1
19035
+ 1
19036
+ 1
19037
+ 1
19038
+ 1
19039
+ 1
19040
+ 1
19041
+ 1
19042
+ 1
19043
+ 1
19044
+ 1
19045
+ 1
19046
+ 1
19047
+ 1
19048
+ 1
19049
+ 1
19050
+ 1
19051
+ 1
19052
+ 1
19053
+ 1
19054
+ 1
19055
+ 1
19056
+ 1
19057
+ 1
19058
+ 1
19059
+ 1
19060
+ 1
19061
+ 1
19062
+ 1
19063
+ 1
19064
+ 1
19065
+ 1
19066
+ 1
19067
+ 1
19068
+ 1
19069
+ 1
19070
+ 1
19071
+ 1
19072
+ 1
19073
+ 1
19074
+ 1
19075
+ 1
19076
+ 1
19077
+ 1
19078
+ 1
19079
+ 1
19080
+ 1
19081
+ 1
19082
+ 1
19083
+ 1
19084
+ 1
19085
+ 1
19086
+ 1
19087
+ 1
19088
+ 1
19089
+ 1
19090
+ 1
19091
+ 1
19092
+ 1
19093
+ 1
19094
+ 1
19095
+ 1
19096
+ 1
19097
+ 1
19098
+ 1
19099
+ 1
19100
+ 1
19101
+ 1
19102
+ 1
19103
+ 1
19104
+ 1
19105
+ 1
19106
+ 1
19107
+ 1
19108
+ 1
19109
+ 1
19110
+ 1
19111
+ 1
19112
+ 1
19113
+ 1
19114
+ 1
19115
+ 1
19116
+ 1
19117
+ 1
19118
+ 1
19119
+ 1
19120
+ 1
19121
+ 1
19122
+ 1
19123
+ 1
19124
+ 1
19125
+ 1
19126
+ 1
19127
+ 1
19128
+ 1
19129
+ 1
19130
+ 1
19131
+ 1
19132
+ 1
19133
+ 1
19134
+ 1
19135
+ 1
19136
+ 1
19137
+ 1
19138
+ 1
19139
+ 1
19140
+ 1
19141
+ 1
19142
+ 1
19143
+ 1
19144
+ 1
19145
+ 1
19146
+ 1
19147
+ 1
19148
+ 1
19149
+ 1
19150
+ 1
19151
+ 1
19152
+ 1
19153
+ 1
19154
+ 1
19155
+ 1
19156
+ 1
19157
+ 1
19158
+ 1
19159
+ 1
19160
+ 1
19161
+ 1
19162
+ 1
19163
+ 1
19164
+ 1
19165
+ 1
19166
+ 1
19167
+ 1
19168
+ 1
19169
+ 1
19170
+ 1
19171
+ 1
19172
+ 1
19173
+ 1
19174
+ 1
19175
+ 1
19176
+ 1
19177
+ 1
19178
+ 1
19179
+ 1
19180
+ 1
19181
+ 1
19182
+ 1
19183
+ 1
19184
+ 1
19185
+ 1
19186
+ 1
19187
+ 1
19188
+ 1
19189
+ 1
19190
+ 1
19191
+ 1
19192
+ 1
19193
+ 1
19194
+ 1
19195
+ 1
19196
+ 1
19197
+ 1
19198
+ 1
19199
+ 1
19200
+ 1
19201
+ 1
19202
+ 1
19203
+ 1
19204
+ 1
19205
+ 1
19206
+ 1
19207
+ 1
19208
+ 1
19209
+ 1
19210
+ 1
19211
+ 1
19212
+ 1
19213
+ 1
19214
+ 1
19215
+ 1
19216
+ 1
19217
+ 1
19218
+ 1
19219
+ 1
19220
+ 1
19221
+ 1
19222
+ 1
19223
+ 1
19224
+ 1
19225
+ 1
19226
+ 1
19227
+ 1
19228
+ 1
19229
+ 1
19230
+ 1
19231
+ 1
19232
+ 1
19233
+ 1
19234
+ 1
19235
+ 1
19236
+ 1
19237
+ 1
19238
+ 1
19239
+ 1
19240
+ 1
19241
+ 1
19242
+ 1
19243
+ 1
19244
+ 1
19245
+ 1
19246
+ 1
19247
+ 1
19248
+ 1
19249
+ 1
19250
+ 1
19251
+ 1
19252
+ 1
19253
+ 1
19254
+ 1
19255
+ 1
19256
+ 1
19257
+ 1
19258
+ 1
19259
+ 1
19260
+ 1
19261
+ 1
19262
+ 1
19263
+ 1
19264
+ 1
19265
+ 1
19266
+ 1
19267
+ 1
19268
+ 1
19269
+ 1
19270
+ 1
19271
+ 1
19272
+ 1
19273
+ 1
19274
+ 1
19275
+ 1
19276
+ 1
19277
+ 1
19278
+ 1
19279
+ 1
19280
+ 1
19281
+ 1
19282
+ 1
19283
+ 1
19284
+ 1
19285
+ 1
19286
+ 1
19287
+ 1
19288
+ 1
19289
+ 1
19290
+ 1
19291
+ 1
19292
+ 1
19293
+ 1
19294
+ 1
19295
+ 1
19296
+ 1
19297
+ 1
19298
+ 1
19299
+ 1
19300
+ 1
19301
+ 1
19302
+ 1
19303
+ 1
19304
+ 1
19305
+ 1
19306
+ 1
19307
+ 1
19308
+ 1
19309
+ 1
19310
+ 1
19311
+ 1
19312
+ 1
19313
+ 1
19314
+ 1
19315
+ 1
19316
+ 1
19317
+ 1
19318
+ 1
19319
+ 1
19320
+ 1
19321
+ 1
19322
+ 1
19323
+ 1
19324
+ 1
19325
+ 1
19326
+ 1
19327
+ 1
19328
+ 1
19329
+ 1
19330
+ 1
19331
+ 1
19332
+ 1
19333
+ 1
19334
+ 1
19335
+ 1
19336
+ 1
19337
+ 1
19338
+ 1
19339
+ 1
19340
+ 1
19341
+ 1
19342
+ 1
19343
+ 1
19344
+ 1
19345
+ 1
19346
+ 1
19347
+ 1
19348
+ 1
19349
+ 1
19350
+ 1
19351
+ 1
19352
+ 1
19353
+ 1
19354
+ 1
19355
+ 1
19356
+ 1
19357
+ 1
19358
+ 1
19359
+ 1
19360
+ 1
19361
+ 1
19362
+ 1
19363
+ 1
19364
+ 1
19365
+ 1
19366
+ 1
19367
+ 1
19368
+ 1
19369
+ 1
19370
+ 1
19371
+ 1
19372
+ 1
19373
+ 1
19374
+ 1
19375
+ 1
19376
+ 1
19377
+ 1
19378
+ 1
19379
+ 1
19380
+ 1
19381
+ 1
19382
+ 1
19383
+ 1
19384
+ 1
19385
+ 1
19386
+ 1
19387
+ 1
19388
+ 1
19389
+ 1
19390
+ 1
19391
+ 1
19392
+ 1
19393
+ 1
19394
+ 1
19395
+ 1
19396
+ 1
19397
+ 1
19398
+ 1
19399
+ 1
19400
+ 1
19401
+ 1
19402
+ 1
19403
+ 1
19404
+ 1
19405
+ 1
19406
+ 1
19407
+ 1
19408
+ 1
19409
+ 1
19410
+ 1
19411
+ 1
19412
+ 1
19413
+ 1
19414
+ 1
19415
+ 1
19416
+ 1
19417
+ 1
19418
+ 1
19419
+ 1
19420
+ 1
19421
+ 1
19422
+ 1
19423
+ 1
19424
+ 1
19425
+ 1
19426
+ 1
19427
+ 1
19428
+ 1
19429
+ 1
19430
+ 1
19431
+ 1
19432
+ 1
19433
+ 1
19434
+ 1
19435
+ 1
19436
+ 1
19437
+ 1
19438
+ 1
19439
+ 1
19440
+ 1
19441
+ 1
19442
+ 1
19443
+ 1
19444
+ 1
19445
+ 1
19446
+ 1
19447
+ 1
19448
+ 1
19449
+ 1
19450
+ 1
19451
+ 1
19452
+ 1
19453
+ 1
19454
+ 1
19455
+ 1
19456
+ 1
19457
+ 1
19458
+ 1
19459
+ 1
19460
+ 1
19461
+ 1
19462
+ 1
19463
+ 1
19464
+ 1
19465
+ 1
19466
+ 1
19467
+ 1
19468
+ 1
19469
+ 1
19470
+ 1
19471
+ 1
19472
+ 1
19473
+ 1
19474
+ 1
19475
+ 1
19476
+ 1
19477
+ 1
19478
+ 1
19479
+ 1
19480
+ 1
19481
+ 1
19482
+ 1
19483
+ 1
19484
+ 1
19485
+ 1
19486
+ 1
19487
+ 1
19488
+ 1
19489
+ 1
19490
+ 1
19491
+ 1
19492
+ 1
19493
+ 1
19494
+ 1
19495
+ 1
19496
+ 1
19497
+ 1
19498
+ 1
19499
+ 1
19500
+ 1
19501
+ 1
19502
+ 1
19503
+ 1
19504
+ 1
19505
+ 1
19506
+ 1
19507
+ 1
19508
+ 1
19509
+ 1
19510
+ 1
19511
+ 1
19512
+ 1
19513
+ 1
19514
+ 1
19515
+ 1
19516
+ 1
19517
+ 1
19518
+ 1
19519
+ 1
19520
+ 1
19521
+ 1
19522
+ 1
19523
+ 1
19524
+ 1
19525
+ 1
19526
+ 1
19527
+ 1
19528
+ 1
19529
+ 1
19530
+ 1
19531
+ 1
19532
+ 1
19533
+ 1
19534
+ 1
19535
+ 1
19536
+ 1
19537
+ 1
19538
+ 1
19539
+ 1
19540
+ 1
19541
+ 1
19542
+ 1
19543
+ 1
19544
+ 1
19545
+ 1
19546
+ 1
19547
+ 1
19548
+ 1
19549
+ 1
19550
+ 1
19551
+ 1
19552
+ 1
19553
+ 1
19554
+ 1
19555
+ 1
19556
+ 1
19557
+ 1
19558
+ 1
19559
+ 1
19560
+ 1
19561
+ 1
19562
+ 1
19563
+ 1
19564
+ 1
19565
+ 1
19566
+ 1
19567
+ 1
19568
+ 1
19569
+ 1
19570
+ 1
19571
+ 1
19572
+ 1
19573
+ 1
19574
+ 1
19575
+ 1
19576
+ 1
19577
+ 1
19578
+ 1
19579
+ 1
19580
+ 1
19581
+ 1
19582
+ 1
19583
+ 1
19584
+ 1
19585
+ 1
19586
+ 1
19587
+ 1
19588
+ 1
19589
+ 1
19590
+ 1
19591
+ 1
19592
+ 1
19593
+ 1
19594
+ 1
19595
+ 1
19596
+ 1
19597
+ 1
19598
+ 1
19599
+ 1
19600
+ 1
19601
+ 1
19602
+ 1
19603
+ 1
19604
+ 1
19605
+ 1
19606
+ 1
19607
+ 1
19608
+ 1
19609
+ 1
19610
+ 1
19611
+ 1
19612
+ 1
19613
+ 1
19614
+ 1
19615
+ 1
19616
+ 1
19617
+ 1
19618
+ 1
19619
+ 1
19620
+ 1
19621
+ 1
19622
+ 1
19623
+ 1
19624
+ 1
19625
+ 1
19626
+ 1
19627
+ 1
19628
+ 1
19629
+ 1
19630
+ 1
19631
+ 1
19632
+ 1
19633
+ 1
19634
+ 1
19635
+ 1
19636
+ 1
19637
+ 1
19638
+ 1
19639
+ 1
19640
+ 1
19641
+ 1
19642
+ 1
19643
+ 1
19644
+ 1
19645
+ 1
19646
+ 1
19647
+ 1
19648
+ 1
19649
+ 1
19650
+ 1
19651
+ 1
19652
+ 1
19653
+ 1
19654
+ 1
19655
+ 1
19656
+ 1
19657
+ 1
19658
+ 1
19659
+ 1
19660
+ 1
19661
+ 1
19662
+ 1
19663
+ 1
19664
+ 1
19665
+ 1
19666
+ 1
19667
+ 1
19668
+ 1
19669
+ 1
19670
+ 1
19671
+ 1
19672
+ 1
19673
+ 1
19674
+ 1
19675
+ 1
19676
+ 1
19677
+ 1
19678
+ 1
19679
+ 1
19680
+ 1
19681
+ 1
19682
+ 1
19683
+ 1
19684
+ 1
19685
+ 1
19686
+ 1
19687
+ 1
19688
+ 1
19689
+ 1
19690
+ 1
19691
+ 1
19692
+ 1
19693
+ 1
19694
+ 1
19695
+ 1
19696
+ 1
19697
+ 1
19698
+ 1
19699
+ 1
19700
+ 1
19701
+ 1
19702
+ 1
19703
+ 1
19704
+ 1
19705
+ 1
19706
+ 1
19707
+ 1
19708
+ 1
19709
+ 1
19710
+ 1
19711
+ 1
19712
+ 1
19713
+ 1
19714
+ 1
19715
+ 1
19716
+ 1
19717
+ 1
19718
+ 1
19719
+ 1
19720
+ 1
19721
+ 1
19722
+ 1
19723
+ 1
19724
+ 1
19725
+ 1
19726
+ 1
19727
+ 1
19728
+ 1
19729
+ 1
19730
+ 1
19731
+ 1
19732
+ 1
19733
+ 1
19734
+ 1
19735
+ 1
19736
+ 1
19737
+ 1
19738
+ 1
19739
+ 1
19740
+ 1
19741
+ 1
19742
+ 1
19743
+ 1
19744
+ 1
19745
+ 1
19746
+ 1
19747
+ 1
19748
+ 1
19749
+ 1
19750
+ 1
19751
+ 1
19752
+ 1
19753
+ 1
19754
+ 1
19755
+ 1
19756
+ 1
19757
+ 1
19758
+ 1
19759
+ 1
19760
+ 1
19761
+ 1
19762
+ 1
19763
+ 1
19764
+ 1
19765
+ 1
19766
+ 1
19767
+ 1
19768
+ 1
19769
+ 1
19770
+ 1
19771
+ 1
19772
+ 1
19773
+ 1
19774
+ 1
19775
+ 1
19776
+ 1
19777
+ 1
19778
+ 1
19779
+ 1
19780
+ 1
19781
+ 1
19782
+ 1
19783
+ 1
19784
+ 1
19785
+ 1
19786
+ 1
19787
+ 1
19788
+ 1
19789
+ 1
19790
+ 1
19791
+ 1
19792
+ 1
19793
+ 1
19794
+ 1
19795
+ 1
19796
+ 1
19797
+ 1
19798
+ 1
19799
+ 1
19800
+ 1
19801
+ 1
19802
+ 1
19803
+ 1
19804
+ 1
19805
+ 1
19806
+ 1
19807
+ 1
19808
+ 1
19809
+ 1
19810
+ 1
19811
+ 1
19812
+ 1
19813
+ 1
19814
+ 1
19815
+ 1
19816
+ 1
19817
+ 1
19818
+ 1
19819
+ 1
19820
+ 1
19821
+ 1
19822
+ 1
19823
+ 1
19824
+ 1
19825
+ 1
19826
+ 1
19827
+ 1
19828
+ 1
19829
+ 1
19830
+ 1
19831
+ 1
19832
+ 1
19833
+ 1
19834
+ 1
19835
+ 1
19836
+ 1
19837
+ 1
19838
+ 1
19839
+ 1
19840
+ 1
19841
+ 1
19842
+ 1
19843
+ 1
19844
+ 1
19845
+ 1
19846
+ 1
19847
+ 1
19848
+ 1
19849
+ 1
19850
+ 1
19851
+ 1
19852
+ 1
19853
+ 1
19854
+ 1
19855
+ 1
19856
+ 1
19857
+ 1
19858
+ 1
19859
+ 1
19860
+ 1
19861
+ 1
19862
+ 1
19863
+ 1
19864
+ 1
19865
+ 1
19866
+ 1
19867
+ 1
19868
+ 1
19869
+ 1
19870
+ 1
19871
+ 1
19872
+ 1
19873
+ 1
19874
+ 1
19875
+ 1
19876
+ 1
19877
+ 1
19878
+ 1
19879
+ 1
19880
+ 1
19881
+ 1
19882
+ 1
19883
+ 1
19884
+ 1
19885
+ 1
19886
+ 1
19887
+ 1
19888
+ 1
19889
+ 1
19890
+ 1
19891
+ 1
19892
+ 1
19893
+ 1
19894
+ 1
19895
+ 1
19896
+ 1
19897
+ 1
19898
+ 1
19899
+ 1
19900
+ 1
19901
+ 1
19902
+ 1
19903
+ 1
19904
+ 1
19905
+ 1
19906
+ 1
19907
+ 1
19908
+ 1
19909
+ 1
19910
+ 1
19911
+ 1
19912
+ 1
19913
+ 1
19914
+ 1
19915
+ 1
19916
+ 1
19917
+ 1
19918
+ 1
19919
+ 1
19920
+ 1
19921
+ 1
19922
+ 1
19923
+ 1
19924
+ 1
19925
+ 1
19926
+ 1
19927
+ 1
19928
+ 1
19929
+ 1
19930
+ 1
19931
+ 1
19932
+ 1
19933
+ 1
19934
+ 1
19935
+ 1
19936
+ 1
19937
+ 1
19938
+ 1
19939
+ 1
19940
+ 1
19941
+ 1
19942
+ 1
19943
+ 1
19944
+ 1
19945
+ 1
19946
+ 1
19947
+ 1
19948
+ 1
19949
+ 1
19950
+ 1
19951
+ 1
19952
+ 1
19953
+ 1
19954
+ 1
19955
+ 1
19956
+ 1
19957
+ 1
19958
+ 1
19959
+ 1
19960
+ 1
19961
+ 1
19962
+ 1
19963
+ 1
19964
+ 1
19965
+ 1
19966
+ 1
19967
+ 1
19968
+ 1
19969
+ 1
19970
+ 1
19971
+ 1
19972
+ 1
19973
+ 1
19974
+ 1
19975
+ 1
19976
+ 1
19977
+ 1
19978
+ 1
19979
+ 1
19980
+ 1
19981
+ 1
19982
+ 1
19983
+ 1
19984
+ 1
19985
+ 1
19986
+ 1
19987
+ 1
19988
+ 1
19989
+ 1
19990
+ 1
19991
+ 1
19992
+ 1
19993
+ 1
19994
+ 1
19995
+ 1
19996
+ 1
19997
+ 1
19998
+ 1
19999
+ 1
20000
+ 1
20001
+ 1
20002
+ 1
20003
+ 1
20004
+ 1
20005
+ 1
20006
+ 1
20007
+ 1
20008
+ 1
20009
+ 1
20010
+ 1
20011
+ 1
20012
+ 1
20013
+ 1
20014
+ 1
20015
+ 1
20016
+ 1
20017
+ 1
20018
+ 1
20019
+ 1
20020
+ 1
20021
+ 1
20022
+ 1
20023
+ 1
20024
+ 1
20025
+ 1
20026
+ 1
20027
+ 1
20028
+ 1
20029
+ 1
20030
+ 1
20031
+ 1
20032
+ 1
20033
+ 1
20034
+ 1
20035
+ 1
20036
+ 1
20037
+ 1
20038
+ 1
20039
+ 1
20040
+ 1
20041
+ 1
20042
+ 1
20043
+ 1
20044
+ 1
20045
+ 1
20046
+ 1
20047
+ 1
20048
+ 1
20049
+ 1
20050
+ 1
20051
+ 1
20052
+ 1
20053
+ 1
20054
+ 1
20055
+ 1
20056
+ 1
20057
+ 1
20058
+ 1
20059
+ 1
20060
+ 1
20061
+ 1
20062
+ 1
20063
+ 1
20064
+ 1
20065
+ 1
20066
+ 1
20067
+ 1
20068
+ 1
20069
+ 1
20070
+ 1
20071
+ 1
20072
+ 1
20073
+ 1
20074
+ 1
20075
+ 1
20076
+ 1
20077
+ 1
20078
+ 1
20079
+ 1
20080
+ 1
20081
+ 1
20082
+ 1
20083
+ 1
20084
+ 1
20085
+ 1
20086
+ 1
20087
+ 1
20088
+ 1
20089
+ 1
20090
+ 1
20091
+ 1
20092
+ 1
20093
+ 1
20094
+ 1
20095
+ 1
20096
+ 1
20097
+ 1
20098
+ 1
20099
+ 1
20100
+ 1
20101
+ 1
20102
+ 1
20103
+ 1
20104
+ 1
20105
+ 1
20106
+ 1
20107
+ 1
20108
+ 1
20109
+ 1
20110
+ 1
20111
+ 1
20112
+ 1
20113
+ 1
20114
+ 1
20115
+ 1
20116
+ 1
20117
+ 1
20118
+ 1
20119
+ 1
20120
+ 1
20121
+ 1
20122
+ 1
20123
+ 1
20124
+ 1
20125
+ 1
20126
+ 1
20127
+ 1
20128
+ 1
20129
+ 1
20130
+ 1
20131
+ 1
20132
+ 1
20133
+ 1
20134
+ 1
20135
+ 1
20136
+ 1
20137
+ 1
20138
+ 1
20139
+ 1
20140
+ 1
20141
+ 1
20142
+ 1
20143
+ 1
20144
+ 1
20145
+ 1
20146
+ 1
20147
+ 1
20148
+ 1
20149
+ 1
20150
+ 1
20151
+ 1
20152
+ 1
20153
+ 1
20154
+ 1
20155
+ 1
20156
+ 1
20157
+ 1
20158
+ 1
20159
+ 1
20160
+ 1
20161
+ 1
20162
+ 1
20163
+ 1
20164
+ 1
20165
+ 1
20166
+ 1
20167
+ 1
20168
+ 1
20169
+ 1
20170
+ 1
20171
+ 1
20172
+ 1
20173
+ 1
20174
+ 1
20175
+ 1
20176
+ 1
20177
+ 1
20178
+ 1
20179
+ 1
20180
+ 1
20181
+ 1
20182
+ 1
20183
+ 1
20184
+ 1
20185
+ 1
20186
+ 1
20187
+ 1
20188
+ 1
20189
+ 1
20190
+ 1
20191
+ 1
20192
+ 1
20193
+ 1
20194
+ 1
20195
+ 1
20196
+ 1
20197
+ 1
20198
+ 1
20199
+ 1
20200
+ 1
20201
+ 1
20202
+ 1
20203
+ 1
20204
+ 1
20205
+ 1
20206
+ 1
20207
+ 1
20208
+ 1
20209
+ 1
20210
+ 1
20211
+ 1
20212
+ 1
20213
+ 1
20214
+ 1
20215
+ 1
20216
+ 1
20217
+ 1
20218
+ 1
20219
+ 1
20220
+ 1
20221
+ 1
20222
+ 1
20223
+ 1
20224
+ 1
20225
+ 1
20226
+ 1
20227
+ 1
20228
+ 1
20229
+ 1
20230
+ 1
20231
+ 1
20232
+ 1
20233
+ 1
20234
+ 1
20235
+ 1
20236
+ 1
20237
+ 1
20238
+ 1
20239
+ 1
20240
+ 1
20241
+ 1
20242
+ 1
20243
+ 1
20244
+ 1
20245
+ 1
20246
+ 1
20247
+ 1
20248
+ 1
20249
+ 1
20250
+ 1
20251
+ 1
20252
+ 1
20253
+ 1
20254
+ 1
20255
+ 1
20256
+ 1
20257
+ 1
20258
+ 1
20259
+ 1
20260
+ 1
20261
+ 1
20262
+ 1
20263
+ 1
20264
+ 1
20265
+ 1
20266
+ 1
20267
+ 1
20268
+ 1
20269
+ 1
20270
+ 1
20271
+ 1
20272
+ 1
20273
+ 1
20274
+ 1
20275
+ 1
20276
+ 1
20277
+ 1
20278
+ 1
20279
+ 1
20280
+ 1
20281
+ 1
20282
+ 1
20283
+ 1
20284
+ 1
20285
+ 1
20286
+ 1
20287
+ 1
20288
+ 1
20289
+ 1
20290
+ 1
20291
+ 1
20292
+ 1
20293
+ 1
20294
+ 1
20295
+ 1
20296
+ 1
20297
+ 1
20298
+ 1
20299
+ 1
20300
+ 1
20301
+ 1
20302
+ 1
20303
+ 1
20304
+ 1
20305
+ 1
20306
+ 1
20307
+ 1
20308
+ 1
20309
+ 1
20310
+ 1
20311
+ 1
20312
+ 1
20313
+ 1
20314
+ 1
20315
+ 1
20316
+ 1
20317
+ 1
20318
+ 1
20319
+ 1
20320
+ 1
20321
+ 1
20322
+ 1
20323
+ 1
20324
+ 1
20325
+ 1
20326
+ 1
20327
+ 1
20328
+ 1
20329
+ 1
20330
+ 1
20331
+ 1
20332
+ 1
20333
+ 1
20334
+ 1
20335
+ 1
20336
+ 1
20337
+ 1
20338
+ 1
20339
+ 1
20340
+ 1
20341
+ 1
20342
+ 1
20343
+ 1
20344
+ 1
20345
+ 1
20346
+ 1
20347
+ 1
20348
+ 1
20349
+ 1
20350
+ 1
20351
+ 1
20352
+ 1
20353
+ 1
20354
+ 1
20355
+ 1
20356
+ 1
20357
+ 1
20358
+ 1
20359
+ 1
20360
+ 1
20361
+ 1
20362
+ 1
20363
+ 1
20364
+ 1
20365
+ 1
20366
+ 1
20367
+ 1
20368
+ 1
20369
+ 1
20370
+ 1
20371
+ 1
20372
+ 1
20373
+ 1
20374
+ 1
20375
+ 1
20376
+ 1
20377
+ 1
20378
+ 1
20379
+ 1
20380
+ 1
20381
+ 1
20382
+ 1
20383
+ 1
20384
+ 1
20385
+ 1
20386
+ 1
20387
+ 1
20388
+ 1
20389
+ 1
20390
+ 1
20391
+ 1
20392
+ 1
20393
+ 1
20394
+ 1
20395
+ 1
20396
+ 1
20397
+ 1
20398
+ 1
20399
+ 1
20400
+ 1
20401
+ 1
20402
+ 1
20403
+ 1
20404
+ 1
20405
+ 1
20406
+ 1
20407
+ 1
20408
+ 1
20409
+ 1
20410
+ 1
20411
+ 1
20412
+ 1
20413
+ 1
20414
+ 1
20415
+ 1
20416
+ 1
20417
+ 1
20418
+ 1
20419
+ 1
20420
+ 1
20421
+ 1
20422
+ 1
20423
+ 1
20424
+ 1
20425
+ 1
20426
+ 1
20427
+ 1
20428
+ 1
20429
+ 1
20430
+ 1
20431
+ 1
20432
+ 1
20433
+ 1
20434
+ 1
20435
+ 1
20436
+ 1
20437
+ 1
20438
+ 1
20439
+ 1
20440
+ 1
20441
+ 1
20442
+ 1
20443
+ 1
20444
+ 1
20445
+ 1
20446
+ 1
20447
+ 1
20448
+ 1
20449
+ 1
20450
+ 1
20451
+ 1
20452
+ 1
20453
+ 1
20454
+ 1
20455
+ 1
20456
+ 1
20457
+ 1
20458
+ 1
20459
+ 1
20460
+ 1
20461
+ 1
20462
+ 1
20463
+ 1
20464
+ 1
20465
+ 1
20466
+ 1
20467
+ 1
20468
+ 1
20469
+ 1
20470
+ 1
20471
+ 1
20472
+ 1
20473
+ 1
20474
+ 1
20475
+ 1
20476
+ 1
20477
+ 1
20478
+ 1
20479
+ 1
20480
+ 1
20481
+ 1
20482
+ 1
20483
+ 1
20484
+ 1
20485
+ 1
20486
+ 1
20487
+ 1
20488
+ 1
20489
+ 1
20490
+ 1
20491
+ 1
20492
+ 1
20493
+ 1
20494
+ 1
20495
+ 1
20496
+ 1
20497
+ 1
20498
+ 1
20499
+ 1
20500
+ 1
20501
+ 1
20502
+ 1
20503
+ 1
20504
+ 1
20505
+ 1
20506
+ 1
20507
+ 1
20508
+ 1
20509
+ 1
20510
+ 1
20511
+ 1
20512
+ 1
20513
+ 1
20514
+ 1
20515
+ 1
20516
+ 1
20517
+ 1
20518
+ 1
20519
+ 1
20520
+ 1
20521
+ 1
20522
+ 1
20523
+ 1
20524
+ 1
20525
+ 1
20526
+ 1
20527
+ 1
20528
+ 1
20529
+ 1
20530
+ 1
20531
+ 1
20532
+ 1
20533
+ 1
20534
+ 1
20535
+ 1
20536
+ 1
20537
+ 1
20538
+ 1
20539
+ 1
20540
+ 1
20541
+ 1
20542
+ 1
20543
+ 1
20544
+ 1
20545
+ 1
20546
+ 1
20547
+ 1
20548
+ 1
20549
+ 1
20550
+ 1
20551
+ 1
20552
+ 1
20553
+ 1
20554
+ 1
20555
+ 1
20556
+ 1
20557
+ 1
20558
+ 1
20559
+ 1
20560
+ 1
20561
+ 1
20562
+ 1
20563
+ 1
20564
+ 1
20565
+ 1
20566
+ 1
20567
+ 1
20568
+ 1
20569
+ 1
20570
+ 1
20571
+ 1
20572
+ 1
20573
+ 1
20574
+ 1
20575
+ 1
20576
+ 1
20577
+ 1
20578
+ 1
20579
+ 1
20580
+ 1
20581
+ 1
20582
+ 1
20583
+ 1
20584
+ 1
20585
+ 1
20586
+ 1
20587
+ 1
20588
+ 1
20589
+ 1
20590
+ 1
20591
+ 1
20592
+ 1
20593
+ 1
20594
+ 1
20595
+ 1
20596
+ 1
20597
+ 1
20598
+ 1
20599
+ 1
20600
+ 1
20601
+ 1
20602
+ 1
20603
+ 1
20604
+ 1
20605
+ 1
20606
+ 1
20607
+ 1
20608
+ 1
20609
+ 1
20610
+ 1
20611
+ 1
20612
+ 1
20613
+ 1
20614
+ 1
20615
+ 1
20616
+ 1
20617
+ 1
20618
+ 1
20619
+ 1
20620
+ 1
20621
+ 1
20622
+ 1
20623
+ 1
20624
+ 1
20625
+ 1
20626
+ 1
20627
+ 1
20628
+ 1
20629
+ 1
20630
+ 1
20631
+ 1
20632
+ 1
20633
+ 1
20634
+ 1
20635
+ 1
20636
+ 1
20637
+ 1
20638
+ 1
20639
+ 1
20640
+ 1
20641
+ 1
20642
+ 1
20643
+ 1
20644
+ 1
20645
+ 1
20646
+ 1
20647
+ 1
20648
+ 1
20649
+ 1
20650
+ 1
20651
+ 1
20652
+ 1
20653
+ 1
20654
+ 1
20655
+ 1
20656
+ 1
20657
+ 1
20658
+ 1
20659
+ 1
20660
+ 1
20661
+ 1
20662
+ 1
20663
+ 1
20664
+ 1
20665
+ 1
20666
+ 1
20667
+ 1
20668
+ 1
20669
+ 1
20670
+ 1
20671
+ 1
20672
+ 1
20673
+ 1
20674
+ 1
20675
+ 1
20676
+ 1
20677
+ 1
20678
+ 1
20679
+ 1
20680
+ 1
20681
+ 1
20682
+ 1
20683
+ 1
20684
+ 1
20685
+ 1
20686
+ 1
20687
+ 1
20688
+ 1
20689
+ 1
20690
+ 1
20691
+ 1
20692
+ 1
20693
+ 1
20694
+ 1
20695
+ 1
20696
+ 1
20697
+ 1
20698
+ 1
20699
+ 1
20700
+ 1
20701
+ 1
20702
+ 1
20703
+ 1
20704
+ 1
20705
+ 1
20706
+ 1
20707
+ 1
20708
+ 1
20709
+ 1
20710
+ 1
20711
+ 1
20712
+ 1
20713
+ 1
20714
+ 1
20715
+ 1
20716
+ 1
20717
+ 1
20718
+ 1
20719
+ 1
20720
+ 1
20721
+ 1
20722
+ 1
20723
+ 1
20724
+ 1
20725
+ 1
20726
+ 1
20727
+ 1
20728
+ 1
20729
+ 1
20730
+ 1
20731
+ 1
20732
+ 1
20733
+ 1
20734
+ 1
20735
+ 1
20736
+ 1
20737
+ 1
20738
+ 1
20739
+ 1
20740
+ 1
20741
+ 1
20742
+ 1
20743
+ 1
20744
+ 1
20745
+ 1
20746
+ 1
20747
+ 1
20748
+ 1
20749
+ 1
20750
+ 1
20751
+ 1
20752
+ 1
20753
+ 1
20754
+ 1
20755
+ 1
20756
+ 1
20757
+ 1
20758
+ 1
20759
+ 1
20760
+ 1
20761
+ 1
20762
+ 1
20763
+ 1
20764
+ 1
20765
+ 1
20766
+ 1
20767
+ 1
20768
+ 1
20769
+ 1
20770
+ 1
20771
+ 1
20772
+ 1
20773
+ 1
20774
+ 1
20775
+ 1
20776
+ 1
20777
+ 1
20778
+ 1
20779
+ 1
20780
+ 1
20781
+ 1
20782
+ 1
20783
+ 1
20784
+ 1
20785
+ 1
20786
+ 1
20787
+ 1
20788
+ 1
20789
+ 1
20790
+ 1
20791
+ 1
20792
+ 1
20793
+ 1
20794
+ 1
20795
+ 1
20796
+ 1
20797
+ 1
20798
+ 1
20799
+ 1
20800
+ 1
20801
+ 1
20802
+ 1
20803
+ 1
20804
+ 1
20805
+ 1
20806
+ 1
20807
+ 1
20808
+ 1
20809
+ 1
20810
+ 1
20811
+ 1
20812
+ 1
20813
+ 1
20814
+ 1
20815
+ 1
20816
+ 1
20817
+ 1
20818
+ 1
20819
+ 1
20820
+ 1
20821
+ 1
20822
+ 1
20823
+ 1
20824
+ 1
20825
+ 1
20826
+ 1
20827
+ 1
20828
+ 1
20829
+ 1
20830
+ 1
20831
+ 1
20832
+ 1
20833
+ 1
20834
+ 1
20835
+ 1
20836
+ 1
20837
+ 1
20838
+ 1
20839
+ 1
20840
+ 1
20841
+ 1
20842
+ 1
20843
+ 1
20844
+ 1
20845
+ 1
20846
+ 1
20847
+ 1
20848
+ 1
20849
+ 1
20850
+ 1
20851
+ 1
20852
+ 1
20853
+ 1
20854
+ 1
20855
+ 1
20856
+ 1
20857
+ 1
20858
+ 1
20859
+ 1
20860
+ 1
20861
+ 1
20862
+ 1
20863
+ 1
20864
+ 1
20865
+ 1
20866
+ 1
20867
+ 1
20868
+ 1
20869
+ 1
20870
+ 1
20871
+ 1
20872
+ 1
20873
+ 1
20874
+ 1
20875
+ 1
20876
+ 1
20877
+ 1
20878
+ 1
20879
+ 1
20880
+ 1
20881
+ 1
20882
+ 1
20883
+ 1
20884
+ 1
20885
+ 1
20886
+ 1
20887
+ 1
20888
+ 1
20889
+ 1
20890
+ 1
20891
+ 1
20892
+ 1
20893
+ 1
20894
+ 1
20895
+ 1
20896
+ 1
20897
+ 1
20898
+ 1
20899
+ 1
20900
+ 1
20901
+ 1
20902
+ 1
20903
+ 1
20904
+ 1
20905
+ 1
20906
+ 1
20907
+ 1
20908
+ 1
20909
+ 1
20910
+ 1
20911
+ 1
20912
+ 1
20913
+ 1
20914
+ 1
20915
+ 1
20916
+ 1
20917
+ 1
20918
+ 1
20919
+ 1
20920
+ 1
20921
+ 1
20922
+ 1
20923
+ 1
20924
+ 1
20925
+ 1
20926
+ 1
20927
+ 1
20928
+ 1
20929
+ 1
20930
+ 1
20931
+ 1
20932
+ 1
20933
+ 1
20934
+ 1
20935
+ 1
20936
+ 1
20937
+ 1
20938
+ 1
20939
+ 1
20940
+ 1
20941
+ 1
20942
+ 1
20943
+ 1
20944
+ 1
20945
+ 1
20946
+ 1
20947
+ 1
20948
+ 1
20949
+ 1
20950
+ 1
20951
+ 1
20952
+ 1
20953
+ 1
20954
+ 1
20955
+ 1
20956
+ 1
20957
+ 1
20958
+ 1
20959
+ 1
20960
+ 1
20961
+ 1
20962
+ 1
20963
+ 1
20964
+ 1
20965
+ 1
20966
+ 1
20967
+ 1
20968
+ 1
20969
+ 1
20970
+ 1
20971
+ 1
20972
+ 1
20973
+ 1
20974
+ 1
20975
+ 1
20976
+ 1
20977
+ 1
20978
+ 1
20979
+ 1
20980
+ 1
20981
+ 1
20982
+ 1
20983
+ 1
20984
+ 1
20985
+ 1
20986
+ 1
20987
+ 1
20988
+ 1
20989
+ 1
20990
+ 1
20991
+ 1
20992
+ 1
20993
+ 1
20994
+ 1
20995
+ 1
20996
+ 1
20997
+ 1
20998
+ 1
20999
+ 1
21000
+ 1
21001
+ 1
21002
+ 1
21003
+ 1
21004
+ 1
21005
+ 1
21006
+ 1
21007
+ 1
21008
+ 1
21009
+ 1
21010
+ 1
21011
+ 1
21012
+ 1
21013
+ 1
21014
+ 1
21015
+ 1
21016
+ 1
21017
+ 1
21018
+ 1
21019
+ 1
21020
+ 1
21021
+ 1
21022
+ 1
21023
+ 1
21024
+ 1
21025
+ 1
21026
+ 1
21027
+ 1
21028
+ 1
21029
+ 1
21030
+ 1
21031
+ 1
21032
+ 1
21033
+ 1
21034
+ 1
21035
+ 1
21036
+ 1
21037
+ 1
21038
+ 1
21039
+ 1
21040
+ 1
21041
+ 1
21042
+ 1
21043
+ 1
21044
+ 1
21045
+ 1
21046
+ 1
21047
+ 1
21048
+ 1
21049
+ 1
21050
+ 1
21051
+ 1
21052
+ 1
21053
+ 1
21054
+ 1
21055
+ 1
21056
+ 1
21057
+ 1
21058
+ 1
21059
+ 1
21060
+ 1
21061
+ 1
21062
+ 1
21063
+ 1
21064
+ 1
21065
+ 1
21066
+ 1
21067
+ 1
21068
+ 1
21069
+ 1
21070
+ 1
21071
+ 1
21072
+ 1
21073
+ 1
21074
+ 1
21075
+ 1
21076
+ 1
21077
+ 1
21078
+ 1
21079
+ 1
21080
+ 1
21081
+ 1
21082
+ 1
21083
+ 1
21084
+ 1
21085
+ 1
21086
+ 1
21087
+ 1
21088
+ 1
21089
+ 1
21090
+ 1
21091
+ 1
21092
+ 1
21093
+ 1
21094
+ 1
21095
+ 1
21096
+ 1
21097
+ 1
21098
+ 1
21099
+ 1
21100
+ 1
21101
+ 1
21102
+ 1
21103
+ 1
21104
+ 1
21105
+ 1
21106
+ 1
21107
+ 1
21108
+ 1
21109
+ 1
21110
+ 1
21111
+ 1
21112
+ 1
21113
+ 1
21114
+ 1
21115
+ 1
21116
+ 1
21117
+ 1
21118
+ 1
21119
+ 1
21120
+ 1
21121
+ 1
21122
+ 1
21123
+ 1
21124
+ 1
21125
+ 1
21126
+ 1
21127
+ 1
21128
+ 1
21129
+ 1
21130
+ 1
21131
+ 1
21132
+ 1
21133
+ 1
21134
+ 1
21135
+ 1
21136
+ 1
21137
+ 1
21138
+ 1
21139
+ 1
21140
+ 1
21141
+ 1
21142
+ 1
21143
+ 1
21144
+ 1
21145
+ 1
21146
+ 1
21147
+ 1
21148
+ 1
21149
+ 1
21150
+ 1
21151
+ 1
21152
+ 1
21153
+ 1
21154
+ 1
21155
+ 1
21156
+ 1
21157
+ 1
21158
+ 1
21159
+ 1
21160
+ 1
21161
+ 1
21162
+ 1
21163
+ 1
21164
+ 1
21165
+ 1
21166
+ 1
21167
+ 1
21168
+ 1
21169
+ 1
21170
+ 1
21171
+ 1
21172
+ 1
21173
+ 1
21174
+ 1
21175
+ 1
21176
+ 1
21177
+ 2
21178
+ 1
21179
+ 1
21180
+ 1
21181
+ 2
21182
+ 1
21183
+ 1
21184
+ 1
21185
+ 1
21186
+ 1
21187
+ 1
21188
+ 1
21189
+ 1
21190
+ 1
21191
+ 2
21192
+ 1
21193
+ 1
21194
+ 1
21195
+ 1
21196
+ 1
21197
+ 1
21198
+ 1
21199
+ 1
21200
+ 1
21201
+ 1
21202
+ 1
21203
+ 1
21204
+ 1
21205
+ 1
21206
+ 1
21207
+ 1
21208
+ 1
21209
+ 1
21210
+ 1
21211
+ 1
21212
+ 1
21213
+ 1
21214
+ 1
21215
+ 1
21216
+ 1
21217
+ 1
21218
+ 1
21219
+ 1
21220
+ 1
21221
+ 1
21222
+ 1
21223
+ 1
21224
+ 1
21225
+ 1
21226
+ 1
21227
+ 1
21228
+ 1
21229
+ 1
21230
+ 1
21231
+ 1
21232
+ 1
21233
+ 1
21234
+ 1
21235
+ 1
21236
+ 1
21237
+ 1
21238
+ 1
21239
+ 1
21240
+ 1
21241
+ 1
21242
+ 1
21243
+ 1
21244
+ 1
21245
+ 1
21246
+ 1
21247
+ 1
21248
+ 1
21249
+ 1
21250
+ 1
21251
+ 1
21252
+ 1
21253
+ 1
21254
+ 1
21255
+ 1
21256
+ 1
21257
+ 1
21258
+ 1
21259
+ 1
21260
+ 1
21261
+ 1
21262
+ 1
21263
+ 1
21264
+ 1
21265
+ 1
21266
+ 1
21267
+ 1
21268
+ 1
21269
+ 1
21270
+ 1
21271
+ 1
21272
+ 1
21273
+ 1
21274
+ 1
21275
+ 1
21276
+ 1
21277
+ 1
21278
+ 1
21279
+ 1
21280
+ 1
21281
+ 1
21282
+ 1
21283
+ 1
21284
+ 1
21285
+ 1
21286
+ 1
21287
+ 1
21288
+ 1
21289
+ 1
21290
+ 1
21291
+ 1
21292
+ 1
21293
+ 1
21294
+ 1
21295
+ 1
21296
+ 1
21297
+ 1
21298
+ 1
21299
+ 1
21300
+ 1
21301
+ 1
21302
+ 1
21303
+ 1
21304
+ 1
21305
+ 1
21306
+ 1
21307
+ 1
21308
+ 1
21309
+ 1
21310
+ 1
21311
+ 1
21312
+ 1
21313
+ 1
21314
+ 1
21315
+ 1
21316
+ 1
21317
+ 1
21318
+ 1
21319
+ 1
21320
+ 1
21321
+ 1
21322
+ 1
21323
+ 1
21324
+ 1
21325
+ 1
21326
+ 1
21327
+ 1
21328
+ 1
21329
+ 1
21330
+ 1
21331
+ 1
21332
+ 1
21333
+ 1
21334
+ 1
21335
+ 1
21336
+ 1
21337
+ 1
21338
+ 1
21339
+ 1
21340
+ 1
21341
+ 1
21342
+ 1
21343
+ 1
21344
+ 1
21345
+ 1
21346
+ 1
21347
+ 1
21348
+ 1
21349
+ 1
21350
+ 1
21351
+ 1
21352
+ 1
21353
+ 1
21354
+ 1
21355
+ 1
21356
+ 1
21357
+ 1
21358
+ 1
21359
+ 1
21360
+ 1
21361
+ 1
21362
+ 1
21363
+ 1
21364
+ 1
21365
+ 1
21366
+ 1
21367
+ 1
21368
+ 1
21369
+ 1
21370
+ 1
21371
+ 1
21372
+ 1
21373
+ 1
21374
+ 1
21375
+ 1
21376
+ 1
21377
+ 1
21378
+ 1
21379
+ 1
21380
+ 1
21381
+ 1
21382
+ 1
21383
+ 1
21384
+ 1
21385
+ 1
21386
+ 1
21387
+ 1
21388
+ 1
21389
+ 1
21390
+ 1
21391
+ 1
21392
+ 1
21393
+ 1
21394
+ 1
21395
+ 1
21396
+ 1
21397
+ 1
21398
+ 1
21399
+ 1
21400
+ 1
21401
+ 1
21402
+ 1
21403
+ 1
21404
+ 1
21405
+ 1
21406
+ 1
21407
+ 1
21408
+ 1
21409
+ 1
21410
+ 1
21411
+ 1
21412
+ 1
21413
+ 1
21414
+ 1
21415
+ 1
21416
+ 1
21417
+ 1
21418
+ 1
21419
+ 1
21420
+ 1
21421
+ 1
21422
+ 1
21423
+ 1
21424
+ 1
21425
+ 1
21426
+ 1
21427
+ 1
21428
+ 1
21429
+ 1
21430
+ 1
21431
+ 1
21432
+ 1
21433
+ 1
21434
+ 1
21435
+ 1
21436
+ 1
21437
+ 1
21438
+ 1
21439
+ 1
21440
+ 1
21441
+ 1
21442
+ 1
21443
+ 1
21444
+ 1
21445
+ 1
21446
+ 1
21447
+ 1
21448
+ 1
21449
+ 1
21450
+ 1
21451
+ 1
21452
+ 1
21453
+ 1
21454
+ 1
21455
+ 1
21456
+ 1
21457
+ 1
21458
+ 1
21459
+ 1
21460
+ 1
21461
+ 1
21462
+ 1
21463
+ 1
21464
+ 1
21465
+ 1
21466
+ 1
21467
+ 1
21468
+ 1
21469
+ 1
21470
+ 1
21471
+ 1
21472
+ 1
21473
+ 1
21474
+ 1
21475
+ 1
21476
+ 1
21477
+ 1
21478
+ 1
21479
+ 1
21480
+ 1
21481
+ 1
21482
+ 1
21483
+ 1
21484
+ 1
21485
+ 1
21486
+ 1
21487
+ 1
21488
+ 1
21489
+ 1
21490
+ 1
21491
+ 1
21492
+ 1
21493
+ 1
21494
+ 1
21495
+ 1
21496
+ 1
21497
+ 1
21498
+ 1
21499
+ 1
21500
+ 1
21501
+ 1
21502
+ 1
21503
+ 1
21504
+ 1
21505
+ 1
21506
+ 1
21507
+ 1
21508
+ 1
21509
+ 1
21510
+ 1
21511
+ 1
21512
+ 1
21513
+ 1
21514
+ 1
21515
+ 1
21516
+ 1
21517
+ 1
21518
+ 1
21519
+ 1
21520
+ 1
21521
+ 1
21522
+ 1
21523
+ 1
21524
+ 1
21525
+ 1
21526
+ 1
21527
+ 1
21528
+ 1
21529
+ 1
21530
+ 1
21531
+ 1
21532
+ 1
21533
+ 1
21534
+ 1
21535
+ 1
21536
+ 1
21537
+ 1
21538
+ 1
21539
+ 1
21540
+ 1
21541
+ 1
21542
+ 1
21543
+ 1
21544
+ 1
21545
+ 1
21546
+ 1
21547
+ 1
21548
+ 1
21549
+ 1
21550
+ 1
21551
+ 1
21552
+ 1
21553
+ 1
21554
+ 1
21555
+ 1
21556
+ 1
21557
+ 1
21558
+ 1
21559
+ 1
21560
+ 1
21561
+ 1
21562
+ 1
21563
+ 1
21564
+ 1
21565
+ 1
21566
+ 1
21567
+ 1
21568
+ 1
21569
+ 1
21570
+ 1
21571
+ 1
21572
+ 1
21573
+ 1
21574
+ 1
21575
+ 1
21576
+ 1
21577
+ 1
21578
+ 1
21579
+ 1
21580
+ 1
21581
+ 1
21582
+ 1
21583
+ 1
21584
+ 1
21585
+ 1
21586
+ 1
21587
+ 1
21588
+ 1
21589
+ 1
21590
+ 1
21591
+ 1
21592
+ 1
21593
+ 1
21594
+ 1
21595
+ 1
21596
+ 1
21597
+ 1
21598
+ 1
21599
+ 1
21600
+ 1
21601
+ 1
21602
+ 1
21603
+ 1
21604
+ 1
21605
+ 1
21606
+ 1
21607
+ 1
21608
+ 1
21609
+ 1
21610
+ 1
21611
+ 1
21612
+ 1
21613
+ 1
21614
+ 1
21615
+ 1
21616
+ 1
21617
+ 1
21618
+ 1
21619
+ 1
21620
+ 1
21621
+ 1
21622
+ 1
21623
+ 1
21624
+ 1
21625
+ 1
21626
+ 1
21627
+ 1
21628
+ 1
21629
+ 1
21630
+ 1
21631
+ 1
21632
+ 1
21633
+ 1
21634
+ 1
21635
+ 1
21636
+ 1
21637
+ 1
21638
+ 1
21639
+ 1
21640
+ 1
21641
+ 1
21642
+ 1
21643
+ 1
21644
+ 1
21645
+ 1
21646
+ 1
21647
+ 1
21648
+ 1
21649
+ 1
21650
+ 1
21651
+ 1
21652
+ 1
21653
+ 1
21654
+ 1
21655
+ 1
21656
+ 1
21657
+ 1
21658
+ 1
21659
+ 1
21660
+ 1
21661
+ 1
21662
+ 1
21663
+ 1
21664
+ 1
21665
+ 1
21666
+ 1
21667
+ 1
21668
+ 1
21669
+ 1
21670
+ 1
21671
+ 1
21672
+ 1
21673
+ 1
21674
+ 1
21675
+ 1
21676
+ 1
21677
+ 1
21678
+ 1
21679
+ 1
21680
+ 1
21681
+ 1
21682
+ 1
21683
+ 1
21684
+ 1
21685
+ 1
21686
+ 1
21687
+ 1
21688
+ 1
21689
+ 1
21690
+ 1
21691
+ 1
21692
+ 1
21693
+ 1
21694
+ 1
21695
+ 1
21696
+ 1
21697
+ 1
21698
+ 1
21699
+ 1
21700
+ 1
21701
+ 1
21702
+ 1
21703
+ 1
21704
+ 1
21705
+ 1
21706
+ 1
21707
+ 1
21708
+ 1
21709
+ 1
21710
+ 1
21711
+ 1
21712
+ 1
21713
+ 1
21714
+ 1
21715
+ 1
21716
+ 1
21717
+ 1
21718
+ 1
21719
+ 1
21720
+ 1
21721
+ 1
21722
+ 1
21723
+ 1
21724
+ 1
21725
+ 1
21726
+ 1
21727
+ 1
21728
+ 1
21729
+ 1
21730
+ 1
21731
+ 1
21732
+ 1
21733
+ 1
21734
+ 1
21735
+ 1
21736
+ 1
21737
+ 1
21738
+ 1
21739
+ 1
21740
+ 1
21741
+ 1
21742
+ 1
21743
+ 1
21744
+ 1
21745
+ 1
21746
+ 1
21747
+ 1
21748
+ 1
21749
+ 1
21750
+ 1
21751
+ 1
21752
+ 1
21753
+ 1
21754
+ 1
21755
+ 1
21756
+ 1
21757
+ 1
21758
+ 1
21759
+ 1
21760
+ 1
21761
+ 1
21762
+ 1
21763
+ 1
21764
+ 1
21765
+ 1
21766
+ 1
21767
+ 1
21768
+ 1
21769
+ 1
21770
+ 1
21771
+ 1
21772
+ 1
21773
+ 1
21774
+ 1
21775
+ 1
21776
+ 1
21777
+ 1
21778
+ 1
21779
+ 1
21780
+ 1
21781
+ 1
21782
+ 1
21783
+ 1
21784
+ 1
21785
+ 1
21786
+ 1
21787
+ 1
21788
+ 1
21789
+ 1
21790
+ 1
21791
+ 1
21792
+ 1
21793
+ 1
21794
+ 1
21795
+ 1
21796
+ 1
21797
+ 1
21798
+ 1
21799
+ 1
21800
+ 1
21801
+ 1
21802
+ 1
21803
+ 2
21804
+ 2
21805
+ 1
21806
+ 1
21807
+ 2
21808
+ 1
21809
+ 1
21810
+ 1
21811
+ 1
21812
+ 1
21813
+ 1
21814
+ 1
21815
+ 1
21816
+ 1
21817
+ 1
21818
+ 1
21819
+ 1
21820
+ 1
21821
+ 1
21822
+ 1
21823
+ 1
21824
+ 1
21825
+ 1
21826
+ 1
21827
+ 1
21828
+ 1
21829
+ 1
21830
+ 1
21831
+ 1
21832
+ 1
21833
+ 1
21834
+ 1
21835
+ 1
21836
+ 1
21837
+ 1
21838
+ 1
21839
+ 1
21840
+ 1
21841
+ 1
21842
+ 1
21843
+ 1
21844
+ 1
21845
+ 1
21846
+ 1
21847
+ 1
21848
+ 1
21849
+ 1
21850
+ 1
21851
+ 1
21852
+ 1
21853
+ 1
21854
+ 1
21855
+ 1
21856
+ 1
21857
+ 1
21858
+ 1
21859
+ 1
21860
+ 1
21861
+ 1
21862
+ 1
21863
+ 1
21864
+ 1
21865
+ 1
21866
+ 1
21867
+ 1
21868
+ 1
21869
+ 1
21870
+ 1
21871
+ 1
21872
+ 1
21873
+ 1
21874
+ 1
21875
+ 1
21876
+ 1
21877
+ 1
21878
+ 1
21879
+ 1
21880
+ 1
21881
+ 1
21882
+ 1
21883
+ 1
21884
+ 1
21885
+ 1
21886
+ 1
21887
+ 1
21888
+ 1
21889
+ 1
21890
+ 1
21891
+ 1
21892
+ 1
21893
+ 1
21894
+ 1
21895
+ 1
21896
+ 1
21897
+ 1
21898
+ 1
21899
+ 1
21900
+ 1
21901
+ 1
21902
+ 1
21903
+ 1
21904
+ 1
21905
+ 1
21906
+ 1
21907
+ 1
21908
+ 1
21909
+ 1
21910
+ 1
21911
+ 1
21912
+ 1
21913
+ 1
21914
+ 1
21915
+ 1
21916
+ 1
21917
+ 1
21918
+ 1
21919
+ 1
21920
+ 1
21921
+ 1
21922
+ 1
21923
+ 1
21924
+ 1
21925
+ 1
21926
+ 1
21927
+ 1
21928
+ 1
21929
+ 1
21930
+ 1
21931
+ 1
21932
+ 1
21933
+ 1
21934
+ 1
21935
+ 1
21936
+ 1
21937
+ 1
21938
+ 1
21939
+ 1
21940
+ 1
21941
+ 1
21942
+ 1
21943
+ 1
21944
+ 1
21945
+ 1
21946
+ 1
21947
+ 1
21948
+ 1
21949
+ 1
21950
+ 1
21951
+ 1
21952
+ 1
21953
+ 1
21954
+ 1
21955
+ 1
21956
+ 1
21957
+ 1
21958
+ 1
21959
+ 1
21960
+ 1
21961
+ 1
21962
+ 1
21963
+ 1
21964
+ 1
21965
+ 1
21966
+ 1
21967
+ 1
21968
+ 1
21969
+ 1
21970
+ 1
21971
+ 1
21972
+ 1
21973
+ 1
21974
+ 1
21975
+ 1
21976
+ 1
21977
+ 1
21978
+ 1
21979
+ 1
21980
+ 1
21981
+ 1
21982
+ 1
21983
+ 1
21984
+ 1
21985
+ 1
21986
+ 1
21987
+ 1
21988
+ 1
21989
+ 1
21990
+ 1
21991
+ 1
21992
+ 1
21993
+ 1
21994
+ 1
21995
+ 1
21996
+ 1
21997
+ 1
21998
+ 1
21999
+ 1
22000
+ 1
22001
+ 1
22002
+ 1
22003
+ 1
22004
+ 1
22005
+ 1
22006
+ 1
22007
+ 1
22008
+ 1
22009
+ 1
22010
+ 1
22011
+ 1
22012
+ 1
22013
+ 1
22014
+ 1
22015
+ 1
22016
+ 1
22017
+ 1
22018
+ 1
22019
+ 1
22020
+ 1
22021
+ 1
22022
+ 1
22023
+ 1
22024
+ 1
22025
+ 1
22026
+ 1
22027
+ 1
22028
+ 1
22029
+ 1
22030
+ 1
22031
+ 1
22032
+ 1
22033
+ 1
22034
+ 1
22035
+ 1
22036
+ 1
22037
+ 1
22038
+ 1
22039
+ 1
22040
+ 1
22041
+ 1
22042
+ 1
22043
+ 1
22044
+ 1
22045
+ 1
22046
+ 1
22047
+ 1
22048
+ 1
22049
+ 1
22050
+ 1
22051
+ 1
22052
+ 1
22053
+ 1
22054
+ 1
22055
+ 1
22056
+ 1
22057
+ 1
22058
+ 1
22059
+ 1
22060
+ 1
22061
+ 1
22062
+ 1
22063
+ 1
22064
+ 1
22065
+ 1
22066
+ 1
22067
+ 1
22068
+ 1
22069
+ 1
22070
+ 1
22071
+ 1
22072
+ 1
22073
+ 1
22074
+ 1
22075
+ 1
22076
+ 1
22077
+ 1
22078
+ 1
22079
+ 1
22080
+ 1
22081
+ 1
22082
+ 1
22083
+ 1
22084
+ 1
22085
+ 1
22086
+ 1
22087
+ 1
22088
+ 1
22089
+ 1
22090
+ 1
22091
+ 1
22092
+ 1
22093
+ 1
22094
+ 1
22095
+ 1
22096
+ 1
22097
+ 1
22098
+ 1
22099
+ 1
22100
+ 1
22101
+ 1
22102
+ 1
22103
+ 1
22104
+ 1
22105
+ 1
22106
+ 1
22107
+ 1
22108
+ 1
22109
+ 1
22110
+ 1
22111
+ 1
22112
+ 1
22113
+ 1
22114
+ 1
22115
+ 1
22116
+ 1
22117
+ 1
22118
+ 1
22119
+ 1
22120
+ 1
22121
+ 1
22122
+ 1
22123
+ 1
22124
+ 1
22125
+ 1
22126
+ 1
22127
+ 1
22128
+ 1
22129
+ 1
22130
+ 1
22131
+ 1
22132
+ 1
22133
+ 1
22134
+ 1
22135
+ 1
22136
+ 1
22137
+ 1
22138
+ 1
22139
+ 1
22140
+ 1
22141
+ 1
22142
+ 1
22143
+ 1
22144
+ 1
22145
+ 1
22146
+ 1
22147
+ 1
22148
+ 1
22149
+ 1
22150
+ 1
22151
+ 1
22152
+ 1
22153
+ 1
22154
+ 1
22155
+ 1
22156
+ 1
22157
+ 1
22158
+ 1
22159
+ 1
22160
+ 1
22161
+ 1
22162
+ 1
22163
+ 1
22164
+ 1
22165
+ 1
22166
+ 1
22167
+ 1
22168
+ 1
22169
+ 1
22170
+ 1
22171
+ 1
22172
+ 1
22173
+ 1
22174
+ 1
22175
+ 1
22176
+ 1
22177
+ 1
22178
+ 1
22179
+ 1
22180
+ 1
22181
+ 1
22182
+ 1
22183
+ 1
22184
+ 1
22185
+ 1
22186
+ 1
22187
+ 1
22188
+ 1
22189
+ 1
22190
+ 1
22191
+ 1
22192
+ 1
22193
+ 1
22194
+ 1
22195
+ 1
22196
+ 1
22197
+ 1
22198
+ 1
22199
+ 1
22200
+ 1
22201
+ 1
22202
+ 1
22203
+ 1
22204
+ 1
22205
+ 1
22206
+ 1
22207
+ 1
22208
+ 1
22209
+ 1
22210
+ 1
22211
+ 1
22212
+ 1
22213
+ 1
22214
+ 1
22215
+ 1
22216
+ 1
22217
+ 1
22218
+ 1
22219
+ 1
22220
+ 1
22221
+ 1
22222
+ 1
22223
+ 1
22224
+ 1
22225
+ 1
22226
+ 1
22227
+ 1
22228
+ 1
22229
+ 1
22230
+ 1
22231
+ 1
22232
+ 1
22233
+ 1
22234
+ 1
22235
+ 1
22236
+ 1
22237
+ 1
22238
+ 1
22239
+ 1
22240
+ 1
22241
+ 1
22242
+ 1
22243
+ 1
22244
+ 1
22245
+ 1
22246
+ 1
22247
+ 1
22248
+ 1
22249
+ 1
22250
+ 1
22251
+ 1
22252
+ 1
22253
+ 1
22254
+ 1
22255
+ 1
22256
+ 1
22257
+ 1
22258
+ 1
22259
+ 1
22260
+ 1
22261
+ 1
22262
+ 1
22263
+ 1
22264
+ 1
22265
+ 1
22266
+ 1
22267
+ 1
22268
+ 1
22269
+ 1
22270
+ 1
22271
+ 1
22272
+ 1
22273
+ 1
22274
+ 1
22275
+ 1
22276
+ 1
22277
+ 1
22278
+ 1
22279
+ 1
22280
+ 1
22281
+ 1
22282
+ 1
22283
+ 1
22284
+ 1
22285
+ 1
22286
+ 1
22287
+ 1
22288
+ 1
22289
+ 1
22290
+ 1
22291
+ 1
22292
+ 1
22293
+ 1
22294
+ 1
22295
+ 1
22296
+ 1
22297
+ 1
22298
+ 1
22299
+ 1
22300
+ 1
22301
+ 1
22302
+ 1
22303
+ 1
22304
+ 1
22305
+ 1
22306
+ 1
22307
+ 1
22308
+ 1
22309
+ 1
22310
+ 1
22311
+ 1
22312
+ 1
22313
+ 1
22314
+ 1
22315
+ 1
22316
+ 1
22317
+ 1
22318
+ 1
22319
+ 1
22320
+ 1
22321
+ 1
22322
+ 1
22323
+ 1
22324
+ 1
22325
+ 1
22326
+ 1
22327
+ 1
22328
+ 1
22329
+ 1
22330
+ 1
22331
+ 1
22332
+ 1
22333
+ 1
22334
+ 1
22335
+ 1
22336
+ 1
22337
+ 1
22338
+ 1
22339
+ 1
22340
+ 1
22341
+ 1
22342
+ 1
22343
+ 1
22344
+ 1
22345
+ 1
22346
+ 1
22347
+ 1
22348
+ 1
22349
+ 1
22350
+ 1
22351
+ 1
22352
+ 1
22353
+ 1
22354
+ 1
22355
+ 1
22356
+ 1
22357
+ 1
22358
+ 1
22359
+ 1
22360
+ 1
22361
+ 1
22362
+ 1
22363
+ 1
22364
+ 1
22365
+ 1
22366
+ 1
22367
+ 1
22368
+ 1
22369
+ 1
22370
+ 1
22371
+ 1
22372
+ 1
22373
+ 1
22374
+ 1
22375
+ 1
22376
+ 1
22377
+ 1
22378
+ 1
22379
+ 1
22380
+ 1
22381
+ 1
22382
+ 1
22383
+ 1
22384
+ 1
22385
+ 1
22386
+ 1
22387
+ 1
22388
+ 1
22389
+ 1
22390
+ 1
22391
+ 1
22392
+ 1
22393
+ 1
22394
+ 1
22395
+ 1
22396
+ 1
22397
+ 1
22398
+ 1
22399
+ 1
22400
+ 1
22401
+ 1
22402
+ 1
22403
+ 1
22404
+ 1
22405
+ 1
22406
+ 1
22407
+ 1
22408
+ 1
22409
+ 1
22410
+ 1
22411
+ 1
22412
+ 1
22413
+ 1
22414
+ 1
22415
+ 1
22416
+ 1
22417
+ 1
22418
+ 1
22419
+ 1
22420
+ 1
22421
+ 1
22422
+ 1
22423
+ 1
22424
+ 1
22425
+ 1
22426
+ 1
22427
+ 1
22428
+ 1
22429
+ 1
22430
+ 1
22431
+ 1
22432
+ 1
22433
+ 1
22434
+ 1
22435
+ 1
22436
+ 1
22437
+ 1
22438
+ 1
22439
+ 1
22440
+ 1
22441
+ 1
22442
+ 1
22443
+ 1
22444
+ 1
22445
+ 1
22446
+ 1
22447
+ 1
22448
+ 1
22449
+ 1
22450
+ 1
22451
+ 1
22452
+ 1
22453
+ 1
22454
+ 1
22455
+ 1
22456
+ 1
22457
+ 1
22458
+ 1
22459
+ 1
22460
+ 1
22461
+ 1
22462
+ 1
22463
+ 1
22464
+ 1
22465
+ 1
22466
+ 1
22467
+ 1
22468
+ 1
22469
+ 1
22470
+ 1
22471
+ 1
22472
+ 1
22473
+ 1
22474
+ 1
22475
+ 1
22476
+ 1
22477
+ 1
22478
+ 1
22479
+ 1
22480
+ 1
22481
+ 1
22482
+ 1
22483
+ 1
22484
+ 1
22485
+ 1
22486
+ 1
22487
+ 1
22488
+ 1
22489
+ 1
22490
+ 1
22491
+ 1
22492
+ 1
22493
+ 1
22494
+ 1
22495
+ 1
22496
+ 1
22497
+ 1
22498
+ 1
22499
+ 1
22500
+ 1
22501
+ 1
22502
+ 1
22503
+ 1
22504
+ 1
22505
+ 1
22506
+ 1
22507
+ 1
22508
+ 1
22509
+ 1
22510
+ 1
22511
+ 1
22512
+ 1
22513
+ 1
22514
+ 1
22515
+ 1
22516
+ 1
22517
+ 1
22518
+ 1
22519
+ 1
22520
+ 1
22521
+ 1
22522
+ 1
22523
+ 1
22524
+ 1
22525
+ 1
22526
+ 1
22527
+ 1
22528
+ 1
22529
+ 1
22530
+ 1
22531
+ 1
22532
+ 1
22533
+ 1
22534
+ 1
22535
+ 1
22536
+ 1
22537
+ 1
22538
+ 1
22539
+ 1
22540
+ 1
22541
+ 1
22542
+ 1
22543
+ 1
22544
+ 1
22545
+ 1
22546
+ 1
22547
+ 1
22548
+ 1
22549
+ 1
22550
+ 1
22551
+ 1
22552
+ 1
22553
+ 1
22554
+ 1
22555
+ 1
22556
+ 1
22557
+ 1
22558
+ 1
22559
+ 1
22560
+ 1
22561
+ 1
22562
+ 1
22563
+ 1
22564
+ 1
22565
+ 1
22566
+ 1
22567
+ 1
22568
+ 1
22569
+ 1
22570
+ 1
22571
+ 1
22572
+ 1
22573
+ 1
22574
+ 1
22575
+ 1
22576
+ 1
22577
+ 1
22578
+ 1
22579
+ 1
22580
+ 1
22581
+ 1
22582
+ 1
22583
+ 1
22584
+ 1
22585
+ 2
22586
+ 1
22587
+ 1
22588
+ 1
22589
+ 1
22590
+ 1
22591
+ 1
22592
+ 1
22593
+ 1
22594
+ 1
22595
+ 1
22596
+ 1
22597
+ 1
22598
+ 1
22599
+ 1
22600
+ 1
22601
+ 1
22602
+ 1
22603
+ 1
22604
+ 1
22605
+ 1
22606
+ 1
22607
+ 1
22608
+ 1
22609
+ 1
22610
+ 1
22611
+ 1
22612
+ 1
22613
+ 1
22614
+ 1
22615
+ 1
22616
+ 1
22617
+ 1
22618
+ 1
22619
+ 1
22620
+ 1
22621
+ 1
22622
+ 1
22623
+ 1
22624
+ 1
22625
+ 1
22626
+ 1
22627
+ 1
22628
+ 1
22629
+ 1
22630
+ 1
22631
+ 1
22632
+ 1
22633
+ 1
22634
+ 1
22635
+ 1
22636
+ 1
22637
+ 1
22638
+ 1
22639
+ 1
22640
+ 1
22641
+ 1
22642
+ 1
22643
+ 1
22644
+ 1
22645
+ 1
22646
+ 1
22647
+ 1
22648
+ 1
22649
+ 1
22650
+ 1
22651
+ 1
22652
+ 1
22653
+ 1
22654
+ 1
22655
+ 1
22656
+ 1
22657
+ 1
22658
+ 1
22659
+ 1
22660
+ 1
22661
+ 1
22662
+ 1
22663
+ 1
22664
+ 1
22665
+ 1
22666
+ 1
22667
+ 1
22668
+ 1
22669
+ 1
22670
+ 1
22671
+ 1
22672
+ 1
22673
+ 1
22674
+ 1
22675
+ 1
22676
+ 1
22677
+ 1
22678
+ 1
22679
+ 1
22680
+ 1
22681
+ 1
22682
+ 1
22683
+ 1
22684
+ 1
22685
+ 1
22686
+ 1
22687
+ 1
22688
+ 1
22689
+ 1
22690
+ 1
22691
+ 1
22692
+ 1
22693
+ 1
22694
+ 1
22695
+ 1
22696
+ 1
22697
+ 1
22698
+ 1
22699
+ 1
22700
+ 1
22701
+ 1
22702
+ 1
22703
+ 1
22704
+ 1
22705
+ 1
22706
+ 1
22707
+ 1
22708
+ 1
22709
+ 1
22710
+ 1
22711
+ 1
22712
+ 1
22713
+ 1
22714
+ 1
22715
+ 1
22716
+ 1
22717
+ 1
22718
+ 1
22719
+ 1
22720
+ 1
22721
+ 1
22722
+ 1
22723
+ 1
22724
+ 1
22725
+ 1
22726
+ 1
22727
+ 1
22728
+ 1
22729
+ 1
22730
+ 1
22731
+ 1
22732
+ 1
22733
+ 1
22734
+ 1
22735
+ 1
22736
+ 1
22737
+ 1
22738
+ 1
22739
+ 1
22740
+ 1
22741
+ 1
22742
+ 1
22743
+ 1
22744
+ 1
22745
+ 1
22746
+ 1
22747
+ 1
22748
+ 1
22749
+ 1
22750
+ 1
22751
+ 1
22752
+ 1
22753
+ 1
22754
+ 1
22755
+ 1
22756
+ 1
22757
+ 1
22758
+ 1
22759
+ 1
22760
+ 1
22761
+ 1
22762
+ 1
22763
+ 1
22764
+ 1
22765
+ 1
22766
+ 1
22767
+ 1
22768
+ 1
22769
+ 1
22770
+ 1
22771
+ 1
22772
+ 1
22773
+ 1
22774
+ 1
22775
+ 1
22776
+ 1
22777
+ 1
22778
+ 1
22779
+ 1
22780
+ 1
22781
+ 1
22782
+ 1
22783
+ 1
22784
+ 1
22785
+ 1
22786
+ 1
22787
+ 1
22788
+ 1
22789
+ 1
22790
+ 1
22791
+ 1
22792
+ 1
22793
+ 1
22794
+ 1
22795
+ 1
22796
+ 1
22797
+ 1
22798
+ 1
22799
+ 1
22800
+ 1
22801
+ 1
22802
+ 1
22803
+ 1
22804
+ 1
22805
+ 1
22806
+ 1
22807
+ 1
22808
+ 1
22809
+ 1
22810
+ 1
22811
+ 1
22812
+ 1
22813
+ 1
22814
+ 1
22815
+ 1
22816
+ 1
22817
+ 1
22818
+ 1
22819
+ 1
22820
+ 1
22821
+ 1
22822
+ 1
22823
+ 1
22824
+ 1
22825
+ 1
22826
+ 1
22827
+ 1
22828
+ 1
22829
+ 1
22830
+ 1
22831
+ 1
22832
+ 1
22833
+ 1
22834
+ 1
22835
+ 1
22836
+ 1
22837
+ 1
22838
+ 1
22839
+ 1
22840
+ 1
22841
+ 1
22842
+ 1
22843
+ 1
22844
+ 1
22845
+ 1
22846
+ 1
22847
+ 1
22848
+ 1
22849
+ 1
22850
+ 1
22851
+ 1
22852
+ 1
22853
+ 1
22854
+ 1
22855
+ 1
22856
+ 1
22857
+ 1
22858
+ 2
22859
+ 2
22860
+ 1
22861
+ 1
22862
+ 1
22863
+ 1
22864
+ 1
22865
+ 1
22866
+ 1
22867
+ 2
22868
+ 1
22869
+ 1
22870
+ 1
22871
+ 1
22872
+ 1
basicsr/archs/gmflow/data/datasets.py ADDED
@@ -0,0 +1,312 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Data loading based on https://github.com/NVIDIA/flownet2-pytorch
2
+
3
+ import numpy as np
4
+ import torch
5
+ import torch.utils.data as data
6
+
7
+ import os
8
+ import random
9
+ from glob import glob
10
+ import os.path as osp
11
+
12
+ from utils import frame_utils
13
+ from data.transforms import FlowAugmentor, SparseFlowAugmentor
14
+
15
+
16
+ class FlowDataset(data.Dataset):
17
+ def __init__(self, aug_params=None, sparse=False,
18
+ load_occlusion=False,
19
+ ):
20
+ self.augmentor = None
21
+ self.sparse = sparse
22
+
23
+ if aug_params is not None:
24
+ if sparse:
25
+ self.augmentor = SparseFlowAugmentor(**aug_params)
26
+ else:
27
+ self.augmentor = FlowAugmentor(**aug_params)
28
+
29
+ self.is_test = False
30
+ self.init_seed = False
31
+ self.flow_list = []
32
+ self.image_list = []
33
+ self.extra_info = []
34
+
35
+ self.load_occlusion = load_occlusion
36
+ self.occ_list = []
37
+
38
+ def __getitem__(self, index):
39
+
40
+ if self.is_test:
41
+ img1 = frame_utils.read_gen(self.image_list[index][0])
42
+ img2 = frame_utils.read_gen(self.image_list[index][1])
43
+
44
+ img1 = np.array(img1).astype(np.uint8)[..., :3]
45
+ img2 = np.array(img2).astype(np.uint8)[..., :3]
46
+
47
+ img1 = torch.from_numpy(img1).permute(2, 0, 1).float()
48
+ img2 = torch.from_numpy(img2).permute(2, 0, 1).float()
49
+
50
+ return img1, img2, self.extra_info[index]
51
+
52
+ if not self.init_seed:
53
+ worker_info = torch.utils.data.get_worker_info()
54
+ if worker_info is not None:
55
+ torch.manual_seed(worker_info.id)
56
+ np.random.seed(worker_info.id)
57
+ random.seed(worker_info.id)
58
+ self.init_seed = True
59
+
60
+ index = index % len(self.image_list)
61
+ valid = None
62
+
63
+ if self.sparse:
64
+ flow, valid = frame_utils.readFlowKITTI(self.flow_list[index]) # [H, W, 2], [H, W]
65
+ else:
66
+ flow = frame_utils.read_gen(self.flow_list[index])
67
+
68
+ if self.load_occlusion:
69
+ occlusion = frame_utils.read_gen(self.occ_list[index]) # [H, W], 0 or 255 (occluded)
70
+
71
+ img1 = frame_utils.read_gen(self.image_list[index][0])
72
+ img2 = frame_utils.read_gen(self.image_list[index][1])
73
+
74
+ flow = np.array(flow).astype(np.float32)
75
+ img1 = np.array(img1).astype(np.uint8)
76
+ img2 = np.array(img2).astype(np.uint8)
77
+
78
+ if self.load_occlusion:
79
+ occlusion = np.array(occlusion).astype(np.float32)
80
+
81
+ # grayscale images
82
+ if len(img1.shape) == 2:
83
+ img1 = np.tile(img1[..., None], (1, 1, 3))
84
+ img2 = np.tile(img2[..., None], (1, 1, 3))
85
+ else:
86
+ img1 = img1[..., :3]
87
+ img2 = img2[..., :3]
88
+
89
+ if self.augmentor is not None:
90
+ if self.sparse:
91
+ img1, img2, flow, valid = self.augmentor(img1, img2, flow, valid)
92
+ else:
93
+ if self.load_occlusion:
94
+ img1, img2, flow, occlusion = self.augmentor(img1, img2, flow, occlusion=occlusion)
95
+ else:
96
+ img1, img2, flow = self.augmentor(img1, img2, flow)
97
+
98
+ img1 = torch.from_numpy(img1).permute(2, 0, 1).float()
99
+ img2 = torch.from_numpy(img2).permute(2, 0, 1).float()
100
+ flow = torch.from_numpy(flow).permute(2, 0, 1).float()
101
+
102
+ if self.load_occlusion:
103
+ occlusion = torch.from_numpy(occlusion) # [H, W]
104
+
105
+ if valid is not None:
106
+ valid = torch.from_numpy(valid)
107
+ else:
108
+ valid = (flow[0].abs() < 1000) & (flow[1].abs() < 1000)
109
+
110
+ # mask out occluded pixels
111
+ if self.load_occlusion:
112
+ # non-occlusion: 0, occlusion: 255
113
+ noc_valid = 1 - occlusion / 255. # 0 or 1
114
+
115
+ return img1, img2, flow, valid.float(), noc_valid.float()
116
+
117
+ return img1, img2, flow, valid.float()
118
+
119
+ def __rmul__(self, v):
120
+ self.flow_list = v * self.flow_list
121
+ self.image_list = v * self.image_list
122
+
123
+ return self
124
+
125
+ def __len__(self):
126
+ return len(self.image_list)
127
+
128
+
129
+ class MpiSintel(FlowDataset):
130
+ def __init__(self, aug_params=None, split='training',
131
+ root='datasets/Sintel',
132
+ dstype='clean',
133
+ load_occlusion=False,
134
+ ):
135
+ super(MpiSintel, self).__init__(aug_params,
136
+ load_occlusion=load_occlusion,
137
+ )
138
+
139
+ flow_root = osp.join(root, split, 'flow')
140
+ image_root = osp.join(root, split, dstype)
141
+
142
+ if load_occlusion:
143
+ occlusion_root = osp.join(root, split, 'occlusions')
144
+
145
+ if split == 'test':
146
+ self.is_test = True
147
+
148
+ for scene in os.listdir(image_root):
149
+ image_list = sorted(glob(osp.join(image_root, scene, '*.png')))
150
+ for i in range(len(image_list) - 1):
151
+ self.image_list += [[image_list[i], image_list[i + 1]]]
152
+ self.extra_info += [(scene, i)] # scene and frame_id
153
+
154
+ if split != 'test':
155
+ self.flow_list += sorted(glob(osp.join(flow_root, scene, '*.flo')))
156
+
157
+ if load_occlusion:
158
+ self.occ_list += sorted(glob(osp.join(occlusion_root, scene, '*.png')))
159
+
160
+
161
+ class FlyingChairs(FlowDataset):
162
+ def __init__(self, aug_params=None, split='train',
163
+ root='datasets/FlyingChairs_release/data',
164
+ ):
165
+ super(FlyingChairs, self).__init__(aug_params)
166
+
167
+ images = sorted(glob(osp.join(root, '*.ppm')))
168
+ flows = sorted(glob(osp.join(root, '*.flo')))
169
+ assert (len(images) // 2 == len(flows))
170
+
171
+ split_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'chairs_split.txt')
172
+ split_list = np.loadtxt(split_file, dtype=np.int32)
173
+ for i in range(len(flows)):
174
+ xid = split_list[i]
175
+ if (split == 'training' and xid == 1) or (split == 'validation' and xid == 2):
176
+ self.flow_list += [flows[i]]
177
+ self.image_list += [[images[2 * i], images[2 * i + 1]]]
178
+
179
+
180
+ class FlyingThings3D(FlowDataset):
181
+ def __init__(self, aug_params=None,
182
+ root='datasets/FlyingThings3D',
183
+ dstype='frames_cleanpass',
184
+ test_set=False,
185
+ validate_subset=True,
186
+ ):
187
+ super(FlyingThings3D, self).__init__(aug_params)
188
+
189
+ img_dir = root
190
+ flow_dir = root
191
+
192
+ for cam in ['left']:
193
+ for direction in ['into_future', 'into_past']:
194
+ if test_set:
195
+ image_dirs = sorted(glob(osp.join(img_dir, dstype, 'TEST/*/*')))
196
+ else:
197
+ image_dirs = sorted(glob(osp.join(img_dir, dstype, 'TRAIN/*/*')))
198
+ image_dirs = sorted([osp.join(f, cam) for f in image_dirs])
199
+
200
+ if test_set:
201
+ flow_dirs = sorted(glob(osp.join(flow_dir, 'optical_flow/TEST/*/*')))
202
+ else:
203
+ flow_dirs = sorted(glob(osp.join(flow_dir, 'optical_flow/TRAIN/*/*')))
204
+ flow_dirs = sorted([osp.join(f, direction, cam) for f in flow_dirs])
205
+
206
+ for idir, fdir in zip(image_dirs, flow_dirs):
207
+ images = sorted(glob(osp.join(idir, '*.png')))
208
+ flows = sorted(glob(osp.join(fdir, '*.pfm')))
209
+ for i in range(len(flows) - 1):
210
+ if direction == 'into_future':
211
+ self.image_list += [[images[i], images[i + 1]]]
212
+ self.flow_list += [flows[i]]
213
+ elif direction == 'into_past':
214
+ self.image_list += [[images[i + 1], images[i]]]
215
+ self.flow_list += [flows[i + 1]]
216
+
217
+ # validate on 1024 subset of test set for fast speed
218
+ if test_set and validate_subset:
219
+ num_val_samples = 1024
220
+ all_test_samples = len(self.image_list) # 7866
221
+
222
+ stride = all_test_samples // num_val_samples
223
+ remove = all_test_samples % num_val_samples
224
+
225
+ # uniformly sample a subset
226
+ self.image_list = self.image_list[:-remove][::stride]
227
+ self.flow_list = self.flow_list[:-remove][::stride]
228
+
229
+
230
+ class KITTI(FlowDataset):
231
+ def __init__(self, aug_params=None, split='training',
232
+ root='datasets/KITTI',
233
+ ):
234
+ super(KITTI, self).__init__(aug_params, sparse=True,
235
+ )
236
+ if split == 'testing':
237
+ self.is_test = True
238
+
239
+ root = osp.join(root, split)
240
+ images1 = sorted(glob(osp.join(root, 'image_2/*_10.png')))
241
+ images2 = sorted(glob(osp.join(root, 'image_2/*_11.png')))
242
+
243
+ for img1, img2 in zip(images1, images2):
244
+ frame_id = img1.split('/')[-1]
245
+ self.extra_info += [[frame_id]]
246
+ self.image_list += [[img1, img2]]
247
+
248
+ if split == 'training':
249
+ self.flow_list = sorted(glob(osp.join(root, 'flow_occ/*_10.png')))
250
+
251
+
252
+ class HD1K(FlowDataset):
253
+ def __init__(self, aug_params=None, root='datasets/HD1K'):
254
+ super(HD1K, self).__init__(aug_params, sparse=True)
255
+
256
+ seq_ix = 0
257
+ while 1:
258
+ flows = sorted(glob(os.path.join(root, 'hd1k_flow_gt', 'flow_occ/%06d_*.png' % seq_ix)))
259
+ images = sorted(glob(os.path.join(root, 'hd1k_input', 'image_2/%06d_*.png' % seq_ix)))
260
+
261
+ if len(flows) == 0:
262
+ break
263
+
264
+ for i in range(len(flows) - 1):
265
+ self.flow_list += [flows[i]]
266
+ self.image_list += [[images[i], images[i + 1]]]
267
+
268
+ seq_ix += 1
269
+
270
+
271
+ def build_train_dataset(args):
272
+ """ Create the data loader for the corresponding training set """
273
+ if args.stage == 'chairs':
274
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.1, 'max_scale': 1.0, 'do_flip': True}
275
+
276
+ train_dataset = FlyingChairs(aug_params, split='training')
277
+
278
+ elif args.stage == 'things':
279
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.4, 'max_scale': 0.8, 'do_flip': True}
280
+
281
+ clean_dataset = FlyingThings3D(aug_params, dstype='frames_cleanpass')
282
+ final_dataset = FlyingThings3D(aug_params, dstype='frames_finalpass')
283
+ train_dataset = clean_dataset + final_dataset
284
+
285
+ elif args.stage == 'sintel':
286
+ # 1041 pairs for clean and final each
287
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.2, 'max_scale': 0.6, 'do_flip': True}
288
+
289
+ things = FlyingThings3D(aug_params, dstype='frames_cleanpass') # 40302
290
+
291
+ sintel_clean = MpiSintel(aug_params, split='training', dstype='clean')
292
+ sintel_final = MpiSintel(aug_params, split='training', dstype='final')
293
+
294
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.3, 'max_scale': 0.5, 'do_flip': True}
295
+
296
+ kitti = KITTI(aug_params=aug_params) # 200
297
+
298
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.5, 'max_scale': 0.2, 'do_flip': True}
299
+
300
+ hd1k = HD1K(aug_params=aug_params) # 1047
301
+
302
+ train_dataset = 100 * sintel_clean + 100 * sintel_final + 200 * kitti + 5 * hd1k + things
303
+
304
+ elif args.stage == 'kitti':
305
+ aug_params = {'crop_size': args.image_size, 'min_scale': -0.2, 'max_scale': 0.4, 'do_flip': False}
306
+
307
+ train_dataset = KITTI(aug_params, split='training',
308
+ )
309
+ else:
310
+ raise ValueError(f'stage {args.stage} is not supported')
311
+
312
+ return train_dataset
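For reference, a minimal sketch of how build_train_dataset could be exercised on its own. The args namespace below is a hypothetical stand-in for the argparse namespace built in main.py, and the datasets are expected under datasets/ as in the class defaults above:

    from argparse import Namespace

    # hypothetical stand-in for the argparse namespace assembled by main.py
    args = Namespace(stage='chairs', image_size=[384, 512])
    train_dataset = build_train_dataset(args)   # FlyingChairs 'training' split with the chairs aug params
    print('training pairs:', len(train_dataset))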
basicsr/archs/gmflow/data/transforms.py ADDED
@@ -0,0 +1,284 @@
1
+ import numpy as np
2
+ import cv2
3
+ from PIL import Image
4
+ from torchvision.transforms import ColorJitter
5
+
6
+
7
+ class FlowAugmentor:
8
+ def __init__(self, crop_size, min_scale=-0.2, max_scale=0.5, do_flip=True,
9
+ no_eraser_aug=True,
10
+ ):
11
+ # spatial augmentation params
12
+ self.crop_size = crop_size
13
+ self.min_scale = min_scale
14
+ self.max_scale = max_scale
15
+ self.spatial_aug_prob = 0.8
16
+ self.stretch_prob = 0.8
17
+ self.max_stretch = 0.2
18
+
19
+ # flip augmentation params
20
+ self.do_flip = do_flip
21
+ self.h_flip_prob = 0.5
22
+ self.v_flip_prob = 0.1
23
+
24
+ # photometric augmentation params
25
+ self.photo_aug = ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.5 / 3.14)
26
+
27
+ self.asymmetric_color_aug_prob = 0.2
28
+
29
+ if no_eraser_aug:
30
+ # we disable eraser aug since no obvious improvement is observed in our experiments
31
+ self.eraser_aug_prob = -1
32
+ else:
33
+ self.eraser_aug_prob = 0.5
34
+
35
+ def color_transform(self, img1, img2):
36
+ """ Photometric augmentation """
37
+
38
+ # asymmetric
39
+ if np.random.rand() < self.asymmetric_color_aug_prob:
40
+ img1 = np.array(self.photo_aug(Image.fromarray(img1)), dtype=np.uint8)
41
+ img2 = np.array(self.photo_aug(Image.fromarray(img2)), dtype=np.uint8)
42
+
43
+ # symmetric
44
+ else:
45
+ image_stack = np.concatenate([img1, img2], axis=0)
46
+ image_stack = np.array(self.photo_aug(Image.fromarray(image_stack)), dtype=np.uint8)
47
+ img1, img2 = np.split(image_stack, 2, axis=0)
48
+
49
+ return img1, img2
50
+
51
+ def eraser_transform(self, img1, img2, bounds=[50, 100]):
52
+ """ Occlusion augmentation """
53
+
54
+ ht, wd = img1.shape[:2]
55
+ if np.random.rand() < self.eraser_aug_prob:
56
+ mean_color = np.mean(img2.reshape(-1, 3), axis=0)
57
+ for _ in range(np.random.randint(1, 3)):
58
+ x0 = np.random.randint(0, wd)
59
+ y0 = np.random.randint(0, ht)
60
+ dx = np.random.randint(bounds[0], bounds[1])
61
+ dy = np.random.randint(bounds[0], bounds[1])
62
+ img2[y0:y0 + dy, x0:x0 + dx, :] = mean_color
63
+
64
+ return img1, img2
65
+
66
+ def spatial_transform(self, img1, img2, flow, occlusion=None):
67
+ # randomly sample scale
68
+ ht, wd = img1.shape[:2]
69
+
70
+ min_scale = np.maximum(
71
+ (self.crop_size[0] + 8) / float(ht),
72
+ (self.crop_size[1] + 8) / float(wd))
73
+
74
+ scale = 2 ** np.random.uniform(self.min_scale, self.max_scale)
75
+ scale_x = scale
76
+ scale_y = scale
77
+ if np.random.rand() < self.stretch_prob:
78
+ scale_x *= 2 ** np.random.uniform(-self.max_stretch, self.max_stretch)
79
+ scale_y *= 2 ** np.random.uniform(-self.max_stretch, self.max_stretch)
80
+
81
+ scale_x = np.clip(scale_x, min_scale, None)
82
+ scale_y = np.clip(scale_y, min_scale, None)
83
+
84
+ if np.random.rand() < self.spatial_aug_prob:
85
+ # rescale the images
86
+ img1 = cv2.resize(img1, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
87
+ img2 = cv2.resize(img2, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
88
+ flow = cv2.resize(flow, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
89
+ flow = flow * [scale_x, scale_y]
90
+
91
+ if occlusion is not None:
92
+ occlusion = cv2.resize(occlusion, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
93
+
94
+ if self.do_flip:
95
+ if np.random.rand() < self.h_flip_prob: # h-flip
96
+ img1 = img1[:, ::-1]
97
+ img2 = img2[:, ::-1]
98
+ flow = flow[:, ::-1] * [-1.0, 1.0]
99
+
100
+ if occlusion is not None:
101
+ occlusion = occlusion[:, ::-1]
102
+
103
+ if np.random.rand() < self.v_flip_prob: # v-flip
104
+ img1 = img1[::-1, :]
105
+ img2 = img2[::-1, :]
106
+ flow = flow[::-1, :] * [1.0, -1.0]
107
+
108
+ if occlusion is not None:
109
+ occlusion = occlusion[::-1, :]
110
+
111
+ # random crop; fall back to a zero offset when the image is not larger than the crop size
112
+ if img1.shape[0] - self.crop_size[0] > 0:
113
+ y0 = np.random.randint(0, img1.shape[0] - self.crop_size[0])
114
+ else:
115
+ y0 = 0
116
+ if img1.shape[1] - self.crop_size[1] > 0:
117
+ x0 = np.random.randint(0, img1.shape[1] - self.crop_size[1])
118
+ else:
119
+ x0 = 0
120
+
121
+ img1 = img1[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
122
+ img2 = img2[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
123
+ flow = flow[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
124
+
125
+ if occlusion is not None:
126
+ occlusion = occlusion[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
127
+ return img1, img2, flow, occlusion
128
+
129
+ return img1, img2, flow
130
+
131
+ def __call__(self, img1, img2, flow, occlusion=None):
132
+ img1, img2 = self.color_transform(img1, img2)
133
+ img1, img2 = self.eraser_transform(img1, img2)
134
+
135
+ if occlusion is not None:
136
+ img1, img2, flow, occlusion = self.spatial_transform(
137
+ img1, img2, flow, occlusion)
138
+ else:
139
+ img1, img2, flow = self.spatial_transform(img1, img2, flow)
140
+
141
+ img1 = np.ascontiguousarray(img1)
142
+ img2 = np.ascontiguousarray(img2)
143
+ flow = np.ascontiguousarray(flow)
144
+
145
+ if occlusion is not None:
146
+ occlusion = np.ascontiguousarray(occlusion)
147
+ return img1, img2, flow, occlusion
148
+
149
+ return img1, img2, flow
150
+
151
+
152
+ class SparseFlowAugmentor:
153
+ def __init__(self, crop_size, min_scale=-0.2, max_scale=0.5, do_flip=False,
154
+ no_eraser_aug=True,
155
+ ):
156
+ # spatial augmentation params
157
+ self.crop_size = crop_size
158
+ self.min_scale = min_scale
159
+ self.max_scale = max_scale
160
+ self.spatial_aug_prob = 0.8
161
+ self.stretch_prob = 0.8
162
+ self.max_stretch = 0.2
163
+
164
+ # flip augmentation params
165
+ self.do_flip = do_flip
166
+ self.h_flip_prob = 0.5
167
+ self.v_flip_prob = 0.1
168
+
169
+ # photometric augmentation params
170
+ self.photo_aug = ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.3 / 3.14)
171
+ self.asymmetric_color_aug_prob = 0.2
172
+
173
+ if no_eraser_aug:
174
+ # we disable eraser aug since no obvious improvement is observed in our experiments
175
+ self.eraser_aug_prob = -1
176
+ else:
177
+ self.eraser_aug_prob = 0.5
178
+
179
+ def color_transform(self, img1, img2):
180
+ image_stack = np.concatenate([img1, img2], axis=0)
181
+ image_stack = np.array(self.photo_aug(Image.fromarray(image_stack)), dtype=np.uint8)
182
+ img1, img2 = np.split(image_stack, 2, axis=0)
183
+ return img1, img2
184
+
185
+ def eraser_transform(self, img1, img2):
186
+ ht, wd = img1.shape[:2]
187
+ if np.random.rand() < self.eraser_aug_prob:
188
+ mean_color = np.mean(img2.reshape(-1, 3), axis=0)
189
+ for _ in range(np.random.randint(1, 3)):
190
+ x0 = np.random.randint(0, wd)
191
+ y0 = np.random.randint(0, ht)
192
+ dx = np.random.randint(50, 100)
193
+ dy = np.random.randint(50, 100)
194
+ img2[y0:y0 + dy, x0:x0 + dx, :] = mean_color
195
+
196
+ return img1, img2
197
+
198
+ def resize_sparse_flow_map(self, flow, valid, fx=1.0, fy=1.0):
199
+ ht, wd = flow.shape[:2]
200
+ coords = np.meshgrid(np.arange(wd), np.arange(ht))
201
+ coords = np.stack(coords, axis=-1)
202
+
203
+ coords = coords.reshape(-1, 2).astype(np.float32)
204
+ flow = flow.reshape(-1, 2).astype(np.float32)
205
+ valid = valid.reshape(-1).astype(np.float32)
206
+
207
+ coords0 = coords[valid >= 1]
208
+ flow0 = flow[valid >= 1]
209
+
210
+ ht1 = int(round(ht * fy))
211
+ wd1 = int(round(wd * fx))
212
+
213
+ coords1 = coords0 * [fx, fy]
214
+ flow1 = flow0 * [fx, fy]
215
+
216
+ xx = np.round(coords1[:, 0]).astype(np.int32)
217
+ yy = np.round(coords1[:, 1]).astype(np.int32)
218
+
219
+ v = (xx > 0) & (xx < wd1) & (yy > 0) & (yy < ht1)
220
+ xx = xx[v]
221
+ yy = yy[v]
222
+ flow1 = flow1[v]
223
+
224
+ flow_img = np.zeros([ht1, wd1, 2], dtype=np.float32)
225
+ valid_img = np.zeros([ht1, wd1], dtype=np.int32)
226
+
227
+ flow_img[yy, xx] = flow1
228
+ valid_img[yy, xx] = 1
229
+
230
+ return flow_img, valid_img
231
+
232
+ def spatial_transform(self, img1, img2, flow, valid):
233
+ # randomly sample scale
234
+
235
+ ht, wd = img1.shape[:2]
236
+ min_scale = np.maximum(
237
+ (self.crop_size[0] + 1) / float(ht),
238
+ (self.crop_size[1] + 1) / float(wd))
239
+
240
+ scale = 2 ** np.random.uniform(self.min_scale, self.max_scale)
241
+ scale_x = np.clip(scale, min_scale, None)
242
+ scale_y = np.clip(scale, min_scale, None)
243
+
244
+ if np.random.rand() < self.spatial_aug_prob:
245
+ # rescale the images
246
+ img1 = cv2.resize(img1, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
247
+ img2 = cv2.resize(img2, None, fx=scale_x, fy=scale_y, interpolation=cv2.INTER_LINEAR)
248
+
249
+ flow, valid = self.resize_sparse_flow_map(flow, valid, fx=scale_x, fy=scale_y)
250
+
251
+ if self.do_flip:
252
+ if np.random.rand() < 0.5: # h-flip
253
+ img1 = img1[:, ::-1]
254
+ img2 = img2[:, ::-1]
255
+ flow = flow[:, ::-1] * [-1.0, 1.0]
256
+ valid = valid[:, ::-1]
257
+
258
+ margin_y = 20
259
+ margin_x = 50
260
+
261
+ y0 = np.random.randint(0, img1.shape[0] - self.crop_size[0] + margin_y)
262
+ x0 = np.random.randint(-margin_x, img1.shape[1] - self.crop_size[1] + margin_x)
263
+
264
+ y0 = np.clip(y0, 0, img1.shape[0] - self.crop_size[0])
265
+ x0 = np.clip(x0, 0, img1.shape[1] - self.crop_size[1])
266
+
267
+ img1 = img1[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
268
+ img2 = img2[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
269
+ flow = flow[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
270
+ valid = valid[y0:y0 + self.crop_size[0], x0:x0 + self.crop_size[1]]
271
+ return img1, img2, flow, valid
272
+
273
+ def __call__(self, img1, img2, flow, valid):
274
+ img1, img2 = self.color_transform(img1, img2)
275
+ img1, img2 = self.eraser_transform(img1, img2)
276
+
277
+ img1, img2, flow, valid = self.spatial_transform(img1, img2, flow, valid)
278
+
279
+ img1 = np.ascontiguousarray(img1)
280
+ img2 = np.ascontiguousarray(img2)
281
+ flow = np.ascontiguousarray(flow)
282
+ valid = np.ascontiguousarray(valid)
283
+
284
+ return img1, img2, flow, valid
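A small usage sketch for the dense augmentor above, with random stand-in arrays (in practice FlowDataset.__getitem__ is the caller; the sizes here are arbitrary):

    import numpy as np

    aug = FlowAugmentor(crop_size=(368, 496), min_scale=-0.1, max_scale=1.0, do_flip=True)
    img1 = np.random.randint(0, 255, (436, 1024, 3), dtype=np.uint8)
    img2 = np.random.randint(0, 255, (436, 1024, 3), dtype=np.uint8)
    flow = np.random.randn(436, 1024, 2).astype(np.float32)
    img1, img2, flow = aug(img1, img2, flow)  # all three come back cropped to 368 x 496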
basicsr/archs/gmflow/environment.yml ADDED
@@ -0,0 +1,162 @@
1
+ name: gmflow
2
+ channels:
3
+ - pytorch
4
+ - defaults
5
+ dependencies:
6
+ - _libgcc_mutex=0.1=main
7
+ - _openmp_mutex=4.5=1_gnu
8
+ - blas=1.0=mkl
9
+ - bottleneck=1.3.2=py38heb32a55_1
10
+ - brotli=1.0.9=he6710b0_2
11
+ - bzip2=1.0.8=h7b6447c_0
12
+ - ca-certificates=2021.10.26=h06a4308_2
13
+ - certifi=2021.10.8=py38h06a4308_2
14
+ - cudatoolkit=10.2.89=hfd86e86_1
15
+ - cycler=0.10.0=py38_0
16
+ - dbus=1.13.18=hb2f20db_0
17
+ - expat=2.4.1=h2531618_2
18
+ - ffmpeg=4.3=hf484d3e_0
19
+ - fontconfig=2.13.1=h6c09931_0
20
+ - fonttools=4.25.0=pyhd3eb1b0_0
21
+ - freetype=2.10.4=h5ab3b9f_0
22
+ - glib=2.69.0=h5202010_0
23
+ - gmp=6.2.1=h2531618_2
24
+ - gnutls=3.6.15=he1e5248_0
25
+ - gst-plugins-base=1.14.0=h8213a91_2
26
+ - gstreamer=1.14.0=h28cd5cc_2
27
+ - icu=58.2=he6710b0_3
28
+ - imageio=2.9.0=pyhd3eb1b0_0
29
+ - intel-openmp=2021.3.0=h06a4308_3350
30
+ - jpeg=9b=h024ee3a_2
31
+ - kiwisolver=1.3.1=py38h2531618_0
32
+ - lame=3.100=h7b6447c_0
33
+ - lcms2=2.12=h3be6417_0
34
+ - ld_impl_linux-64=2.35.1=h7274673_9
35
+ - libffi=3.3=he6710b0_2
36
+ - libgcc-ng=9.3.0=h5101ec6_17
37
+ - libgfortran-ng=7.5.0=ha8ba4b0_17
38
+ - libgfortran4=7.5.0=ha8ba4b0_17
39
+ - libgomp=9.3.0=h5101ec6_17
40
+ - libiconv=1.15=h63c8f33_5
41
+ - libidn2=2.3.2=h7f8727e_0
42
+ - libpng=1.6.37=hbc83047_0
43
+ - libstdcxx-ng=9.3.0=hd4cf53a_17
44
+ - libtasn1=4.16.0=h27cfd23_0
45
+ - libtiff=4.2.0=h85742a9_0
46
+ - libunistring=0.9.10=h27cfd23_0
47
+ - libuuid=1.0.3=h1bed415_2
48
+ - libuv=1.40.0=h7b6447c_0
49
+ - libwebp-base=1.2.0=h27cfd23_0
50
+ - libxcb=1.14=h7b6447c_0
51
+ - libxml2=2.9.12=h03d6c58_0
52
+ - lz4-c=1.9.3=h2531618_0
53
+ - matplotlib=3.4.2=py38h06a4308_0
54
+ - matplotlib-base=3.4.2=py38hab158f2_0
55
+ - mkl=2021.3.0=h06a4308_520
56
+ - mkl-service=2.4.0=py38h7f8727e_0
57
+ - mkl_fft=1.3.0=py38h42c9631_2
58
+ - mkl_random=1.2.2=py38h51133e4_0
59
+ - munkres=1.1.4=py_0
60
+ - ncurses=6.2=he6710b0_1
61
+ - nettle=3.7.3=hbbd107a_1
62
+ - ninja=1.10.2=hff7bd54_1
63
+ - numexpr=2.7.3=py38h22e1b3c_1
64
+ - numpy=1.20.3=py38hf144106_0
65
+ - numpy-base=1.20.3=py38h74d4b33_0
66
+ - olefile=0.46=py_0
67
+ - openh264=2.1.0=hd408876_0
68
+ - openjpeg=2.3.0=h05c96fa_1
69
+ - openssl=1.1.1m=h7f8727e_0
70
+ - pandas=1.3.2=py38h8c16a72_0
71
+ - pcre=8.45=h295c915_0
72
+ - pillow=8.3.1=py38h2c7a002_0
73
+ - pip=21.2.2=py38h06a4308_0
74
+ - pyparsing=2.4.7=pyhd3eb1b0_0
75
+ - pyqt=5.9.2=py38h05f1152_4
76
+ - python=3.8.11=h12debd9_0_cpython
77
+ - python-dateutil=2.8.2=pyhd3eb1b0_0
78
+ - pytorch=1.9.0=py3.8_cuda10.2_cudnn7.6.5_0
79
+ - pytz=2021.1=pyhd3eb1b0_0
80
+ - qt=5.9.7=h5867ecd_1
81
+ - readline=8.1=h27cfd23_0
82
+ - scipy=1.6.2=py38had2a1c9_1
83
+ - seaborn=0.11.2=pyhd3eb1b0_0
84
+ - setuptools=52.0.0=py38h06a4308_0
85
+ - sip=4.19.13=py38he6710b0_0
86
+ - six=1.16.0=pyhd3eb1b0_0
87
+ - sqlite=3.36.0=hc218d9a_0
88
+ - tk=8.6.10=hbc83047_0
89
+ - torchaudio=0.9.0=py38
90
+ - torchvision=0.10.0=py38_cu102
91
+ - tornado=6.1=py38h27cfd23_0
92
+ - typing_extensions=3.10.0.0=pyh06a4308_0
93
+ - wheel=0.36.2=pyhd3eb1b0_0
94
+ - xz=5.2.5=h7b6447c_0
95
+ - zlib=1.2.11=h7b6447c_3
96
+ - zstd=1.4.9=haebb681_0
97
+ - pip:
98
+ - absl-py==0.13.0
99
+ - argon2-cffi==21.1.0
100
+ - attrs==21.2.0
101
+ - backcall==0.2.0
102
+ - bleach==4.1.0
103
+ - cachetools==4.2.2
104
+ - cffi==1.14.6
105
+ - charset-normalizer==2.0.4
106
+ - debugpy==1.4.3
107
+ - decorator==5.1.0
108
+ - defusedxml==0.7.1
109
+ - einops==0.3.2
110
+ - entrypoints==0.3
111
+ - google-auth==1.34.0
112
+ - google-auth-oauthlib==0.4.5
113
+ - grpcio==1.39.0
114
+ - idna==3.2
115
+ - ipykernel==6.4.1
116
+ - ipython==7.27.0
117
+ - ipython-genutils==0.2.0
118
+ - jedi==0.18.0
119
+ - jinja2==3.0.1
120
+ - jsonschema==3.2.0
121
+ - jupyter-client==7.0.3
122
+ - jupyter-core==4.8.1
123
+ - jupyterlab-pygments==0.1.2
124
+ - markdown==3.3.4
125
+ - markupsafe==2.0.1
126
+ - matplotlib-inline==0.1.3
127
+ - mistune==0.8.4
128
+ - nbclient==0.5.4
129
+ - nbconvert==6.1.0
130
+ - nbformat==5.1.3
131
+ - nest-asyncio==1.5.1
132
+ - oauthlib==3.1.1
133
+ - opencv-python==4.5.3.56
134
+ - packaging==21.0
135
+ - pandocfilters==1.5.0
136
+ - parso==0.8.2
137
+ - pexpect==4.8.0
138
+ - pickleshare==0.7.5
139
+ - prometheus-client==0.11.0
140
+ - prompt-toolkit==3.0.20
141
+ - protobuf==3.17.3
142
+ - ptyprocess==0.7.0
143
+ - pyasn1==0.4.8
144
+ - pyasn1-modules==0.2.8
145
+ - pycparser==2.20
146
+ - pygments==2.10.0
147
+ - pyrsistent==0.18.0
148
+ - pyzmq==22.3.0
149
+ - requests==2.26.0
150
+ - requests-oauthlib==1.3.0
151
+ - rsa==4.7.2
152
+ - send2trash==1.8.0
153
+ - tensorboard==2.5.0
154
+ - tensorboard-data-server==0.6.1
155
+ - tensorboard-plugin-wit==1.8.0
156
+ - terminado==0.12.1
157
+ - testpath==0.5.0
158
+ - traitlets==5.1.0
159
+ - urllib3==1.26.6
160
+ - wcwidth==0.2.5
161
+ - webencodings==0.5.1
162
+ - werkzeug==2.0.1
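The pinned environment above (PyTorch 1.9, CUDA 10.2) should be reproducible with `conda env create -f environment.yml` followed by `conda activate gmflow`.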
basicsr/archs/gmflow/evaluate.py ADDED
@@ -0,0 +1,689 @@
1
+ from PIL import Image
2
+ import os
3
+ import time
4
+ import numpy as np
5
+ import torch
6
+ import torch.nn.functional as F
7
+
8
+ import data
9
+ from utils import frame_utils
10
+ from utils.flow_viz import save_vis_flow_tofile
11
+
12
+ from utils.utils import InputPadder, compute_out_of_boundary_mask
13
+ from glob import glob
14
+ from gmflow.geometry import forward_backward_consistency_check
15
+
16
+
17
+ @torch.no_grad()
18
+ def create_sintel_submission(model,
19
+ output_path='sintel_submission',
20
+ padding_factor=8,
21
+ save_vis_flow=False,
22
+ no_save_flo=False,
23
+ attn_splits_list=None,
24
+ corr_radius_list=None,
25
+ prop_radius_list=None,
26
+ ):
27
+ """ Create submission for the Sintel leaderboard """
28
+ model.eval()
29
+ for dstype in ['clean', 'final']:
30
+ test_dataset = data.MpiSintel(split='test', aug_params=None, dstype=dstype)
31
+
32
+ flow_prev, sequence_prev = None, None
33
+ for test_id in range(len(test_dataset)):
34
+ image1, image2, (sequence, frame) = test_dataset[test_id]
35
+ if sequence != sequence_prev:
36
+ flow_prev = None
37
+
38
+ padder = InputPadder(image1.shape, padding_factor=padding_factor)
39
+ image1, image2 = padder.pad(image1[None].cuda(), image2[None].cuda())
40
+
41
+ results_dict = model(image1, image2,
42
+ attn_splits_list=attn_splits_list,
43
+ corr_radius_list=corr_radius_list,
44
+ prop_radius_list=prop_radius_list,
45
+ )
46
+
47
+ flow_pr = results_dict['flow_preds'][-1] # [B, 2, H, W]
48
+
49
+ flow = padder.unpad(flow_pr[0]).permute(1, 2, 0).cpu().numpy()
50
+
51
+ output_dir = os.path.join(output_path, dstype, sequence)
52
+ output_file = os.path.join(output_dir, 'frame%04d.flo' % (frame + 1))
53
+
54
+ if not os.path.exists(output_dir):
55
+ os.makedirs(output_dir)
56
+
57
+ if not no_save_flo:
58
+ frame_utils.writeFlow(output_file, flow)
59
+ sequence_prev = sequence
60
+
61
+ # Save vis flow
62
+ if save_vis_flow:
63
+ vis_flow_file = output_file.replace('.flo', '.png')
64
+ save_vis_flow_tofile(flow, vis_flow_file)
65
+
66
+
67
+ @torch.no_grad()
68
+ def create_kitti_submission(model,
69
+ output_path='kitti_submission',
70
+ padding_factor=8,
71
+ save_vis_flow=False,
72
+ attn_splits_list=None,
73
+ corr_radius_list=None,
74
+ prop_radius_list=None,
75
+ ):
76
+ """ Create submission for the KITTI leaderboard """
77
+ model.eval()
78
+ test_dataset = data.KITTI(split='testing', aug_params=None)
79
+
80
+ if not os.path.exists(output_path):
81
+ os.makedirs(output_path)
82
+
83
+ for test_id in range(len(test_dataset)):
84
+ image1, image2, (frame_id,) = test_dataset[test_id]
85
+ padder = InputPadder(image1.shape, mode='kitti', padding_factor=padding_factor)
86
+ image1, image2 = padder.pad(image1[None].cuda(), image2[None].cuda())
87
+
88
+ results_dict = model(image1, image2,
89
+ attn_splits_list=attn_splits_list,
90
+ corr_radius_list=corr_radius_list,
91
+ prop_radius_list=prop_radius_list,
92
+ )
93
+
94
+ flow_pr = results_dict['flow_preds'][-1]
95
+
96
+ flow = padder.unpad(flow_pr[0]).permute(1, 2, 0).cpu().numpy()
97
+
98
+ output_filename = os.path.join(output_path, frame_id)
99
+
100
+ if save_vis_flow:
101
+ vis_flow_file = output_filename
102
+ save_vis_flow_tofile(flow, vis_flow_file)
103
+ else:
104
+ frame_utils.writeFlowKITTI(output_filename, flow)
105
+
106
+
107
+ @torch.no_grad()
108
+ def validate_chairs(model,
109
+ with_speed_metric=False,
110
+ attn_splits_list=False,
111
+ corr_radius_list=False,
112
+ prop_radius_list=False,
113
+ ):
114
+ """ Perform evaluation on the FlyingChairs (test) split """
115
+ model.eval()
116
+ epe_list = []
117
+ results = {}
118
+
119
+ if with_speed_metric:
120
+ s0_10_list = []
121
+ s10_40_list = []
122
+ s40plus_list = []
123
+
124
+ val_dataset = data.FlyingChairs(split='validation')
125
+
126
+ print('Number of validation image pairs: %d' % len(val_dataset))
127
+
128
+ for val_id in range(len(val_dataset)):
129
+ image1, image2, flow_gt, _ = val_dataset[val_id]
130
+
131
+ image1 = image1[None].cuda()
132
+ image2 = image2[None].cuda()
133
+
134
+ results_dict = model(image1, image2,
135
+ attn_splits_list=attn_splits_list,
136
+ corr_radius_list=corr_radius_list,
137
+ prop_radius_list=prop_radius_list,
138
+ )
139
+
140
+ flow_pr = results_dict['flow_preds'][-1] # [B, 2, H, W]
141
+
142
+ assert flow_pr.size()[-2:] == flow_gt.size()[-2:]
143
+
144
+ epe = torch.sum((flow_pr[0].cpu() - flow_gt) ** 2, dim=0).sqrt()
145
+ epe_list.append(epe.view(-1).numpy())
146
+
147
+ if with_speed_metric:
148
+ flow_gt_speed = torch.sum(flow_gt ** 2, dim=0).sqrt()
149
+ valid_mask = (flow_gt_speed < 10)
150
+ if valid_mask.max() > 0:
151
+ s0_10_list.append(epe[valid_mask].cpu().numpy())
152
+
153
+ valid_mask = (flow_gt_speed >= 10) * (flow_gt_speed <= 40)
154
+ if valid_mask.max() > 0:
155
+ s10_40_list.append(epe[valid_mask].cpu().numpy())
156
+
157
+ valid_mask = (flow_gt_speed > 40)
158
+ if valid_mask.max() > 0:
159
+ s40plus_list.append(epe[valid_mask].cpu().numpy())
160
+
161
+ epe_all = np.concatenate(epe_list)
162
+ epe = np.mean(epe_all)
163
+ px1 = np.mean(epe_all > 1)
164
+ px3 = np.mean(epe_all > 3)
165
+ px5 = np.mean(epe_all > 5)
166
+ print("Validation Chairs EPE: %.3f, 1px: %.3f, 3px: %.3f, 5px: %.3f" % (epe, px1, px3, px5))
167
+ results['chairs_epe'] = epe
168
+ results['chairs_1px'] = px1
169
+ results['chairs_3px'] = px3
170
+ results['chairs_5px'] = px5
171
+
172
+ if with_speed_metric:
173
+ s0_10 = np.mean(np.concatenate(s0_10_list))
174
+ s10_40 = np.mean(np.concatenate(s10_40_list))
175
+ s40plus = np.mean(np.concatenate(s40plus_list))
176
+
177
+ print("Validation Chairs s0_10: %.3f, s10_40: %.3f, s40+: %.3f" % (
178
+ s0_10,
179
+ s10_40,
180
+ s40plus))
181
+
182
+ results['chairs_s0_10'] = s0_10
183
+ results['chairs_s10_40'] = s10_40
184
+ results['chairs_s40+'] = s40plus
185
+
186
+ return results
187
+
188
+
189
+ @torch.no_grad()
190
+ def validate_things(model,
191
+ padding_factor=8,
192
+ with_speed_metric=False,
193
+ max_val_flow=400,
194
+ val_things_clean_only=True,
195
+ attn_splits_list=False,
196
+ corr_radius_list=False,
197
+ prop_radius_list=False,
198
+ ):
199
+ """ Perform validation using the Things (test) split """
200
+ model.eval()
201
+ results = {}
202
+
203
+ for dstype in ['frames_cleanpass', 'frames_finalpass']:
204
+ if val_things_clean_only:
205
+ if dstype == 'frames_finalpass':
206
+ continue
207
+
208
+ val_dataset = data.FlyingThings3D(dstype=dstype, test_set=True, validate_subset=True,
209
+ )
210
+ print('Number of validation image pairs: %d' % len(val_dataset))
211
+ epe_list = []
212
+
213
+ if with_speed_metric:
214
+ s0_10_list = []
215
+ s10_40_list = []
216
+ s40plus_list = []
217
+
218
+ for val_id in range(len(val_dataset)):
219
+ image1, image2, flow_gt, valid_gt = val_dataset[val_id]
220
+ image1 = image1[None].cuda()
221
+ image2 = image2[None].cuda()
222
+
223
+ padder = InputPadder(image1.shape, padding_factor=padding_factor)
224
+ image1, image2 = padder.pad(image1, image2)
225
+
226
+ results_dict = model(image1, image2,
227
+ attn_splits_list=attn_splits_list,
228
+ corr_radius_list=corr_radius_list,
229
+ prop_radius_list=prop_radius_list,
230
+ )
231
+ flow_pr = results_dict['flow_preds'][-1]
232
+
233
+ flow = padder.unpad(flow_pr[0]).cpu()
234
+
235
+ # Evaluation on flow <= max_val_flow
236
+ flow_gt_speed = torch.sum(flow_gt ** 2, dim=0).sqrt()
237
+ valid_gt = valid_gt * (flow_gt_speed < max_val_flow)
238
+ valid_gt = valid_gt.contiguous()
239
+
240
+ epe = torch.sum((flow - flow_gt) ** 2, dim=0).sqrt()
241
+ val = valid_gt >= 0.5
242
+ epe_list.append(epe[val].cpu().numpy())
243
+
244
+ if with_speed_metric:
245
+ valid_mask = (flow_gt_speed < 10) * (valid_gt >= 0.5)
246
+ if valid_mask.max() > 0:
247
+ s0_10_list.append(epe[valid_mask].cpu().numpy())
248
+
249
+ valid_mask = (flow_gt_speed >= 10) * (flow_gt_speed <= 40) * (valid_gt >= 0.5)
250
+ if valid_mask.max() > 0:
251
+ s10_40_list.append(epe[valid_mask].cpu().numpy())
252
+
253
+ valid_mask = (flow_gt_speed > 40) * (valid_gt >= 0.5)
254
+ if valid_mask.max() > 0:
255
+ s40plus_list.append(epe[valid_mask].cpu().numpy())
256
+
257
+ epe_list = np.mean(np.concatenate(epe_list))
258
+
259
+ epe = np.mean(epe_list)
260
+
261
+ if dstype == 'frames_cleanpass':
262
+ dstype = 'things_clean'
263
+ if dstype == 'frames_finalpass':
264
+ dstype = 'things_final'
265
+
266
+ print("Validation Things test set (%s) EPE: %.3f" % (dstype, epe))
267
+ results[dstype + '_epe'] = epe
268
+
269
+ if with_speed_metric:
270
+ s0_10 = np.mean(np.concatenate(s0_10_list))
271
+ s10_40 = np.mean(np.concatenate(s10_40_list))
272
+ s40plus = np.mean(np.concatenate(s40plus_list))
273
+
274
+ print("Validation Things test (%s) s0_10: %.3f, s10_40: %.3f, s40+: %.3f" % (
275
+ dstype, s0_10,
276
+ s10_40,
277
+ s40plus))
278
+
279
+ results[dstype + '_s0_10'] = s0_10
280
+ results[dstype + '_s10_40'] = s10_40
281
+ results[dstype + '_s40+'] = s40plus
282
+
283
+ return results
284
+
285
+
286
+ @torch.no_grad()
287
+ def validate_sintel(model,
288
+ count_time=False,
289
+ padding_factor=8,
290
+ with_speed_metric=False,
291
+ evaluate_matched_unmatched=False,
292
+ attn_splits_list=False,
293
+ corr_radius_list=False,
294
+ prop_radius_list=False,
295
+ ):
296
+ """ Perform validation using the Sintel (train) split """
297
+ model.eval()
298
+ results = {}
299
+
300
+ if count_time:
301
+ total_time = 0
302
+ num_runs = 100
303
+
304
+ for dstype in ['clean', 'final']:
305
+ val_dataset = data.MpiSintel(split='training', dstype=dstype,
306
+ load_occlusion=evaluate_matched_unmatched,
307
+ )
308
+
309
+ print('Number of validation image pairs: %d' % len(val_dataset))
310
+ epe_list = []
311
+
312
+ if evaluate_matched_unmatched:
313
+ matched_epe_list = []
314
+ unmatched_epe_list = []
315
+
316
+ if with_speed_metric:
317
+ s0_10_list = []
318
+ s10_40_list = []
319
+ s40plus_list = []
320
+
321
+ for val_id in range(len(val_dataset)):
322
+ if evaluate_matched_unmatched:
323
+ image1, image2, flow_gt, valid, noc_valid = val_dataset[val_id]
324
+
325
+ # compute in-image-plane valid mask
326
+ in_image_valid = compute_out_of_boundary_mask(flow_gt.unsqueeze(0)).squeeze(0) # [H, W]
327
+
328
+ else:
329
+ image1, image2, flow_gt, _ = val_dataset[val_id]
330
+
331
+ image1 = image1[None].cuda()
332
+ image2 = image2[None].cuda()
333
+
334
+ padder = InputPadder(image1.shape, padding_factor=padding_factor)
335
+ image1, image2 = padder.pad(image1, image2)
336
+
337
+ if count_time and val_id >= 5: # 5 warmup
338
+ torch.cuda.synchronize()
339
+ time_start = time.perf_counter()
340
+
341
+ results_dict = model(image1, image2,
342
+ attn_splits_list=attn_splits_list,
343
+ corr_radius_list=corr_radius_list,
344
+ prop_radius_list=prop_radius_list,
345
+ )
346
+
347
+ # useful when using parallel branches
348
+ flow_pr = results_dict['flow_preds'][-1]
349
+
350
+ if count_time and val_id >= 5:
351
+ torch.cuda.synchronize()
352
+ total_time += time.perf_counter() - time_start
353
+
354
+ if val_id >= num_runs + 4:
355
+ break
356
+
357
+ flow = padder.unpad(flow_pr[0]).cpu()
358
+
359
+ epe = torch.sum((flow - flow_gt) ** 2, dim=0).sqrt()
360
+ epe_list.append(epe.view(-1).numpy())
361
+
362
+ if evaluate_matched_unmatched:
363
+ matched_valid_mask = (noc_valid > 0.5) & (in_image_valid > 0.5)
364
+
365
+ if matched_valid_mask.max() > 0:
366
+ matched_epe_list.append(epe[matched_valid_mask].cpu().numpy())
367
+ unmatched_epe_list.append(epe[~matched_valid_mask].cpu().numpy())
368
+
369
+ if with_speed_metric:
370
+ flow_gt_speed = torch.sum(flow_gt ** 2, dim=0).sqrt()
371
+ valid_mask = (flow_gt_speed < 10)
372
+ if valid_mask.max() > 0:
373
+ s0_10_list.append(epe[valid_mask].cpu().numpy())
374
+
375
+ valid_mask = (flow_gt_speed >= 10) * (flow_gt_speed <= 40)
376
+ if valid_mask.max() > 0:
377
+ s10_40_list.append(epe[valid_mask].cpu().numpy())
378
+
379
+ valid_mask = (flow_gt_speed > 40)
380
+ if valid_mask.max() > 0:
381
+ s40plus_list.append(epe[valid_mask].cpu().numpy())
382
+
383
+ epe_all = np.concatenate(epe_list)
384
+ epe = np.mean(epe_all)
385
+ px1 = np.mean(epe_all > 1)
386
+ px3 = np.mean(epe_all > 3)
387
+ px5 = np.mean(epe_all > 5)
388
+
389
+ dstype_ori = dstype
390
+
391
+ print("Validation Sintel (%s) EPE: %.3f, 1px: %.3f, 3px: %.3f, 5px: %.3f" % (dstype_ori, epe, px1, px3, px5))
392
+
393
+ dstype = 'sintel_' + dstype
394
+
395
+ results[dstype + '_epe'] = np.mean(epe_list)
396
+ results[dstype + '_1px'] = px1
397
+ results[dstype + '_3px'] = px3
398
+ results[dstype + '_5px'] = px5
399
+
400
+ if with_speed_metric:
401
+ s0_10 = np.mean(np.concatenate(s0_10_list))
402
+ s10_40 = np.mean(np.concatenate(s10_40_list))
403
+ s40plus = np.mean(np.concatenate(s40plus_list))
404
+
405
+ print("Validation Sintel (%s) s0_10: %.3f, s10_40: %.3f, s40+: %.3f" % (
406
+ dstype_ori, s0_10,
407
+ s10_40,
408
+ s40plus))
409
+
410
+ results[dstype + '_s0_10'] = s0_10
411
+ results[dstype + '_s10_40'] = s10_40
412
+ results[dstype + '_s40+'] = s40plus
413
+
414
+ if count_time:
415
+ print('Time: %.6fs' % (total_time / num_runs))
416
+ break # only the clean pass when counting time
417
+
418
+ if evaluate_matched_unmatched:
419
+ matched_epe = np.mean(np.concatenate(matched_epe_list))
420
+ unmatched_epe = np.mean(np.concatenate(unmatched_epe_list))
421
+
422
+ print('Validation Sintel (%s) matched epe: %.3f, unmatched epe: %.3f' % (
423
+ dstype_ori, matched_epe, unmatched_epe))
424
+
425
+ results[dstype + '_matched'] = matched_epe
426
+ results[dstype + '_unmatched'] = unmatched_epe
427
+
428
+ return results
429
+
430
+
431
+ @torch.no_grad()
432
+ def validate_kitti(model,
433
+ padding_factor=8,
434
+ with_speed_metric=False,
435
+ average_over_pixels=True,
436
+ attn_splits_list=False,
437
+ corr_radius_list=False,
438
+ prop_radius_list=False,
439
+ ):
440
+ """ Perform validation using the KITTI-2015 (train) split """
441
+ model.eval()
442
+
443
+ val_dataset = data.KITTI(split='training')
444
+ print('Number of validation image pairs: %d' % len(val_dataset))
445
+
446
+ out_list, epe_list = [], []
447
+ results = {}
448
+
449
+ if with_speed_metric:
450
+ if average_over_pixels:
451
+ s0_10_list = []
452
+ s10_40_list = []
453
+ s40plus_list = []
454
+ else:
455
+ s0_10_epe_sum = 0
456
+ s0_10_valid_samples = 0
457
+ s10_40_epe_sum = 0
458
+ s10_40_valid_samples = 0
459
+ s40plus_epe_sum = 0
460
+ s40plus_valid_samples = 0
461
+
462
+ for val_id in range(len(val_dataset)):
463
+ image1, image2, flow_gt, valid_gt = val_dataset[val_id]
464
+ image1 = image1[None].cuda()
465
+ image2 = image2[None].cuda()
466
+
467
+ padder = InputPadder(image1.shape, mode='kitti', padding_factor=padding_factor)
468
+ image1, image2 = padder.pad(image1, image2)
469
+
470
+ results_dict = model(image1, image2,
471
+ attn_splits_list=attn_splits_list,
472
+ corr_radius_list=corr_radius_list,
473
+ prop_radius_list=prop_radius_list,
474
+ )
475
+
476
+ # useful when using parallel branches
477
+ flow_pr = results_dict['flow_preds'][-1]
478
+
479
+ flow = padder.unpad(flow_pr[0]).cpu()
480
+
481
+ epe = torch.sum((flow - flow_gt) ** 2, dim=0).sqrt()
482
+ mag = torch.sum(flow_gt ** 2, dim=0).sqrt()
483
+
484
+ if with_speed_metric:
485
+ # flow_gt_speed = torch.sum(flow_gt ** 2, dim=0).sqrt()
486
+ flow_gt_speed = mag
487
+
488
+ if average_over_pixels:
489
+ valid_mask = (flow_gt_speed < 10) * (valid_gt >= 0.5) # note KITTI GT is sparse
490
+ if valid_mask.max() > 0:
491
+ s0_10_list.append(epe[valid_mask].cpu().numpy())
492
+
493
+ valid_mask = (flow_gt_speed >= 10) * (flow_gt_speed <= 40) * (valid_gt >= 0.5)
494
+ if valid_mask.max() > 0:
495
+ s10_40_list.append(epe[valid_mask].cpu().numpy())
496
+
497
+ valid_mask = (flow_gt_speed > 40) * (valid_gt >= 0.5)
498
+ if valid_mask.max() > 0:
499
+ s40plus_list.append(epe[valid_mask].cpu().numpy())
500
+
501
+ else:
502
+ valid_mask = (flow_gt_speed < 10) * (valid_gt >= 0.5) # note KITTI GT is sparse
503
+ if valid_mask.max() > 0:
504
+ s0_10_epe_sum += (epe * valid_mask).sum() / valid_mask.sum()
505
+ s0_10_valid_samples += 1
506
+
507
+ valid_mask = (flow_gt_speed >= 10) * (flow_gt_speed <= 40) * (valid_gt >= 0.5)
508
+ if valid_mask.max() > 0:
509
+ s10_40_epe_sum += (epe * valid_mask).sum() / valid_mask.sum()
510
+ s10_40_valid_samples += 1
511
+
512
+ valid_mask = (flow_gt_speed > 40) * (valid_gt >= 0.5)
513
+ if valid_mask.max() > 0:
514
+ s40plus_epe_sum += (epe * valid_mask).sum() / valid_mask.sum()
515
+ s40plus_valid_samples += 1
516
+
517
+ epe = epe.view(-1)
518
+ mag = mag.view(-1)
519
+ val = valid_gt.view(-1) >= 0.5
520
+
521
+ out = ((epe > 3.0) & ((epe / mag) > 0.05)).float()
522
+
523
+ if average_over_pixels:
524
+ epe_list.append(epe[val].cpu().numpy())
525
+ else:
526
+ epe_list.append(epe[val].mean().item())
527
+
528
+ out_list.append(out[val].cpu().numpy())
529
+
530
+ if average_over_pixels:
531
+ epe_list = np.concatenate(epe_list)
532
+ else:
533
+ epe_list = np.array(epe_list)
534
+ out_list = np.concatenate(out_list)
535
+
536
+ epe = np.mean(epe_list)
537
+ f1 = 100 * np.mean(out_list)
538
+
539
+ print("Validation KITTI EPE: %.3f, F1-all: %.3f" % (epe, f1))
540
+ results['kitti_epe'] = epe
541
+ results['kitti_f1'] = f1
542
+
543
+ if with_speed_metric:
544
+ if average_over_pixels:
545
+ s0_10 = np.mean(np.concatenate(s0_10_list))
546
+ s10_40 = np.mean(np.concatenate(s10_40_list))
547
+ s40plus = np.mean(np.concatenate(s40plus_list))
548
+ else:
549
+ s0_10 = s0_10_epe_sum / s0_10_valid_samples
550
+ s10_40 = s10_40_epe_sum / s10_40_valid_samples
551
+ s40plus = s40plus_epe_sum / s40plus_valid_samples
552
+
553
+ print("Validation KITTI s0_10: %.3f, s10_40: %.3f, s40+: %.3f" % (
554
+ s0_10,
555
+ s10_40,
556
+ s40plus))
557
+
558
+ results['kitti_s0_10'] = s0_10
559
+ results['kitti_s10_40'] = s10_40
560
+ results['kitti_s40+'] = s40plus
561
+
562
+ return results
563
+
564
+
565
+ @torch.no_grad()
566
+ def inference_on_dir(model,
567
+ inference_dir,
568
+ output_path='output',
569
+ padding_factor=8,
570
+ inference_size=None,
571
+ paired_data=False, # dir of paired test data instead of a sequence
572
+ save_flo_flow=False, # save as .flo for quantitative evaluation
573
+ attn_splits_list=None,
574
+ corr_radius_list=None,
575
+ prop_radius_list=None,
576
+ pred_bidir_flow=False,
577
+ fwd_bwd_consistency_check=False,
578
+ ):
579
+ """ Inference on a directory """
580
+ model.eval()
581
+
582
+ if fwd_bwd_consistency_check:
583
+ assert pred_bidir_flow
584
+
585
+ if not os.path.exists(output_path):
586
+ os.makedirs(output_path)
587
+
588
+ filenames = sorted(glob(inference_dir + '/*'))
589
+ print('%d images found' % len(filenames))
590
+
591
+ stride = 2 if paired_data else 1
592
+
593
+ if paired_data:
594
+ assert len(filenames) % 2 == 0
595
+
596
+ for test_id in range(0, len(filenames) - 1, stride):
597
+
598
+ image1 = frame_utils.read_gen(filenames[test_id])
599
+ image2 = frame_utils.read_gen(filenames[test_id + 1])
600
+
601
+ image1 = np.array(image1).astype(np.uint8)
602
+ image2 = np.array(image2).astype(np.uint8)
603
+
604
+ if len(image1.shape) == 2: # gray image, for example, HD1K
605
+ image1 = np.tile(image1[..., None], (1, 1, 3))
606
+ image2 = np.tile(image2[..., None], (1, 1, 3))
607
+ else:
608
+ image1 = image1[..., :3]
609
+ image2 = image2[..., :3]
610
+
611
+ image1 = torch.from_numpy(image1).permute(2, 0, 1).float()
612
+ image2 = torch.from_numpy(image2).permute(2, 0, 1).float()
613
+
614
+ if inference_size is None:
615
+ padder = InputPadder(image1.shape, padding_factor=padding_factor)
616
+ image1, image2 = padder.pad(image1[None].cuda(), image2[None].cuda())
617
+ else:
618
+ image1, image2 = image1[None].cuda(), image2[None].cuda()
619
+
620
+ # resize before inference
621
+ if inference_size is not None:
622
+ assert isinstance(inference_size, list) or isinstance(inference_size, tuple)
623
+ ori_size = image1.shape[-2:]
624
+ image1 = F.interpolate(image1, size=inference_size, mode='bilinear',
625
+ align_corners=True)
626
+ image2 = F.interpolate(image2, size=inference_size, mode='bilinear',
627
+ align_corners=True)
628
+
629
+ results_dict = model(image1, image2,
630
+ attn_splits_list=attn_splits_list,
631
+ corr_radius_list=corr_radius_list,
632
+ prop_radius_list=prop_radius_list,
633
+ pred_bidir_flow=pred_bidir_flow,
634
+ )
635
+
636
+ flow_pr = results_dict['flow_preds'][-1] # [B, 2, H, W]
637
+
638
+ # resize back
639
+ if inference_size is not None:
640
+ flow_pr = F.interpolate(flow_pr, size=ori_size, mode='bilinear',
641
+ align_corners=True)
642
+ flow_pr[:, 0] = flow_pr[:, 0] * ori_size[-1] / inference_size[-1]
643
+ flow_pr[:, 1] = flow_pr[:, 1] * ori_size[-2] / inference_size[-2]
644
+
645
+ if inference_size is None:
646
+ flow = padder.unpad(flow_pr[0]).permute(1, 2, 0).cpu().numpy() # [H, W, 2]
647
+ else:
648
+ flow = flow_pr[0].permute(1, 2, 0).cpu().numpy() # [H, W, 2]
649
+
650
+ output_file = os.path.join(output_path, os.path.basename(filenames[test_id])[:-4] + '_flow.png')
651
+
652
+ # save vis flow
653
+ save_vis_flow_tofile(flow, output_file)
654
+
655
+ # also predict backward flow
656
+ if pred_bidir_flow:
657
+ assert flow_pr.size(0) == 2 # [2, H, W, 2]
658
+
659
+ if inference_size is None:
660
+ flow_bwd = padder.unpad(flow_pr[1]).permute(1, 2, 0).cpu().numpy() # [H, W, 2]
661
+ else:
662
+ flow_bwd = flow_pr[1].permute(1, 2, 0).cpu().numpy() # [H, W, 2]
663
+
664
+ output_file = os.path.join(output_path, os.path.basename(filenames[test_id])[:-4] + '_flow_bwd.png')
665
+
666
+ # save vis flow
667
+ save_vis_flow_tofile(flow_bwd, output_file)
668
+
669
+ # forward-backward consistency check
670
+ # occlusion is 1
671
+ if fwd_bwd_consistency_check:
672
+ if inference_size is None:
673
+ fwd_flow = padder.unpad(flow_pr[0]).unsqueeze(0) # [1, 2, H, W]
674
+ bwd_flow = padder.unpad(flow_pr[1]).unsqueeze(0) # [1, 2, H, W]
675
+ else:
676
+ fwd_flow = flow_pr[0].unsqueeze(0)
677
+ bwd_flow = flow_pr[1].unsqueeze(0)
678
+
679
+ fwd_occ, bwd_occ = forward_backward_consistency_check(fwd_flow, bwd_flow) # [1, H, W] float
680
+
681
+ fwd_occ_file = os.path.join(output_path, os.path.basename(filenames[test_id])[:-4] + '_occ.png')
682
+ bwd_occ_file = os.path.join(output_path, os.path.basename(filenames[test_id])[:-4] + '_occ_bwd.png')
683
+
684
+ Image.fromarray((fwd_occ[0].cpu().numpy() * 255.).astype(np.uint8)).save(fwd_occ_file)
685
+ Image.fromarray((bwd_occ[0].cpu().numpy() * 255.).astype(np.uint8)).save(bwd_occ_file)
686
+
687
+ if save_flo_flow:
688
+ output_file = os.path.join(output_path, os.path.basename(filenames[test_id])[:-4] + '_pred.flo')
689
+ frame_utils.writeFlow(output_file, flow)
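A hedged sketch of calling one of these evaluation entry points directly. The checkpoint path is illustrative, the checkpoint is assumed to follow the upstream layout ({'model': state_dict}), the attn/corr/prop settings are the usual single-scale GMFlow values, and the Sintel data is expected under datasets/Sintel on a CUDA machine:

    import torch
    from gmflow.gmflow import GMFlow
    from evaluate import validate_sintel

    model = GMFlow(num_scales=1, upsample_factor=8).cuda()
    ckpt = torch.load('pretrained/gmflow_sintel.pth')  # hypothetical checkpoint path
    model.load_state_dict(ckpt['model'], strict=False)

    results = validate_sintel(model,
                              padding_factor=8,
                              with_speed_metric=True,
                              attn_splits_list=[2],
                              corr_radius_list=[-1],
                              prop_radius_list=[-1])
    print(results)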
basicsr/archs/gmflow/gmflow/__init__.py ADDED
File without changes
basicsr/archs/gmflow/gmflow/backbone.py ADDED
@@ -0,0 +1,117 @@
1
+ import torch.nn as nn
2
+
3
+ from .trident_conv import MultiScaleTridentConv
4
+
5
+
6
+ class ResidualBlock(nn.Module):
7
+ def __init__(self, in_planes, planes, norm_layer=nn.InstanceNorm2d, stride=1, dilation=1,
8
+ ):
9
+ super(ResidualBlock, self).__init__()
10
+
11
+ self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
12
+ dilation=dilation, padding=dilation, stride=stride, bias=False)
13
+ self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
14
+ dilation=dilation, padding=dilation, bias=False)
15
+ self.relu = nn.ReLU(inplace=True)
16
+
17
+ self.norm1 = norm_layer(planes)
18
+ self.norm2 = norm_layer(planes)
19
+ if not stride == 1 or in_planes != planes:
20
+ self.norm3 = norm_layer(planes)
21
+
22
+ if stride == 1 and in_planes == planes:
23
+ self.downsample = None
24
+ else:
25
+ self.downsample = nn.Sequential(
26
+ nn.Conv2d(in_planes, planes, kernel_size=1, stride=stride), self.norm3)
27
+
28
+ def forward(self, x):
29
+ y = x
30
+ y = self.relu(self.norm1(self.conv1(y)))
31
+ y = self.relu(self.norm2(self.conv2(y)))
32
+
33
+ if self.downsample is not None:
34
+ x = self.downsample(x)
35
+
36
+ return self.relu(x + y)
37
+
38
+
39
+ class CNNEncoder(nn.Module):
40
+ def __init__(self, output_dim=128,
41
+ norm_layer=nn.InstanceNorm2d,
42
+ num_output_scales=1,
43
+ **kwargs,
44
+ ):
45
+ super(CNNEncoder, self).__init__()
46
+ self.num_branch = num_output_scales
47
+
48
+ feature_dims = [64, 96, 128]
49
+
50
+ self.conv1 = nn.Conv2d(3, feature_dims[0], kernel_size=7, stride=2, padding=3, bias=False) # 1/2
51
+ self.norm1 = norm_layer(feature_dims[0])
52
+ self.relu1 = nn.ReLU(inplace=True)
53
+
54
+ self.in_planes = feature_dims[0]
55
+ self.layer1 = self._make_layer(feature_dims[0], stride=1, norm_layer=norm_layer) # 1/2
56
+ self.layer2 = self._make_layer(feature_dims[1], stride=2, norm_layer=norm_layer) # 1/4
57
+
58
+ # highest resolution 1/4 or 1/8
59
+ stride = 2 if num_output_scales == 1 else 1
60
+ self.layer3 = self._make_layer(feature_dims[2], stride=stride,
61
+ norm_layer=norm_layer,
62
+ ) # 1/4 or 1/8
63
+
64
+ self.conv2 = nn.Conv2d(feature_dims[2], output_dim, 1, 1, 0)
65
+
66
+ if self.num_branch > 1:
67
+ if self.num_branch == 4:
68
+ strides = (1, 2, 4, 8)
69
+ elif self.num_branch == 3:
70
+ strides = (1, 2, 4)
71
+ elif self.num_branch == 2:
72
+ strides = (1, 2)
73
+ else:
74
+ raise ValueError
75
+
76
+ self.trident_conv = MultiScaleTridentConv(output_dim, output_dim,
77
+ kernel_size=3,
78
+ strides=strides,
79
+ paddings=1,
80
+ num_branch=self.num_branch,
81
+ )
82
+
83
+ for m in self.modules():
84
+ if isinstance(m, nn.Conv2d):
85
+ nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
86
+ elif isinstance(m, (nn.BatchNorm2d, nn.InstanceNorm2d, nn.GroupNorm)):
87
+ if m.weight is not None:
88
+ nn.init.constant_(m.weight, 1)
89
+ if m.bias is not None:
90
+ nn.init.constant_(m.bias, 0)
91
+
92
+ def _make_layer(self, dim, stride=1, dilation=1, norm_layer=nn.InstanceNorm2d):
93
+ layer1 = ResidualBlock(self.in_planes, dim, norm_layer=norm_layer, stride=stride, dilation=dilation)
94
+ layer2 = ResidualBlock(dim, dim, norm_layer=norm_layer, stride=1, dilation=dilation)
95
+
96
+ layers = (layer1, layer2)
97
+
98
+ self.in_planes = dim
99
+ return nn.Sequential(*layers)
100
+
101
+ def forward(self, x):
102
+ x = self.conv1(x)
103
+ x = self.norm1(x)
104
+ x = self.relu1(x)
105
+
106
+ x = self.layer1(x) # 1/2
107
+ x = self.layer2(x) # 1/4
108
+ x = self.layer3(x) # 1/8 or 1/4
109
+
110
+ x = self.conv2(x)
111
+
112
+ if self.num_branch > 1:
113
+ out = self.trident_conv([x] * self.num_branch) # high to low res
114
+ else:
115
+ out = [x]
116
+
117
+ return out
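A quick shape check for the encoder above (a sketch only; the input resolution is arbitrary). With a single output scale, layer3 uses stride 2, so the feature map comes out at 1/8 of the input size:

    import torch

    encoder = CNNEncoder(output_dim=128, num_output_scales=1)
    feats = encoder(torch.randn(1, 3, 64, 64))
    print(feats[0].shape)  # torch.Size([1, 128, 8, 8]) -- a single 1/8-resolution feature map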
basicsr/archs/gmflow/gmflow/geometry.py ADDED
@@ -0,0 +1,96 @@
1
+ import torch
2
+ import torch.nn.functional as F
3
+
4
+
5
+ def coords_grid(b, h, w, homogeneous=False, device=None):
6
+ y, x = torch.meshgrid(torch.arange(h), torch.arange(w)) # [H, W]
7
+
8
+ stacks = [x, y]
9
+
10
+ if homogeneous:
11
+ ones = torch.ones_like(x) # [H, W]
12
+ stacks.append(ones)
13
+
14
+ grid = torch.stack(stacks, dim=0).float() # [2, H, W] or [3, H, W]
15
+
16
+ grid = grid[None].repeat(b, 1, 1, 1) # [B, 2, H, W] or [B, 3, H, W]
17
+
18
+ if device is not None:
19
+ grid = grid.to(device)
20
+
21
+ return grid
22
+
23
+
24
+ def generate_window_grid(h_min, h_max, w_min, w_max, len_h, len_w, device=None):
25
+ assert device is not None
26
+
27
+ x, y = torch.meshgrid([torch.linspace(w_min, w_max, len_w, device=device),
28
+ torch.linspace(h_min, h_max, len_h, device=device)],
29
+ )
30
+ grid = torch.stack((x, y), -1).transpose(0, 1).float() # [H, W, 2]
31
+
32
+ return grid
33
+
34
+
35
+ def normalize_coords(coords, h, w):
36
+ # coords: [B, H, W, 2]
37
+ c = torch.Tensor([(w - 1) / 2., (h - 1) / 2.]).float().to(coords.device)
38
+ return (coords - c) / c # [-1, 1]
39
+
40
+
41
+ def bilinear_sample(img, sample_coords, mode='bilinear', padding_mode='zeros', return_mask=False):
42
+ # img: [B, C, H, W]
43
+ # sample_coords: [B, 2, H, W] in image scale
44
+ if sample_coords.size(1) != 2: # [B, H, W, 2]
45
+ sample_coords = sample_coords.permute(0, 3, 1, 2)
46
+
47
+ b, _, h, w = sample_coords.shape
48
+
49
+ # Normalize to [-1, 1]
50
+ x_grid = 2 * sample_coords[:, 0] / (w - 1) - 1
51
+ y_grid = 2 * sample_coords[:, 1] / (h - 1) - 1
52
+
53
+ grid = torch.stack([x_grid, y_grid], dim=-1) # [B, H, W, 2]
54
+
55
+ img = F.grid_sample(img, grid, mode=mode, padding_mode=padding_mode, align_corners=True)
56
+
57
+ if return_mask:
58
+ mask = (x_grid >= -1) & (y_grid >= -1) & (x_grid <= 1) & (y_grid <= 1) # [B, H, W]
59
+
60
+ return img, mask
61
+
62
+ return img
63
+
64
+
65
+ def flow_warp(feature, flow, mask=False, padding_mode='zeros'):
66
+ b, c, h, w = feature.size()
67
+ assert flow.size(1) == 2
68
+
69
+ grid = coords_grid(b, h, w).to(flow.device) + flow # [B, 2, H, W]
70
+
71
+ return bilinear_sample(feature, grid, padding_mode=padding_mode,
72
+ return_mask=mask)
73
+
74
+
75
+ def forward_backward_consistency_check(fwd_flow, bwd_flow,
76
+ alpha=0.01,
77
+ beta=0.5
78
+ ):
79
+ # fwd_flow, bwd_flow: [B, 2, H, W]
80
+ # the alpha and beta values follow UnFlow (https://arxiv.org/abs/1711.07837)
81
+ assert fwd_flow.dim() == 4 and bwd_flow.dim() == 4
82
+ assert fwd_flow.size(1) == 2 and bwd_flow.size(1) == 2
83
+ flow_mag = torch.norm(fwd_flow, dim=1) + torch.norm(bwd_flow, dim=1) # [B, H, W]
84
+
85
+ warped_bwd_flow = flow_warp(bwd_flow, fwd_flow) # [B, 2, H, W]
86
+ warped_fwd_flow = flow_warp(fwd_flow, bwd_flow) # [B, 2, H, W]
87
+
88
+ diff_fwd = torch.norm(fwd_flow + warped_bwd_flow, dim=1) # [B, H, W]
89
+ diff_bwd = torch.norm(bwd_flow + warped_fwd_flow, dim=1)
90
+
91
+ threshold = alpha * flow_mag + beta
92
+
93
+ fwd_occ = (diff_fwd > threshold).float() # [B, H, W]
94
+ bwd_occ = (diff_bwd > threshold).float()
95
+
96
+ return fwd_occ, bwd_occ
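A minimal sketch of the consistency check above on dummy flows. Zero forward and backward flows are trivially consistent, so no pixel is flagged as occluded:

    import torch

    fwd_flow = torch.zeros(1, 2, 32, 32)
    bwd_flow = torch.zeros(1, 2, 32, 32)
    fwd_occ, bwd_occ = forward_backward_consistency_check(fwd_flow, bwd_flow)
    print(fwd_occ.shape, fwd_occ.sum().item())  # torch.Size([1, 32, 32]) 0.0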
basicsr/archs/gmflow/gmflow/gmflow.py ADDED
@@ -0,0 +1,170 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ from .backbone import CNNEncoder
6
+ from .transformer import FeatureTransformer, FeatureFlowAttention
7
+ from .matching import global_correlation_softmax, local_correlation_softmax
8
+ from .geometry import flow_warp
9
+ from .utils import normalize_img, feature_add_position
10
+
11
+
12
+ class GMFlow(nn.Module):
13
+ def __init__(self,
14
+ num_scales=1,
15
+ upsample_factor=8,
16
+ feature_channels=128,
17
+ attention_type='swin',
18
+ num_transformer_layers=6,
19
+ ffn_dim_expansion=4,
20
+ num_head=1,
21
+ **kwargs,
22
+ ):
23
+ super(GMFlow, self).__init__()
24
+
25
+ self.num_scales = num_scales
26
+ self.feature_channels = feature_channels
27
+ self.upsample_factor = upsample_factor
28
+ self.attention_type = attention_type
29
+ self.num_transformer_layers = num_transformer_layers
30
+
31
+ # CNN backbone
32
+ self.backbone = CNNEncoder(output_dim=feature_channels, num_output_scales=num_scales)
33
+
34
+ # Transformer
35
+ self.transformer = FeatureTransformer(num_layers=num_transformer_layers,
36
+ d_model=feature_channels,
37
+ nhead=num_head,
38
+ attention_type=attention_type,
39
+ ffn_dim_expansion=ffn_dim_expansion,
40
+ )
41
+
42
+ # flow propagation with self-attn
43
+ self.feature_flow_attn = FeatureFlowAttention(in_channels=feature_channels)
44
+
45
+ # convex upsampling: concat feature0 and flow as input
46
+ self.upsampler = nn.Sequential(nn.Conv2d(2 + feature_channels, 256, 3, 1, 1),
47
+ nn.ReLU(inplace=True),
48
+ nn.Conv2d(256, upsample_factor ** 2 * 9, 1, 1, 0))
49
+
50
+ def extract_feature(self, img0, img1):
51
+ concat = torch.cat((img0, img1), dim=0) # [2B, C, H, W]
52
+ features = self.backbone(concat) # list of [2B, C, H, W], resolution from high to low
53
+
54
+ # reverse: resolution from low to high
55
+ features = features[::-1]
56
+
57
+ feature0, feature1 = [], []
58
+
59
+ for i in range(len(features)):
60
+ feature = features[i]
61
+ chunks = torch.chunk(feature, 2, 0) # tuple
62
+ feature0.append(chunks[0])
63
+ feature1.append(chunks[1])
64
+
65
+ return feature0, feature1
66
+
67
+ def upsample_flow(self, flow, feature, bilinear=False, upsample_factor=8,
68
+ ):
69
+ if bilinear:
70
+ up_flow = F.interpolate(flow, scale_factor=upsample_factor,
71
+ mode='bilinear', align_corners=True) * upsample_factor
72
+
73
+ else:
74
+ # convex upsampling
75
+ concat = torch.cat((flow, feature), dim=1)
76
+
77
+ mask = self.upsampler(concat)
78
+ b, flow_channel, h, w = flow.shape
79
+ mask = mask.view(b, 1, 9, self.upsample_factor, self.upsample_factor, h, w) # [B, 1, 9, K, K, H, W]
80
+ mask = torch.softmax(mask, dim=2)
81
+
82
+ up_flow = F.unfold(self.upsample_factor * flow, [3, 3], padding=1)
83
+ up_flow = up_flow.view(b, flow_channel, 9, 1, 1, h, w) # [B, 2, 9, 1, 1, H, W]
84
+
85
+ up_flow = torch.sum(mask * up_flow, dim=2) # [B, 2, K, K, H, W]
86
+ up_flow = up_flow.permute(0, 1, 4, 2, 5, 3) # [B, 2, K, H, K, W]
87
+ up_flow = up_flow.reshape(b, flow_channel, self.upsample_factor * h,
88
+ self.upsample_factor * w) # [B, 2, K*H, K*W]
89
+
90
+ return up_flow
91
+
92
+ def forward(self, img0, img1,
93
+ attn_splits_list=None,
94
+ corr_radius_list=None,
95
+ prop_radius_list=None,
96
+ pred_bidir_flow=False,
97
+ **kwargs,
98
+ ):
99
+
100
+ results_dict = {}
101
+ flow_preds = []
102
+
103
+ img0, img1 = normalize_img(img0, img1) # [B, 3, H, W]
104
+
105
+ # resolution low to high
106
+ feature0_list, feature1_list = self.extract_feature(img0, img1) # list of features
107
+
108
+ flow = None
109
+
110
+ assert len(attn_splits_list) == len(corr_radius_list) == len(prop_radius_list) == self.num_scales
111
+
112
+ for scale_idx in range(self.num_scales):
113
+ feature0, feature1 = feature0_list[scale_idx], feature1_list[scale_idx]
114
+
115
+ if pred_bidir_flow and scale_idx > 0:
116
+ # predicting bidirectional flow with refinement
117
+ feature0, feature1 = torch.cat((feature0, feature1), dim=0), torch.cat((feature1, feature0), dim=0)
118
+
119
+ upsample_factor = self.upsample_factor * (2 ** (self.num_scales - 1 - scale_idx))
120
+
121
+ if scale_idx > 0:
122
+ flow = F.interpolate(flow, scale_factor=2, mode='bilinear', align_corners=True) * 2
123
+
124
+ if flow is not None:
125
+ flow = flow.detach()
126
+ feature1 = flow_warp(feature1, flow) # [B, C, H, W]
127
+
128
+ attn_splits = attn_splits_list[scale_idx]
129
+ corr_radius = corr_radius_list[scale_idx]
130
+ prop_radius = prop_radius_list[scale_idx]
131
+
132
+ # add position to features
133
+ feature0, feature1 = feature_add_position(feature0, feature1, attn_splits, self.feature_channels)
134
+
135
+ # Transformer
136
+ feature0, feature1 = self.transformer(feature0, feature1, attn_num_splits=attn_splits)
137
+
138
+ # correlation and softmax
139
+ if corr_radius == -1: # global matching
140
+ flow_pred = global_correlation_softmax(feature0, feature1, pred_bidir_flow)[0]
141
+ else: # local matching
142
+ flow_pred = local_correlation_softmax(feature0, feature1, corr_radius)[0]
143
+
144
+ # flow or residual flow
145
+ flow = flow + flow_pred if flow is not None else flow_pred
146
+
147
+ # upsample to the original resolution for supervision
148
+ if self.training: # only need to upsample intermediate flow predictions at training time
149
+ flow_bilinear = self.upsample_flow(flow, None, bilinear=True, upsample_factor=upsample_factor)
150
+ flow_preds.append(flow_bilinear)
151
+
152
+ # flow propagation with self-attn
153
+ if pred_bidir_flow and scale_idx == 0:
154
+ feature0 = torch.cat((feature0, feature1), dim=0) # [2*B, C, H, W] for propagation
155
+ flow = self.feature_flow_attn(feature0, flow.detach(),
156
+ local_window_attn=prop_radius > 0,
157
+ local_window_radius=prop_radius)
158
+
159
+ # bilinear upsampling at training time, except for the last scale (which uses convex upsampling below)
160
+ if self.training and scale_idx < self.num_scales - 1:
161
+ flow_up = self.upsample_flow(flow, feature0, bilinear=True, upsample_factor=upsample_factor)
162
+ flow_preds.append(flow_up)
163
+
164
+ if scale_idx == self.num_scales - 1:
165
+ flow_up = self.upsample_flow(flow, feature0)
166
+ flow_preds.append(flow_up)
167
+
168
+ results_dict.update({'flow_preds': flow_preds})
169
+
170
+ return results_dict
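For reference, a minimal self-contained sketch of the convex upsampling performed in upsample_flow above, with a random tensor standing in for the learned upsampler output (shapes follow the comments in the code):

import torch
import torch.nn.functional as F

b, h, w, K = 1, 8, 12, 8                    # K = upsample_factor
flow = torch.randn(b, 2, h, w)              # coarse flow
mask = torch.randn(b, 9 * K * K, h, w)      # stands in for self.upsampler(concat)

mask = mask.view(b, 1, 9, K, K, h, w).softmax(dim=2)   # convex weights over the 3x3 neighborhood
up_flow = F.unfold(K * flow, [3, 3], padding=1)        # [B, 2*9, H*W]
up_flow = up_flow.view(b, 2, 9, 1, 1, h, w)
up_flow = (mask * up_flow).sum(dim=2)                  # [B, 2, K, K, H, W]
up_flow = up_flow.permute(0, 1, 4, 2, 5, 3).reshape(b, 2, K * h, K * w)
print(up_flow.shape)                                   # torch.Size([1, 2, 64, 96])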
basicsr/archs/gmflow/gmflow/matching.py ADDED
@@ -0,0 +1,83 @@
1
+ import torch
2
+ import torch.nn.functional as F
3
+
4
+ from .geometry import coords_grid, generate_window_grid, normalize_coords
5
+
6
+
7
+ def global_correlation_softmax(feature0, feature1,
8
+ pred_bidir_flow=False,
9
+ ):
10
+ # global correlation
11
+ b, c, h, w = feature0.shape
12
+ feature0 = feature0.view(b, c, -1).permute(0, 2, 1) # [B, H*W, C]
13
+ feature1 = feature1.view(b, c, -1) # [B, C, H*W]
14
+
15
+ correlation = torch.matmul(feature0, feature1).view(b, h, w, h, w) / (c ** 0.5) # [B, H, W, H, W]
16
+
17
+ # flow from softmax
18
+ init_grid = coords_grid(b, h, w).to(correlation.device) # [B, 2, H, W]
19
+ grid = init_grid.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2]
20
+
21
+ correlation = correlation.view(b, h * w, h * w) # [B, H*W, H*W]
22
+
23
+ if pred_bidir_flow:
24
+ correlation = torch.cat((correlation, correlation.permute(0, 2, 1)), dim=0) # [2*B, H*W, H*W]
25
+ init_grid = init_grid.repeat(2, 1, 1, 1) # [2*B, 2, H, W]
26
+ grid = grid.repeat(2, 1, 1) # [2*B, H*W, 2]
27
+ b = b * 2
28
+
29
+ prob = F.softmax(correlation, dim=-1) # [B, H*W, H*W]
30
+
31
+ correspondence = torch.matmul(prob, grid).view(b, h, w, 2).permute(0, 3, 1, 2) # [B, 2, H, W]
32
+
33
+ # when predicting bidirectional flow, flow is the concatenation of forward flow and backward flow
34
+ flow = correspondence - init_grid
35
+
36
+ return flow, prob
37
+
38
+
39
+ def local_correlation_softmax(feature0, feature1, local_radius,
40
+ padding_mode='zeros',
41
+ ):
42
+ b, c, h, w = feature0.size()
43
+ coords_init = coords_grid(b, h, w).to(feature0.device) # [B, 2, H, W]
44
+ coords = coords_init.view(b, 2, -1).permute(0, 2, 1) # [B, H*W, 2]
45
+
46
+ local_h = 2 * local_radius + 1
47
+ local_w = 2 * local_radius + 1
48
+
49
+ window_grid = generate_window_grid(-local_radius, local_radius,
50
+ -local_radius, local_radius,
51
+ local_h, local_w, device=feature0.device) # [2R+1, 2R+1, 2]
52
+ window_grid = window_grid.reshape(-1, 2).repeat(b, 1, 1, 1) # [B, 1, (2R+1)^2, 2]
53
+ sample_coords = coords.unsqueeze(-2) + window_grid # [B, H*W, (2R+1)^2, 2]
54
+
55
+ sample_coords_softmax = sample_coords
56
+
57
+ # exclude coords that are out of image space
58
+ valid_x = (sample_coords[:, :, :, 0] >= 0) & (sample_coords[:, :, :, 0] < w) # [B, H*W, (2R+1)^2]
59
+ valid_y = (sample_coords[:, :, :, 1] >= 0) & (sample_coords[:, :, :, 1] < h) # [B, H*W, (2R+1)^2]
60
+
61
+ valid = valid_x & valid_y # [B, H*W, (2R+1)^2], used to mask out invalid values when softmax
62
+
63
+ # normalize coordinates to [-1, 1]
64
+ sample_coords_norm = normalize_coords(sample_coords, h, w) # [-1, 1]
65
+ window_feature = F.grid_sample(feature1, sample_coords_norm,
66
+ padding_mode=padding_mode, align_corners=True
67
+ ).permute(0, 2, 1, 3) # [B, H*W, C, (2R+1)^2]
68
+ feature0_view = feature0.permute(0, 2, 3, 1).view(b, h * w, 1, c) # [B, H*W, 1, C]
69
+
70
+ corr = torch.matmul(feature0_view, window_feature).view(b, h * w, -1) / (c ** 0.5) # [B, H*W, (2R+1)^2]
71
+
72
+ # mask invalid locations
73
+ corr[~valid] = -1e9
74
+
75
+ prob = F.softmax(corr, -1) # [B, H*W, (2R+1)^2]
76
+
77
+ correspondence = torch.matmul(prob.unsqueeze(-2), sample_coords_softmax).squeeze(-2).view(
78
+ b, h, w, 2).permute(0, 3, 1, 2) # [B, 2, H, W]
79
+
80
+ flow = correspondence - coords_init
81
+ match_prob = prob
82
+
83
+ return flow, match_prob
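Assuming this module and its geometry helpers are importable, a quick shape check of global_correlation_softmax on dummy features (the sizes below are arbitrary; in GMFlow the features come from the backbone at 1/8 resolution):

import torch

b, c, h, w = 2, 128, 24, 32
feature0 = torch.randn(b, c, h, w)
feature1 = torch.randn(b, c, h, w)

flow, prob = global_correlation_softmax(feature0, feature1)
print(flow.shape)   # torch.Size([2, 2, 24, 32]), dense flow from the softmax-weighted correspondence
print(prob.shape)   # torch.Size([2, 768, 768]), matching distribution over all H*W target locations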
basicsr/archs/gmflow/gmflow/position.py ADDED
@@ -0,0 +1,46 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2
+ # https://github.com/facebookresearch/detr/blob/main/models/position_encoding.py
3
+
4
+ import torch
5
+ import torch.nn as nn
6
+ import math
7
+
8
+
9
+ class PositionEmbeddingSine(nn.Module):
10
+ """
11
+ This is a more standard version of the position embedding, very similar to the one
12
+ used by the Attention is all you need paper, generalized to work on images.
13
+ """
14
+
15
+ def __init__(self, num_pos_feats=64, temperature=10000, normalize=True, scale=None):
16
+ super().__init__()
17
+ self.num_pos_feats = num_pos_feats
18
+ self.temperature = temperature
19
+ self.normalize = normalize
20
+ if scale is not None and normalize is False:
21
+ raise ValueError("normalize should be True if scale is passed")
22
+ if scale is None:
23
+ scale = 2 * math.pi
24
+ self.scale = scale
25
+
26
+ def forward(self, x):
27
+ # x = tensor_list.tensors # [B, C, H, W]
28
+ # mask = tensor_list.mask # [B, H, W], input with padding, valid as 0
29
+ b, c, h, w = x.size()
30
+ mask = torch.ones((b, h, w), device=x.device) # [B, H, W]
31
+ y_embed = mask.cumsum(1, dtype=torch.float32)
32
+ x_embed = mask.cumsum(2, dtype=torch.float32)
33
+ if self.normalize:
34
+ eps = 1e-6
35
+ y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
36
+ x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
37
+
38
+ dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device)
39
+ dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats)
40
+
41
+ pos_x = x_embed[:, :, :, None] / dim_t
42
+ pos_y = y_embed[:, :, :, None] / dim_t
43
+ pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
44
+ pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
45
+ pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
46
+ return pos
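A quick sanity check of PositionEmbeddingSine on a dummy feature map: with num_pos_feats = C // 2 the returned encoding has the same shape as the input, which is why it can simply be added to the features later on:

import torch

feat = torch.randn(2, 128, 24, 32)          # [B, C, H, W]
pos_enc = PositionEmbeddingSine(num_pos_feats=64)
pos = pos_enc(feat)
print(pos.shape)                            # torch.Size([2, 128, 24, 32])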
basicsr/archs/gmflow/gmflow/transformer.py ADDED
@@ -0,0 +1,409 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ from .utils import split_feature, merge_splits
6
+
7
+
8
+ def single_head_full_attention(q, k, v):
9
+ # q, k, v: [B, L, C]
10
+ assert q.dim() == k.dim() == v.dim() == 3
11
+
12
+ scores = torch.matmul(q, k.permute(0, 2, 1)) / (q.size(2) ** .5) # [B, L, L]
13
+ attn = torch.softmax(scores, dim=2) # [B, L, L]
14
+ out = torch.matmul(attn, v) # [B, L, C]
15
+
16
+ return out
17
+
18
+
19
+ def generate_shift_window_attn_mask(input_resolution, window_size_h, window_size_w,
20
+ shift_size_h, shift_size_w, device=torch.device('cuda')):
21
+ # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py
22
+ # calculate attention mask for SW-MSA
23
+ h, w = input_resolution
24
+ img_mask = torch.zeros((1, h, w, 1)).to(device) # 1 H W 1
25
+ h_slices = (slice(0, -window_size_h),
26
+ slice(-window_size_h, -shift_size_h),
27
+ slice(-shift_size_h, None))
28
+ w_slices = (slice(0, -window_size_w),
29
+ slice(-window_size_w, -shift_size_w),
30
+ slice(-shift_size_w, None))
31
+ cnt = 0
32
+ for h in h_slices:
33
+ for w in w_slices:
34
+ img_mask[:, h, w, :] = cnt
35
+ cnt += 1
36
+
37
+ mask_windows = split_feature(img_mask, num_splits=input_resolution[-1] // window_size_w, channel_last=True)
38
+
39
+ mask_windows = mask_windows.view(-1, window_size_h * window_size_w)
40
+ attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2)
41
+ attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0))
42
+
43
+ return attn_mask
44
+
45
+
46
+ def single_head_split_window_attention(q, k, v,
47
+ num_splits=1,
48
+ with_shift=False,
49
+ h=None,
50
+ w=None,
51
+ attn_mask=None,
52
+ ):
53
+ # Ref: https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer.py
54
+ # q, k, v: [B, L, C]
55
+ assert q.dim() == k.dim() == v.dim() == 3
56
+
57
+ assert h is not None and w is not None
58
+ assert q.size(1) == h * w
59
+
60
+ b, _, c = q.size()
61
+
62
+ b_new = b * num_splits * num_splits
63
+
64
+ window_size_h = h // num_splits
65
+ window_size_w = w // num_splits
66
+
67
+ q = q.view(b, h, w, c) # [B, H, W, C]
68
+ k = k.view(b, h, w, c)
69
+ v = v.view(b, h, w, c)
70
+
71
+ scale_factor = c ** 0.5
72
+
73
+ if with_shift:
74
+ assert attn_mask is not None # compute once
75
+ shift_size_h = window_size_h // 2
76
+ shift_size_w = window_size_w // 2
77
+
78
+ q = torch.roll(q, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2))
79
+ k = torch.roll(k, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2))
80
+ v = torch.roll(v, shifts=(-shift_size_h, -shift_size_w), dims=(1, 2))
81
+
82
+ q = split_feature(q, num_splits=num_splits, channel_last=True) # [B*K*K, H/K, W/K, C]
83
+ k = split_feature(k, num_splits=num_splits, channel_last=True)
84
+ v = split_feature(v, num_splits=num_splits, channel_last=True)
85
+
86
+ scores = torch.matmul(q.view(b_new, -1, c), k.view(b_new, -1, c).permute(0, 2, 1)
87
+ ) / scale_factor # [B*K*K, H/K*W/K, H/K*W/K]
88
+
89
+ if with_shift:
90
+ scores += attn_mask.repeat(b, 1, 1)
91
+
92
+ attn = torch.softmax(scores, dim=-1)
93
+
94
+ out = torch.matmul(attn, v.view(b_new, -1, c)) # [B*K*K, H/K*W/K, C]
95
+
96
+ out = merge_splits(out.view(b_new, h // num_splits, w // num_splits, c),
97
+ num_splits=num_splits, channel_last=True) # [B, H, W, C]
98
+
99
+ # shift back
100
+ if with_shift:
101
+ out = torch.roll(out, shifts=(shift_size_h, shift_size_w), dims=(1, 2))
102
+
103
+ out = out.view(b, -1, c)
104
+
105
+ return out
106
+
107
+
108
+ class TransformerLayer(nn.Module):
109
+ def __init__(self,
110
+ d_model=256,
111
+ nhead=1,
112
+ attention_type='swin',
113
+ no_ffn=False,
114
+ ffn_dim_expansion=4,
115
+ with_shift=False,
116
+ **kwargs,
117
+ ):
118
+ super(TransformerLayer, self).__init__()
119
+
120
+ self.dim = d_model
121
+ self.nhead = nhead
122
+ self.attention_type = attention_type
123
+ self.no_ffn = no_ffn
124
+
125
+ self.with_shift = with_shift
126
+
127
+ # multi-head attention
128
+ self.q_proj = nn.Linear(d_model, d_model, bias=False)
129
+ self.k_proj = nn.Linear(d_model, d_model, bias=False)
130
+ self.v_proj = nn.Linear(d_model, d_model, bias=False)
131
+
132
+ self.merge = nn.Linear(d_model, d_model, bias=False)
133
+
134
+ self.norm1 = nn.LayerNorm(d_model)
135
+
136
+ # no ffn after self-attn, with ffn after cross-attn
137
+ if not self.no_ffn:
138
+ in_channels = d_model * 2
139
+ self.mlp = nn.Sequential(
140
+ nn.Linear(in_channels, in_channels * ffn_dim_expansion, bias=False),
141
+ nn.GELU(),
142
+ nn.Linear(in_channels * ffn_dim_expansion, d_model, bias=False),
143
+ )
144
+
145
+ self.norm2 = nn.LayerNorm(d_model)
146
+
147
+ def forward(self, source, target,
148
+ height=None,
149
+ width=None,
150
+ shifted_window_attn_mask=None,
151
+ attn_num_splits=None,
152
+ **kwargs,
153
+ ):
154
+ # source, target: [B, L, C]
155
+ query, key, value = source, target, target
156
+
157
+ # single-head attention
158
+ query = self.q_proj(query) # [B, L, C]
159
+ key = self.k_proj(key) # [B, L, C]
160
+ value = self.v_proj(value) # [B, L, C]
161
+
162
+ if self.attention_type == 'swin' and attn_num_splits > 1:
163
+ if self.nhead > 1:
164
+ # we observe that multihead attention slows down the speed and increases the memory consumption
165
+ # without bringing obvious performance gains and thus the implementation is removed
166
+ raise NotImplementedError
167
+ else:
168
+ message = single_head_split_window_attention(query, key, value,
169
+ num_splits=attn_num_splits,
170
+ with_shift=self.with_shift,
171
+ h=height,
172
+ w=width,
173
+ attn_mask=shifted_window_attn_mask,
174
+ )
175
+ else:
176
+ message = single_head_full_attention(query, key, value) # [B, L, C]
177
+
178
+ message = self.merge(message) # [B, L, C]
179
+ message = self.norm1(message)
180
+
181
+ if not self.no_ffn:
182
+ message = self.mlp(torch.cat([source, message], dim=-1))
183
+ message = self.norm2(message)
184
+
185
+ return source + message
186
+
187
+
188
+ class TransformerBlock(nn.Module):
189
+ """self attention + cross attention + FFN"""
190
+
191
+ def __init__(self,
192
+ d_model=256,
193
+ nhead=1,
194
+ attention_type='swin',
195
+ ffn_dim_expansion=4,
196
+ with_shift=False,
197
+ **kwargs,
198
+ ):
199
+ super(TransformerBlock, self).__init__()
200
+
201
+ self.self_attn = TransformerLayer(d_model=d_model,
202
+ nhead=nhead,
203
+ attention_type=attention_type,
204
+ no_ffn=True,
205
+ ffn_dim_expansion=ffn_dim_expansion,
206
+ with_shift=with_shift,
207
+ )
208
+
209
+ self.cross_attn_ffn = TransformerLayer(d_model=d_model,
210
+ nhead=nhead,
211
+ attention_type=attention_type,
212
+ ffn_dim_expansion=ffn_dim_expansion,
213
+ with_shift=with_shift,
214
+ )
215
+
216
+ def forward(self, source, target,
217
+ height=None,
218
+ width=None,
219
+ shifted_window_attn_mask=None,
220
+ attn_num_splits=None,
221
+ **kwargs,
222
+ ):
223
+ # source, target: [B, L, C]
224
+
225
+ # self attention
226
+ source = self.self_attn(source, source,
227
+ height=height,
228
+ width=width,
229
+ shifted_window_attn_mask=shifted_window_attn_mask,
230
+ attn_num_splits=attn_num_splits,
231
+ )
232
+
233
+ # cross attention and ffn
234
+ source = self.cross_attn_ffn(source, target,
235
+ height=height,
236
+ width=width,
237
+ shifted_window_attn_mask=shifted_window_attn_mask,
238
+ attn_num_splits=attn_num_splits,
239
+ )
240
+
241
+ return source
242
+
243
+
244
+ class FeatureTransformer(nn.Module):
245
+ def __init__(self,
246
+ num_layers=6,
247
+ d_model=128,
248
+ nhead=1,
249
+ attention_type='swin',
250
+ ffn_dim_expansion=4,
251
+ **kwargs,
252
+ ):
253
+ super(FeatureTransformer, self).__init__()
254
+
255
+ self.attention_type = attention_type
256
+
257
+ self.d_model = d_model
258
+ self.nhead = nhead
259
+
260
+ self.layers = nn.ModuleList([
261
+ TransformerBlock(d_model=d_model,
262
+ nhead=nhead,
263
+ attention_type=attention_type,
264
+ ffn_dim_expansion=ffn_dim_expansion,
265
+ with_shift=True if attention_type == 'swin' and i % 2 == 1 else False,
266
+ )
267
+ for i in range(num_layers)])
268
+
269
+ for p in self.parameters():
270
+ if p.dim() > 1:
271
+ nn.init.xavier_uniform_(p)
272
+
273
+ def forward(self, feature0, feature1,
274
+ attn_num_splits=None,
275
+ **kwargs,
276
+ ):
277
+
278
+ b, c, h, w = feature0.shape
279
+ assert self.d_model == c
280
+
281
+ feature0 = feature0.flatten(-2).permute(0, 2, 1) # [B, H*W, C]
282
+ feature1 = feature1.flatten(-2).permute(0, 2, 1) # [B, H*W, C]
283
+
284
+ if self.attention_type == 'swin' and attn_num_splits > 1:
285
+ # global matching and local refinement use different numbers of splits
286
+ window_size_h = h // attn_num_splits
287
+ window_size_w = w // attn_num_splits
288
+
289
+ # compute attn mask once
290
+ shifted_window_attn_mask = generate_shift_window_attn_mask(
291
+ input_resolution=(h, w),
292
+ window_size_h=window_size_h,
293
+ window_size_w=window_size_w,
294
+ shift_size_h=window_size_h // 2,
295
+ shift_size_w=window_size_w // 2,
296
+ device=feature0.device,
297
+ ) # [K*K, H/K*W/K, H/K*W/K]
298
+ else:
299
+ shifted_window_attn_mask = None
300
+
301
+ # concat feature0 and feature1 in batch dimension to compute in parallel
302
+ concat0 = torch.cat((feature0, feature1), dim=0) # [2B, H*W, C]
303
+ concat1 = torch.cat((feature1, feature0), dim=0) # [2B, H*W, C]
304
+
305
+ for layer in self.layers:
306
+ concat0 = layer(concat0, concat1,
307
+ height=h,
308
+ width=w,
309
+ shifted_window_attn_mask=shifted_window_attn_mask,
310
+ attn_num_splits=attn_num_splits,
311
+ )
312
+
313
+ # update feature1
314
+ concat1 = torch.cat(concat0.chunk(chunks=2, dim=0)[::-1], dim=0)
315
+
316
+ feature0, feature1 = concat0.chunk(chunks=2, dim=0) # [B, H*W, C]
317
+
318
+ # reshape back
319
+ feature0 = feature0.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() # [B, C, H, W]
320
+ feature1 = feature1.view(b, h, w, c).permute(0, 3, 1, 2).contiguous() # [B, C, H, W]
321
+
322
+ return feature0, feature1
323
+
324
+
325
+ class FeatureFlowAttention(nn.Module):
326
+ """
327
+ flow propagation with self-attention on feature
328
+ query: feature0, key: feature0, value: flow
329
+ """
330
+
331
+ def __init__(self, in_channels,
332
+ **kwargs,
333
+ ):
334
+ super(FeatureFlowAttention, self).__init__()
335
+
336
+ self.q_proj = nn.Linear(in_channels, in_channels)
337
+ self.k_proj = nn.Linear(in_channels, in_channels)
338
+
339
+ for p in self.parameters():
340
+ if p.dim() > 1:
341
+ nn.init.xavier_uniform_(p)
342
+
343
+ def forward(self, feature0, flow,
344
+ local_window_attn=False,
345
+ local_window_radius=1,
346
+ **kwargs,
347
+ ):
348
+ # q, k: feature [B, C, H, W], v: flow [B, 2, H, W]
349
+ if local_window_attn:
350
+ return self.forward_local_window_attn(feature0, flow,
351
+ local_window_radius=local_window_radius)
352
+
353
+ b, c, h, w = feature0.size()
354
+
355
+ query = feature0.view(b, c, h * w).permute(0, 2, 1) # [B, H*W, C]
356
+
357
+ # a note: the ``correct'' implementation should be:
358
+ # ``query = self.q_proj(query), key = self.k_proj(query)''
359
+ # this problem is observed while cleaning up the code
360
+ # however, this doesn't affect the performance since the projection is a linear operation,
361
+ # thus the two projection matrices for key can be merged
362
+ # so I just leave it as is in order to not re-train all models :)
363
+ query = self.q_proj(query) # [B, H*W, C]
364
+ key = self.k_proj(query) # [B, H*W, C]
365
+
366
+ value = flow.view(b, flow.size(1), h * w).permute(0, 2, 1) # [B, H*W, 2]
367
+
368
+ scores = torch.matmul(query, key.permute(0, 2, 1)) / (c ** 0.5) # [B, H*W, H*W]
369
+ prob = torch.softmax(scores, dim=-1)
370
+
371
+ out = torch.matmul(prob, value) # [B, H*W, 2]
372
+ out = out.view(b, h, w, value.size(-1)).permute(0, 3, 1, 2) # [B, 2, H, W]
373
+
374
+ return out
375
+
376
+ def forward_local_window_attn(self, feature0, flow,
377
+ local_window_radius=1,
378
+ ):
379
+ assert flow.size(1) == 2
380
+ assert local_window_radius > 0
381
+
382
+ b, c, h, w = feature0.size()
383
+
384
+ feature0_reshape = self.q_proj(feature0.view(b, c, -1).permute(0, 2, 1)
385
+ ).reshape(b * h * w, 1, c) # [B*H*W, 1, C]
386
+
387
+ kernel_size = 2 * local_window_radius + 1
388
+
389
+ feature0_proj = self.k_proj(feature0.view(b, c, -1).permute(0, 2, 1)).permute(0, 2, 1).reshape(b, c, h, w)
390
+
391
+ feature0_window = F.unfold(feature0_proj, kernel_size=kernel_size,
392
+ padding=local_window_radius) # [B, C*(2R+1)^2), H*W]
393
+
394
+ feature0_window = feature0_window.view(b, c, kernel_size ** 2, h, w).permute(
395
+ 0, 3, 4, 1, 2).reshape(b * h * w, c, kernel_size ** 2) # [B*H*W, C, (2R+1)^2]
396
+
397
+ flow_window = F.unfold(flow, kernel_size=kernel_size,
398
+ padding=local_window_radius) # [B, 2*(2R+1)^2), H*W]
399
+
400
+ flow_window = flow_window.view(b, 2, kernel_size ** 2, h, w).permute(
401
+ 0, 3, 4, 2, 1).reshape(b * h * w, kernel_size ** 2, 2) # [B*H*W, (2R+1)^2, 2]
402
+
403
+ scores = torch.matmul(feature0_reshape, feature0_window) / (c ** 0.5) # [B*H*W, 1, (2R+1)^2]
404
+
405
+ prob = torch.softmax(scores, dim=-1)
406
+
407
+ out = torch.matmul(prob, flow_window).view(b, h, w, 2).permute(0, 3, 1, 2).contiguous() # [B, 2, H, W]
408
+
409
+ return out
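Assuming the classes above are importable, a minimal sketch of running FeatureTransformer on dummy feature maps; H and W must be divisible by attn_num_splits for the Swin-style window partitioning to work:

import torch

transformer = FeatureTransformer(num_layers=6, d_model=128, nhead=1, attention_type='swin')
feature0 = torch.randn(1, 128, 24, 32)
feature1 = torch.randn(1, 128, 24, 32)

out0, out1 = transformer(feature0, feature1, attn_num_splits=2)
print(out0.shape, out1.shape)   # both torch.Size([1, 128, 24, 32])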
basicsr/archs/gmflow/gmflow/trident_conv.py ADDED
@@ -0,0 +1,90 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ # https://github.com/facebookresearch/detectron2/blob/main/projects/TridentNet/tridentnet/trident_conv.py
3
+
4
+ import torch
5
+ from torch import nn
6
+ from torch.nn import functional as F
7
+ from torch.nn.modules.utils import _pair
8
+
9
+
10
+ class MultiScaleTridentConv(nn.Module):
11
+ def __init__(
12
+ self,
13
+ in_channels,
14
+ out_channels,
15
+ kernel_size,
16
+ stride=1,
17
+ strides=1,
18
+ paddings=0,
19
+ dilations=1,
20
+ dilation=1,
21
+ groups=1,
22
+ num_branch=1,
23
+ test_branch_idx=-1,
24
+ bias=False,
25
+ norm=None,
26
+ activation=None,
27
+ ):
28
+ super(MultiScaleTridentConv, self).__init__()
29
+ self.in_channels = in_channels
30
+ self.out_channels = out_channels
31
+ self.kernel_size = _pair(kernel_size)
32
+ self.num_branch = num_branch
33
+ self.stride = _pair(stride)
34
+ self.groups = groups
35
+ self.with_bias = bias
36
+ self.dilation = dilation
37
+ if isinstance(paddings, int):
38
+ paddings = [paddings] * self.num_branch
39
+ if isinstance(dilations, int):
40
+ dilations = [dilations] * self.num_branch
41
+ if isinstance(strides, int):
42
+ strides = [strides] * self.num_branch
43
+ self.paddings = [_pair(padding) for padding in paddings]
44
+ self.dilations = [_pair(dilation) for dilation in dilations]
45
+ self.strides = [_pair(stride) for stride in strides]
46
+ self.test_branch_idx = test_branch_idx
47
+ self.norm = norm
48
+ self.activation = activation
49
+
50
+ assert len({self.num_branch, len(self.paddings), len(self.strides)}) == 1
51
+
52
+ self.weight = nn.Parameter(
53
+ torch.Tensor(out_channels, in_channels // groups, *self.kernel_size)
54
+ )
55
+ if bias:
56
+ self.bias = nn.Parameter(torch.Tensor(out_channels))
57
+ else:
58
+ self.bias = None
59
+
60
+ nn.init.kaiming_uniform_(self.weight, nonlinearity="relu")
61
+ if self.bias is not None:
62
+ nn.init.constant_(self.bias, 0)
63
+
64
+ def forward(self, inputs):
65
+ num_branch = self.num_branch if self.training or self.test_branch_idx == -1 else 1
66
+ assert len(inputs) == num_branch
67
+
68
+ if self.training or self.test_branch_idx == -1:
69
+ outputs = [
70
+ F.conv2d(input, self.weight, self.bias, stride, padding, self.dilation, self.groups)
71
+ for input, stride, padding in zip(inputs, self.strides, self.paddings)
72
+ ]
73
+ else:
74
+ outputs = [
75
+ F.conv2d(
76
+ inputs[0],
77
+ self.weight,
78
+ self.bias,
79
+ self.strides[self.test_branch_idx] if self.test_branch_idx == -1 else self.strides[-1],
80
+ self.paddings[self.test_branch_idx] if self.test_branch_idx == -1 else self.paddings[-1],
81
+ self.dilation,
82
+ self.groups,
83
+ )
84
+ ]
85
+
86
+ if self.norm is not None:
87
+ outputs = [self.norm(x) for x in outputs]
88
+ if self.activation is not None:
89
+ outputs = [self.activation(x) for x in outputs]
90
+ return outputs
basicsr/archs/gmflow/gmflow/utils.py ADDED
@@ -0,0 +1,86 @@
1
+ import torch
2
+ from .position import PositionEmbeddingSine
3
+
4
+
5
+ def split_feature(feature,
6
+ num_splits=2,
7
+ channel_last=False,
8
+ ):
9
+ if channel_last: # [B, H, W, C]
10
+ b, h, w, c = feature.size()
11
+ assert h % num_splits == 0 and w % num_splits == 0
12
+
13
+ b_new = b * num_splits * num_splits
14
+ h_new = h // num_splits
15
+ w_new = w // num_splits
16
+
17
+ feature = feature.view(b, num_splits, h // num_splits, num_splits, w // num_splits, c
18
+ ).permute(0, 1, 3, 2, 4, 5).reshape(b_new, h_new, w_new, c) # [B*K*K, H/K, W/K, C]
19
+ else: # [B, C, H, W]
20
+ b, c, h, w = feature.size()
21
+ assert h % num_splits == 0 and w % num_splits == 0
22
+
23
+ b_new = b * num_splits * num_splits
24
+ h_new = h // num_splits
25
+ w_new = w // num_splits
26
+
27
+ feature = feature.view(b, c, num_splits, h // num_splits, num_splits, w // num_splits
28
+ ).permute(0, 2, 4, 1, 3, 5).reshape(b_new, c, h_new, w_new) # [B*K*K, C, H/K, W/K]
29
+
30
+ return feature
31
+
32
+
33
+ def merge_splits(splits,
34
+ num_splits=2,
35
+ channel_last=False,
36
+ ):
37
+ if channel_last: # [B*K*K, H/K, W/K, C]
38
+ b, h, w, c = splits.size()
39
+ new_b = b // num_splits // num_splits
40
+
41
+ splits = splits.view(new_b, num_splits, num_splits, h, w, c)
42
+ merge = splits.permute(0, 1, 3, 2, 4, 5).contiguous().view(
43
+ new_b, num_splits * h, num_splits * w, c) # [B, H, W, C]
44
+ else: # [B*K*K, C, H/K, W/K]
45
+ b, c, h, w = splits.size()
46
+ new_b = b // num_splits // num_splits
47
+
48
+ splits = splits.view(new_b, num_splits, num_splits, c, h, w)
49
+ merge = splits.permute(0, 3, 1, 4, 2, 5).contiguous().view(
50
+ new_b, c, num_splits * h, num_splits * w) # [B, C, H, W]
51
+
52
+ return merge
53
+
54
+
55
+ def normalize_img(img0, img1):
56
+ # loaded images are in [0, 255]
57
+ # normalize by ImageNet mean and std
58
+ mean = torch.tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).to(img1.device)
59
+ std = torch.tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).to(img1.device)
60
+ img0 = (img0 / 255. - mean) / std
61
+ img1 = (img1 / 255. - mean) / std
62
+
63
+ return img0, img1
64
+
65
+
66
+ def feature_add_position(feature0, feature1, attn_splits, feature_channels):
67
+ pos_enc = PositionEmbeddingSine(num_pos_feats=feature_channels // 2)
68
+
69
+ if attn_splits > 1: # add position within each split window
70
+ feature0_splits = split_feature(feature0, num_splits=attn_splits)
71
+ feature1_splits = split_feature(feature1, num_splits=attn_splits)
72
+
73
+ position = pos_enc(feature0_splits)
74
+
75
+ feature0_splits = feature0_splits + position
76
+ feature1_splits = feature1_splits + position
77
+
78
+ feature0 = merge_splits(feature0_splits, num_splits=attn_splits)
79
+ feature1 = merge_splits(feature1_splits, num_splits=attn_splits)
80
+ else:
81
+ position = pos_enc(feature0)
82
+
83
+ feature0 = feature0 + position
84
+ feature1 = feature1 + position
85
+
86
+ return feature0, feature1
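split_feature and merge_splits are exact inverses of each other; a quick round-trip check on a dummy [B, C, H, W] tensor (H and W must be divisible by num_splits):

import torch

x = torch.randn(2, 128, 32, 48)
splits = split_feature(x, num_splits=2)        # [B*4, C, H/2, W/2]
merged = merge_splits(splits, num_splits=2)    # back to [B, C, H, W]
print(torch.equal(x, merged))                  # True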
basicsr/archs/gmflow/loss.py ADDED
@@ -0,0 +1,37 @@
1
+ import torch
2
+
3
+
4
+ def flow_loss_func(flow_preds, flow_gt, valid,
5
+ gamma=0.9,
6
+ max_flow=400,
7
+ **kwargs,
8
+ ):
9
+ n_predictions = len(flow_preds)
10
+ flow_loss = 0.0
11
+
12
+ # exclude invalid pixels and extremely large displacements
13
+ mag = torch.sum(flow_gt ** 2, dim=1).sqrt() # [B, H, W]
14
+ valid = (valid >= 0.5) & (mag < max_flow)
15
+
16
+ for i in range(n_predictions):
17
+ i_weight = gamma ** (n_predictions - i - 1)
18
+
19
+ i_loss = (flow_preds[i] - flow_gt).abs()
20
+
21
+ flow_loss += i_weight * (valid[:, None] * i_loss).mean()
22
+
23
+ epe = torch.sum((flow_preds[-1] - flow_gt) ** 2, dim=1).sqrt()
24
+
25
+ if valid.max() < 0.5:
26
+ pass
27
+
28
+ epe = epe.view(-1)[valid.view(-1)]
29
+
30
+ metrics = {
31
+ 'epe': epe.mean().item(),
32
+ '1px': (epe > 1).float().mean().item(),
33
+ '3px': (epe > 3).float().mean().item(),
34
+ '5px': (epe > 5).float().mean().item(),
35
+ }
36
+
37
+ return flow_loss, metrics
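A minimal sketch of calling flow_loss_func on dummy predictions: each prediction's L1 term is weighted by gamma ** (n_predictions - i - 1), so later (finer) predictions contribute more to the total loss:

import torch

b, h, w = 2, 48, 64
flow_gt = torch.randn(b, 2, h, w)
valid = torch.ones(b, h, w)
flow_preds = [flow_gt + 0.5 * torch.randn(b, 2, h, w) for _ in range(3)]

loss, metrics = flow_loss_func(flow_preds, flow_gt, valid, gamma=0.9)
print(loss.item(), metrics['epe'])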
basicsr/archs/gmflow/main.py ADDED
@@ -0,0 +1,557 @@
1
+ import torch
2
+ from torch.utils.data import DataLoader
3
+ from torch.utils.tensorboard import SummaryWriter
4
+
5
+ import argparse
6
+ import numpy as np
7
+ import os
8
+
9
+ from data import build_train_dataset
10
+ from gmflow.gmflow import GMFlow
11
+ from loss import flow_loss_func
12
+ from evaluate import (validate_chairs, validate_things, validate_sintel, validate_kitti,
13
+ create_sintel_submission, create_kitti_submission, inference_on_dir)
14
+
15
+ from utils.logger import Logger
16
+ from utils import misc
17
+ from utils.dist_utils import get_dist_info, init_dist, setup_for_distributed
18
+
19
+
20
+ def get_args_parser():
21
+ parser = argparse.ArgumentParser()
22
+
23
+ # dataset
24
+ parser.add_argument('--checkpoint_dir', default='tmp', type=str,
25
+ help='where to save the training log and models')
26
+ parser.add_argument('--stage', default='chairs', type=str,
27
+ help='training stage')
28
+ parser.add_argument('--image_size', default=[384, 512], type=int, nargs='+',
29
+ help='image size for training')
30
+ parser.add_argument('--padding_factor', default=16, type=int,
31
+ help='the input size should be divisible by padding_factor; otherwise the input is padded')
32
+
33
+ parser.add_argument('--max_flow', default=400, type=int,
34
+ help='exclude very large motions during training')
35
+ parser.add_argument('--val_dataset', default=['chairs'], type=str, nargs='+',
36
+ help='validation dataset')
37
+ parser.add_argument('--with_speed_metric', action='store_true',
38
+ help='with speed metric when evaluation')
39
+
40
+ # training
41
+ parser.add_argument('--lr', default=4e-4, type=float)
42
+ parser.add_argument('--batch_size', default=12, type=int)
43
+ parser.add_argument('--num_workers', default=4, type=int)
44
+ parser.add_argument('--weight_decay', default=1e-4, type=float)
45
+ parser.add_argument('--grad_clip', default=1.0, type=float)
46
+ parser.add_argument('--num_steps', default=100000, type=int)
47
+ parser.add_argument('--seed', default=326, type=int)
48
+ parser.add_argument('--summary_freq', default=100, type=int)
49
+ parser.add_argument('--val_freq', default=10000, type=int)
50
+ parser.add_argument('--save_ckpt_freq', default=10000, type=int)
51
+ parser.add_argument('--save_latest_ckpt_freq', default=1000, type=int)
52
+
53
+ # resume pretrained model or resume training
54
+ parser.add_argument('--resume', default=None, type=str,
55
+ help='resume from a pretrained model for finetuning, or resume terminated training')
56
+ parser.add_argument('--strict_resume', action='store_true')
57
+ parser.add_argument('--no_resume_optimizer', action='store_true')
58
+
59
+ # GMFlow model
60
+ parser.add_argument('--num_scales', default=1, type=int,
61
+ help='basic gmflow model uses a single 1/8 feature, the refinement uses 1/4 feature')
62
+ parser.add_argument('--feature_channels', default=128, type=int)
63
+ parser.add_argument('--upsample_factor', default=8, type=int)
64
+ parser.add_argument('--num_transformer_layers', default=6, type=int)
65
+ parser.add_argument('--num_head', default=1, type=int)
66
+ parser.add_argument('--attention_type', default='swin', type=str)
67
+ parser.add_argument('--ffn_dim_expansion', default=4, type=int)
68
+
69
+ parser.add_argument('--attn_splits_list', default=[2], type=int, nargs='+',
70
+ help='number of splits in attention')
71
+ parser.add_argument('--corr_radius_list', default=[-1], type=int, nargs='+',
72
+ help='correlation radius for matching, -1 indicates global matching')
73
+ parser.add_argument('--prop_radius_list', default=[-1], type=int, nargs='+',
74
+ help='self-attention radius for flow propagation, -1 indicates global attention')
75
+
76
+ # loss
77
+ parser.add_argument('--gamma', default=0.9, type=float,
78
+ help='loss weight')
79
+
80
+ # evaluation
81
+ parser.add_argument('--eval', action='store_true')
82
+ parser.add_argument('--save_eval_to_file', action='store_true')
83
+ parser.add_argument('--evaluate_matched_unmatched', action='store_true')
84
+
85
+ # inference on a directory
86
+ parser.add_argument('--inference_dir', default=None, type=str)
87
+ parser.add_argument('--inference_size', default=None, type=int, nargs='+',
88
+ help='can specify the inference size')
89
+ parser.add_argument('--dir_paired_data', action='store_true',
90
+ help='Paired data in a dir instead of a sequence')
91
+ parser.add_argument('--save_flo_flow', action='store_true')
92
+ parser.add_argument('--pred_bidir_flow', action='store_true',
93
+ help='predict bidirectional flow')
94
+ parser.add_argument('--fwd_bwd_consistency_check', action='store_true',
95
+ help='forward-backward consistency check with bidirectional flow')
96
+
97
+ # predict on sintel and kitti test set for submission
98
+ parser.add_argument('--submission', action='store_true',
99
+ help='submission to sintel or kitti test sets')
100
+ parser.add_argument('--output_path', default='output', type=str,
101
+ help='where to save the prediction results')
102
+ parser.add_argument('--save_vis_flow', action='store_true',
103
+ help='visualize flow prediction as .png image')
104
+ parser.add_argument('--no_save_flo', action='store_true',
105
+ help='not save flow as .flo')
106
+
107
+ # distributed training
108
+ parser.add_argument('--local_rank', default=0, type=int)
109
+ parser.add_argument('--distributed', action='store_true')
110
+ parser.add_argument('--launcher', default='none', type=str, choices=['none', 'pytorch'])
111
+ parser.add_argument('--gpu_ids', default=0, type=int, nargs='+')
112
+
113
+ parser.add_argument('--count_time', action='store_true',
114
+ help='measure the inference time on sintel')
115
+
116
+ return parser
117
+
118
+
119
+ def main(args):
120
+ if not args.eval and not args.submission and args.inference_dir is None:
121
+ if args.local_rank == 0:
122
+ print('pytorch version:', torch.__version__)
123
+ print(args)
124
+ misc.save_args(args)
125
+ misc.check_path(args.checkpoint_dir)
126
+ misc.save_command(args.checkpoint_dir)
127
+
128
+ seed = args.seed
129
+ torch.manual_seed(seed)
130
+ np.random.seed(seed)
131
+
132
+ torch.backends.cudnn.benchmark = True
133
+
134
+ if args.launcher == 'none':
135
+ args.distributed = False
136
+ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
137
+ else:
138
+ args.distributed = True
139
+
140
+ # adjust batch size for each gpu
141
+ assert args.batch_size % torch.cuda.device_count() == 0
142
+ args.batch_size = args.batch_size // torch.cuda.device_count()
143
+
144
+ dist_params = dict(backend='nccl')
145
+ init_dist(args.launcher, **dist_params)
146
+ # re-set gpu_ids with distributed training mode
147
+ _, world_size = get_dist_info()
148
+ args.gpu_ids = range(world_size)
149
+ device = torch.device('cuda:{}'.format(args.local_rank))
150
+
151
+ setup_for_distributed(args.local_rank == 0)
152
+
153
+ # model
154
+ model = GMFlow(feature_channels=args.feature_channels,
155
+ num_scales=args.num_scales,
156
+ upsample_factor=args.upsample_factor,
157
+ num_head=args.num_head,
158
+ attention_type=args.attention_type,
159
+ ffn_dim_expansion=args.ffn_dim_expansion,
160
+ num_transformer_layers=args.num_transformer_layers,
161
+ ).to(device)
162
+
163
+ if not args.eval and not args.submission and not args.inference_dir:
164
+ print('Model definition:')
165
+ print(model)
166
+
167
+ if args.distributed:
168
+ model = torch.nn.parallel.DistributedDataParallel(
169
+ model.to(device),
170
+ device_ids=[args.local_rank],
171
+ output_device=args.local_rank)
172
+ model_without_ddp = model.module
173
+ else:
174
+ if torch.cuda.device_count() > 1:
175
+ print('Use %d GPUs' % torch.cuda.device_count())
176
+ model = torch.nn.DataParallel(model)
177
+
178
+ model_without_ddp = model.module
179
+ else:
180
+ model_without_ddp = model
181
+
182
+ num_params = sum(p.numel() for p in model.parameters())
183
+ print('Number of params:', num_params)
184
+ if not args.eval and not args.submission and args.inference_dir is None:
185
+ save_name = '%d_parameters' % num_params
186
+ open(os.path.join(args.checkpoint_dir, save_name), 'a').close()
187
+
188
+ optimizer = torch.optim.AdamW(model_without_ddp.parameters(), lr=args.lr,
189
+ weight_decay=args.weight_decay)
190
+
191
+ start_epoch = 0
192
+ start_step = 0
193
+ # resume checkpoints
194
+ if args.resume:
195
+ print('Load checkpoint: %s' % args.resume)
196
+
197
+ loc = 'cuda:{}'.format(args.local_rank)
198
+ checkpoint = torch.load(args.resume, map_location=loc)
199
+
200
+ weights = checkpoint['model'] if 'model' in checkpoint else checkpoint
201
+
202
+ model_without_ddp.load_state_dict(weights, strict=args.strict_resume)
203
+
204
+ if 'optimizer' in checkpoint and 'step' in checkpoint and 'epoch' in checkpoint and not \
205
+ args.no_resume_optimizer:
206
+ print('Load optimizer')
207
+ optimizer.load_state_dict(checkpoint['optimizer'])
208
+ start_epoch = checkpoint['epoch']
209
+ start_step = checkpoint['step']
210
+
211
+ print('start_epoch: %d, start_step: %d' % (start_epoch, start_step))
212
+
213
+ # evaluate
214
+ if args.eval:
215
+ val_results = {}
216
+
217
+ if 'chairs' in args.val_dataset:
218
+ results_dict = validate_chairs(model_without_ddp,
219
+ with_speed_metric=args.with_speed_metric,
220
+ attn_splits_list=args.attn_splits_list,
221
+ corr_radius_list=args.corr_radius_list,
222
+ prop_radius_list=args.prop_radius_list,
223
+ )
224
+
225
+ val_results.update(results_dict)
226
+
227
+ if 'things' in args.val_dataset:
228
+ results_dict = validate_things(model_without_ddp,
229
+ padding_factor=args.padding_factor,
230
+ with_speed_metric=args.with_speed_metric,
231
+ attn_splits_list=args.attn_splits_list,
232
+ corr_radius_list=args.corr_radius_list,
233
+ prop_radius_list=args.prop_radius_list,
234
+ )
235
+ val_results.update(results_dict)
236
+
237
+ if 'sintel' in args.val_dataset:
238
+ results_dict = validate_sintel(model_without_ddp,
239
+ count_time=args.count_time,
240
+ padding_factor=args.padding_factor,
241
+ with_speed_metric=args.with_speed_metric,
242
+ evaluate_matched_unmatched=args.evaluate_matched_unmatched,
243
+ attn_splits_list=args.attn_splits_list,
244
+ corr_radius_list=args.corr_radius_list,
245
+ prop_radius_list=args.prop_radius_list,
246
+ )
247
+ val_results.update(results_dict)
248
+
249
+ if 'kitti' in args.val_dataset:
250
+ results_dict = validate_kitti(model_without_ddp,
251
+ padding_factor=args.padding_factor,
252
+ with_speed_metric=args.with_speed_metric,
253
+ attn_splits_list=args.attn_splits_list,
254
+ corr_radius_list=args.corr_radius_list,
255
+ prop_radius_list=args.prop_radius_list,
256
+ )
257
+ val_results.update(results_dict)
258
+
259
+ if args.save_eval_to_file:
260
+ misc.check_path(args.checkpoint_dir)
261
+ val_file = os.path.join(args.checkpoint_dir, 'val_results.txt')
262
+ with open(val_file, 'a') as f:
263
+ f.write('\neval results after training done\n\n')
264
+ metrics = ['chairs_epe', 'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
265
+ 'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40', 'things_clean_s40+',
266
+ 'things_final_epe', 'things_final_s0_10', 'things_final_s10_40', 'things_final_s40+',
267
+ 'sintel_clean_epe', 'sintel_clean_s0_10', 'sintel_clean_s10_40', 'sintel_clean_s40+',
268
+ 'sintel_final_epe', 'sintel_final_s0_10', 'sintel_final_s10_40', 'sintel_final_s40+',
269
+ 'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
270
+ ]
271
+ eval_metrics = []
272
+ for metric in metrics:
273
+ if metric in val_results.keys():
274
+ eval_metrics.append(metric)
275
+
276
+ metrics_values = [val_results[metric] for metric in eval_metrics]
277
+
278
+ num_metrics = len(eval_metrics)
279
+
280
+ # save as markdown format
281
+ f.write(("| {:>20} " * num_metrics + '\n').format(*eval_metrics))
282
+ f.write(("| {:20.3f} " * num_metrics).format(*metrics_values))
283
+
284
+ f.write('\n\n')
285
+
286
+ return
287
+
288
+ # Sintel and KITTI submission
289
+ if args.submission:
290
+ # NOTE: args.val_dataset is a list
291
+ if args.val_dataset[0] == 'sintel':
292
+ create_sintel_submission(model_without_ddp,
293
+ output_path=args.output_path,
294
+ padding_factor=args.padding_factor,
295
+ save_vis_flow=args.save_vis_flow,
296
+ no_save_flo=args.no_save_flo,
297
+ attn_splits_list=args.attn_splits_list,
298
+ corr_radius_list=args.corr_radius_list,
299
+ prop_radius_list=args.prop_radius_list,
300
+ )
301
+ elif args.val_dataset[0] == 'kitti':
302
+ create_kitti_submission(model_without_ddp,
303
+ output_path=args.output_path,
304
+ padding_factor=args.padding_factor,
305
+ save_vis_flow=args.save_vis_flow,
306
+ attn_splits_list=args.attn_splits_list,
307
+ corr_radius_list=args.corr_radius_list,
308
+ prop_radius_list=args.prop_radius_list,
309
+ )
310
+ else:
311
+ raise ValueError('Unsupported dataset for submission')
312
+
313
+ return
314
+
315
+ # inference on a dir
316
+ if args.inference_dir is not None:
317
+ inference_on_dir(model_without_ddp,
318
+ inference_dir=args.inference_dir,
319
+ output_path=args.output_path,
320
+ padding_factor=args.padding_factor,
321
+ inference_size=args.inference_size,
322
+ paired_data=args.dir_paired_data,
323
+ save_flo_flow=args.save_flo_flow,
324
+ attn_splits_list=args.attn_splits_list,
325
+ corr_radius_list=args.corr_radius_list,
326
+ prop_radius_list=args.prop_radius_list,
327
+ pred_bidir_flow=args.pred_bidir_flow,
328
+ fwd_bwd_consistency_check=args.fwd_bwd_consistency_check,
329
+ )
330
+
331
+ return
332
+
333
+ # training dataset
334
+ train_dataset = build_train_dataset(args)
335
+ print('Number of training images:', len(train_dataset))
336
+
337
+ # Multi-processing
338
+ if args.distributed:
339
+ train_sampler = torch.utils.data.distributed.DistributedSampler(
340
+ train_dataset,
341
+ num_replicas=torch.cuda.device_count(),
342
+ rank=args.local_rank)
343
+ else:
344
+ train_sampler = None
345
+
346
+ shuffle = False if args.distributed else True
347
+ train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size,
348
+ shuffle=shuffle, num_workers=args.num_workers,
349
+ pin_memory=True, drop_last=True,
350
+ sampler=train_sampler)
351
+
352
+ last_epoch = start_step if args.resume and start_step > 0 else -1
353
+ lr_scheduler = torch.optim.lr_scheduler.OneCycleLR(
354
+ optimizer, args.lr,
355
+ args.num_steps + 10,
356
+ pct_start=0.05,
357
+ cycle_momentum=False,
358
+ anneal_strategy='cos',
359
+ last_epoch=last_epoch,
360
+ )
361
+
362
+ if args.local_rank == 0:
363
+ summary_writer = SummaryWriter(args.checkpoint_dir)
364
+ logger = Logger(lr_scheduler, summary_writer, args.summary_freq,
365
+ start_step=start_step)
366
+
367
+ total_steps = start_step
368
+ epoch = start_epoch
369
+ print('Start training')
370
+
371
+ while total_steps < args.num_steps:
372
+ model.train()
373
+
374
+ # manually change the random seed for shuffling every epoch
375
+ if args.distributed:
376
+ train_sampler.set_epoch(epoch)
377
+
378
+ for i, sample in enumerate(train_loader):
379
+ img1, img2, flow_gt, valid = [x.to(device) for x in sample]
380
+
381
+ results_dict = model(img1, img2,
382
+ attn_splits_list=args.attn_splits_list,
383
+ corr_radius_list=args.corr_radius_list,
384
+ prop_radius_list=args.prop_radius_list,
385
+ )
386
+
387
+ flow_preds = results_dict['flow_preds']
388
+
389
+ loss, metrics = flow_loss_func(flow_preds, flow_gt, valid,
390
+ gamma=args.gamma,
391
+ max_flow=args.max_flow,
392
+ )
393
+
394
+ if isinstance(loss, float):
395
+ continue
396
+
397
+ if torch.isnan(loss):
398
+ continue
399
+
400
+ metrics.update({'total_loss': loss.item()})
401
+
402
+ # more efficient zero_grad
403
+ for param in model_without_ddp.parameters():
404
+ param.grad = None
405
+
406
+ loss.backward()
407
+
408
+ # Gradient clipping
409
+ torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)
410
+
411
+ optimizer.step()
412
+
413
+ lr_scheduler.step()
414
+
415
+ if args.local_rank == 0:
416
+ logger.push(metrics)
417
+
418
+ logger.add_image_summary(img1, img2, flow_preds, flow_gt)
419
+
420
+ total_steps += 1
421
+
422
+ if total_steps % args.save_ckpt_freq == 0 or total_steps == args.num_steps:
423
+ if args.local_rank == 0:
424
+ checkpoint_path = os.path.join(args.checkpoint_dir, 'step_%06d.pth' % total_steps)
425
+ torch.save({
426
+ 'model': model_without_ddp.state_dict()
427
+ }, checkpoint_path)
428
+
429
+ if total_steps % args.save_latest_ckpt_freq == 0:
430
+ checkpoint_path = os.path.join(args.checkpoint_dir, 'checkpoint_latest.pth')
431
+
432
+ if args.local_rank == 0:
433
+ torch.save({
434
+ 'model': model_without_ddp.state_dict(),
435
+ 'optimizer': optimizer.state_dict(),
436
+ 'step': total_steps,
437
+ 'epoch': epoch,
438
+ }, checkpoint_path)
439
+
440
+ if total_steps % args.val_freq == 0:
441
+ print('Start validation')
442
+
443
+ val_results = {}
444
+ # support validation on multiple datasets
445
+ if 'chairs' in args.val_dataset:
446
+ results_dict = validate_chairs(model_without_ddp,
447
+ with_speed_metric=args.with_speed_metric,
448
+ attn_splits_list=args.attn_splits_list,
449
+ corr_radius_list=args.corr_radius_list,
450
+ prop_radius_list=args.prop_radius_list,
451
+ )
452
+ if args.local_rank == 0:
453
+ val_results.update(results_dict)
454
+
455
+ if 'things' in args.val_dataset:
456
+ results_dict = validate_things(model_without_ddp,
457
+ padding_factor=args.padding_factor,
458
+ with_speed_metric=args.with_speed_metric,
459
+ attn_splits_list=args.attn_splits_list,
460
+ corr_radius_list=args.corr_radius_list,
461
+ prop_radius_list=args.prop_radius_list,
462
+ )
463
+ if args.local_rank == 0:
464
+ val_results.update(results_dict)
465
+
466
+ if 'sintel' in args.val_dataset:
467
+ results_dict = validate_sintel(model_without_ddp,
468
+ count_time=args.count_time,
469
+ padding_factor=args.padding_factor,
470
+ with_speed_metric=args.with_speed_metric,
471
+ evaluate_matched_unmatched=args.evaluate_matched_unmatched,
472
+ attn_splits_list=args.attn_splits_list,
473
+ corr_radius_list=args.corr_radius_list,
474
+ prop_radius_list=args.prop_radius_list,
475
+ )
476
+ if args.local_rank == 0:
477
+ val_results.update(results_dict)
478
+
479
+ if 'kitti' in args.val_dataset:
480
+ results_dict = validate_kitti(model_without_ddp,
481
+ padding_factor=args.padding_factor,
482
+ with_speed_metric=args.with_speed_metric,
483
+ attn_splits_list=args.attn_splits_list,
484
+ corr_radius_list=args.corr_radius_list,
485
+ prop_radius_list=args.prop_radius_list,
486
+ )
487
+ if args.local_rank == 0:
488
+ val_results.update(results_dict)
489
+
490
+ if args.local_rank == 0:
491
+ logger.write_dict(val_results)
492
+
493
+ # Save validation results
494
+ val_file = os.path.join(args.checkpoint_dir, 'val_results.txt')
495
+ with open(val_file, 'a') as f:
496
+ f.write('step: %06d\n' % total_steps)
497
+ if args.evaluate_matched_unmatched:
498
+ metrics = ['chairs_epe',
499
+ 'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
500
+ 'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40',
501
+ 'things_clean_s40+',
502
+ 'sintel_clean_epe', 'sintel_clean_matched', 'sintel_clean_unmatched',
503
+ 'sintel_clean_s0_10', 'sintel_clean_s10_40',
504
+ 'sintel_clean_s40+',
505
+ 'sintel_final_epe', 'sintel_final_matched', 'sintel_final_unmatched',
506
+ 'sintel_final_s0_10', 'sintel_final_s10_40',
507
+ 'sintel_final_s40+',
508
+ 'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
509
+ ]
510
+ else:
511
+ metrics = ['chairs_epe', 'chairs_s0_10', 'chairs_s10_40', 'chairs_s40+',
512
+ 'things_clean_epe', 'things_clean_s0_10', 'things_clean_s10_40',
513
+ 'things_clean_s40+',
514
+ 'sintel_clean_epe', 'sintel_clean_s0_10', 'sintel_clean_s10_40',
515
+ 'sintel_clean_s40+',
516
+ 'sintel_final_epe', 'sintel_final_s0_10', 'sintel_final_s10_40',
517
+ 'sintel_final_s40+',
518
+ 'kitti_epe', 'kitti_f1', 'kitti_s0_10', 'kitti_s10_40', 'kitti_s40+',
519
+ ]
520
+
521
+ eval_metrics = []
522
+ for metric in metrics:
523
+ if metric in val_results.keys():
524
+ eval_metrics.append(metric)
525
+
526
+ metrics_values = [val_results[metric] for metric in eval_metrics]
527
+
528
+ num_metrics = len(eval_metrics)
529
+
530
+ # save as markdown format
531
+ if args.evaluate_matched_unmatched:
532
+ f.write(("| {:>25} " * num_metrics + '\n').format(*eval_metrics))
533
+ f.write(("| {:25.3f} " * num_metrics).format(*metrics_values))
534
+ else:
535
+ f.write(("| {:>20} " * num_metrics + '\n').format(*eval_metrics))
536
+ f.write(("| {:20.3f} " * num_metrics).format(*metrics_values))
537
+
538
+ f.write('\n\n')
539
+
540
+ model.train()
541
+
542
+ if total_steps >= args.num_steps:
543
+ print('Training done')
544
+
545
+ return
546
+
547
+ epoch += 1
548
+
549
+
550
+ if __name__ == '__main__':
551
+ parser = get_args_parser()
552
+ args = parser.parse_args()
553
+
554
+ if 'LOCAL_RANK' not in os.environ:
555
+ os.environ['LOCAL_RANK'] = str(args.local_rank)
556
+
557
+ main(args)
basicsr/archs/gmflow/scripts/demo.sh ADDED
@@ -0,0 +1,63 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # inference GMFlow without refinement
4
+
5
+ # sintel
6
+
7
+ # only predict forward flow
8
+ CUDA_VISIBLE_DEVICES=0 python main.py \
9
+ --inference_dir demo/sintel_market_1 \
10
+ --output_path output/gmflow-norefine-sintel_market_1 \
11
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
12
+
13
+ # predict forward & backward flow
14
+ CUDA_VISIBLE_DEVICES=0 python main.py \
15
+ --inference_dir demo/sintel_market_1 \
16
+ --output_path output/gmflow-norefine-sintel_market_1 \
17
+ --pred_bidir_flow \
18
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
19
+
20
+
21
+ # predict forward & backward flow with forward-backward consistency check
22
+ CUDA_VISIBLE_DEVICES=0 python main.py \
23
+ --inference_dir demo/sintel_market_1 \
24
+ --output_path output/gmflow-norefine-sintel_market_1 \
25
+ --pred_bidir_flow \
26
+ --fwd_bwd_consistency_check \
27
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
28
+
29
+
30
+ # davis
31
+
32
+ CUDA_VISIBLE_DEVICES=0 python main.py \
33
+ --inference_dir demo/davis_breakdance-flare \
34
+ --output_path output/gmflow-norefine-davis_breakdance-flare \
35
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
36
+
37
+
38
+
39
+
40
+ # inference GMFlow with refinement
41
+
42
+ CUDA_VISIBLE_DEVICES=0 python main.py \
43
+ --inference_dir demo/davis_breakdance-flare \
44
+ --output_path output/gmflow-withrefine-davis_breakdance-flare \
45
+ --resume pretrained/gmflow_with_refine_sintel-3ed1cf48.pth \
46
+ --padding_factor 32 \
47
+ --upsample_factor 4 \
48
+ --num_scales 2 \
49
+ --attn_splits_list 2 8 \
50
+ --corr_radius_list -1 4 \
51
+ --prop_radius_list -1 1
52
+
53
+
54
+
55
+
56
+ CUDA_VISIBLE_DEVICES=0 python main.py \
57
+ --inference_dir demo/sintel_test_clean_market_1 \
58
+ --output_path output/gmflow-norefine-sintel_test_clean_market_1 \
59
+ --pred_bidir_flow \
60
+ --fwd_bwd_consistency_check \
61
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
62
+
63
+
basicsr/archs/gmflow/scripts/evaluate.sh ADDED
@@ -0,0 +1,83 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # evaluate GMFlow without refinement
4
+
5
+ # evaluate chairs & things trained model on things and sintel (Table 3 of GMFlow paper)
6
+ # the output should be:
7
+ # Number of validation image pairs: 1024
8
+ # Validation Things test set (things_clean) EPE: 3.475
9
+ # Validation Things test (things_clean) s0_10: 0.666, s10_40: 1.310, s40+: 8.968
10
+ # Number of validation image pairs: 1041
11
+ # Validation Sintel (clean) EPE: 1.495, 1px: 0.161, 3px: 0.059, 5px: 0.040
12
+ # Validation Sintel (clean) s0_10: 0.457, s10_40: 1.770, s40+: 8.257
13
+ # Number of validation image pairs: 1041
14
+ # Validation Sintel (final) EPE: 2.955, 1px: 0.209, 3px: 0.098, 5px: 0.071
15
+ # Validation Sintel (final) s0_10: 0.725, s10_40: 3.446, s40+: 17.701
16
+
17
+ CUDA_VISIBLE_DEVICES=0 python main.py \
18
+ --eval \
19
+ --resume pretrained/gmflow_things-e9887eda.pth \
20
+ --val_dataset things sintel \
21
+ --with_speed_metric
22
+
23
+
24
+
25
+ # evaluate GMFlow with refinement
26
+
27
+ # evaluate chairs & things trained model on things and sintel (Table 3 of GMFlow paper)
28
+ # the output should be:
29
+ # Validation Things test set (things_clean) EPE: 2.804
30
+ # Validation Things test (things_clean) s0_10: 0.527, s10_40: 1.009, s40+: 7.314
31
+ # Number of validation image pairs: 1041
32
+ # Validation Sintel (clean) EPE: 1.084, 1px: 0.092, 3px: 0.040, 5px: 0.028
33
+ # Validation Sintel (clean) s0_10: 0.303, s10_40: 1.252, s40+: 6.261
34
+ # Number of validation image pairs: 1041
35
+ # Validation Sintel (final) EPE: 2.475, 1px: 0.147, 3px: 0.077, 5px: 0.058
36
+ # Validation Sintel (final) s0_10: 0.511, s10_40: 2.810, s40+: 15.669
37
+
38
+ CUDA_VISIBLE_DEVICES=0 python main.py \
39
+ --eval \
40
+ --resume pretrained/gmflow_with_refine_things-36579974.pth \
41
+ --val_dataset things sintel \
42
+ --with_speed_metric \
43
+ --padding_factor 32 \
44
+ --upsample_factor 4 \
45
+ --num_scales 2 \
46
+ --attn_splits_list 2 8 \
47
+ --corr_radius_list -1 4 \
48
+ --prop_radius_list -1 1
49
+
50
+
51
+
52
+ # evaluate matched & unmatched on sintel
53
+
54
+ # evaluate GMFlow without refinement
55
+
56
+ CUDA_VISIBLE_DEVICES=0 python main.py \
57
+ --eval \
58
+ --evaluate_matched_unmatched \
59
+ --resume pretrained/gmflow_things-e9887eda.pth \
60
+ --val_dataset sintel
61
+
62
+ # evaluate GMFlow with refinement
63
+
64
+ CUDA_VISIBLE_DEVICES=0 python main.py \
65
+ --eval \
66
+ --evaluate_matched_unmatched \
67
+ --resume pretrained/gmflow_with_refine_things-36579974.pth \
68
+ --val_dataset sintel \
69
+ --with_speed_metric \
70
+ --padding_factor 32 \
71
+ --upsample_factor 4 \
72
+ --num_scales 2 \
73
+ --attn_splits_list 2 8 \
74
+ --corr_radius_list -1 4 \
75
+ --prop_radius_list -1 1
76
+
77
+
78
+
79
+
80
+
81
+
82
+
83
+
basicsr/archs/gmflow/scripts/submission.sh ADDED
@@ -0,0 +1,67 @@
1
+ #!/usr/bin/env bash
2
+
3
+
4
+ # generate prediction results for submission on sintel and kitti online servers
5
+
6
+
7
+ # GMFlow without refinement
8
+
9
+ # submission to sintel
10
+ CUDA_VISIBLE_DEVICES=0 python main.py \
11
+ --submission \
12
+ --output_path submission/sintel-gmflow-norefine \
13
+ --val_dataset sintel \
14
+ --resume pretrained/gmflow_sintel-0c07dcb3.pth
15
+
16
+ # submission to kitti
17
+ CUDA_VISIBLE_DEVICES=0 python main.py \
18
+ --submission \
19
+ --output_path submission/kitti-gmflow-norefine \
20
+ --val_dataset kitti \
21
+ --resume pretrained/gmflow_kitti-285701a8.pth
22
+
23
+
24
+ # you can also visualize the predictions before submission
25
+ # CUDA_VISIBLE_DEVICES=0 python main.py \
26
+ # --submission \
27
+ # --output_path submission/sintel-gmflow-norefine-vis \
28
+ # --save_vis_flow \
29
+ # --no_save_flo \
30
+ # --val_dataset sintel \
31
+ # --resume pretrained/gmflow_sintel.pth
32
+
33
+
34
+
35
+
36
+ # GMFlow with refinement
37
+
38
+ # submission to sintel
39
+ CUDA_VISIBLE_DEVICES=0 python main.py \
40
+ --submission \
41
+ --output_path submission/sintel-gmflow-withrefine \
42
+ --val_dataset sintel \
43
+ --resume pretrained/gmflow_with_refine_sintel-3ed1cf48.pth \
44
+ --padding_factor 32 \
45
+ --upsample_factor 4 \
46
+ --num_scales 2 \
47
+ --attn_splits_list 2 8 \
48
+ --corr_radius_list -1 4 \
49
+ --prop_radius_list -1 1
50
+
51
+ # submission to kitti
52
+ CUDA_VISIBLE_DEVICES=0 python main.py \
53
+ --submission \
54
+ --output_path submission/kitti-gmflow-withrefine \
55
+ --val_dataset kitti \
56
+ --resume pretrained/gmflow_with_refine_kitti-8d3b9786.pth \
57
+ --padding_factor 32 \
58
+ --upsample_factor 4 \
59
+ --num_scales 2 \
60
+ --attn_splits_list 2 8 \
61
+ --corr_radius_list -1 4 \
62
+ --prop_radius_list -1 1
63
+
64
+
65
+
66
+
67
+
basicsr/archs/gmflow/scripts/train_gmflow.sh ADDED
@@ -0,0 +1,108 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # GMFlow without refinement
4
+
5
+ # number of gpus for training, please set according to your hardware
6
+ # by default use all gpus on a machine
7
+ # can be trained on 4x 16GB V100 or 2x 32GB V100 or 2x 40GB A100 gpus
8
+ NUM_GPUS=4
9
+
10
+ # chairs
11
+ CHECKPOINT_DIR=checkpoints/chairs-gmflow && \
12
+ mkdir -p ${CHECKPOINT_DIR} && \
13
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
14
+ --launcher pytorch \
15
+ --checkpoint_dir ${CHECKPOINT_DIR} \
16
+ --batch_size 16 \
17
+ --val_dataset chairs sintel kitti \
18
+ --lr 4e-4 \
19
+ --image_size 384 512 \
20
+ --padding_factor 16 \
21
+ --upsample_factor 8 \
22
+ --with_speed_metric \
23
+ --val_freq 10000 \
24
+ --save_ckpt_freq 10000 \
25
+ --num_steps 100000 \
26
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
27
+
28
+ # things (our final model is trained for 800K iterations; for an ablation study, you can train for 200K)
29
+ CHECKPOINT_DIR=checkpoints/things-gmflow && \
30
+ mkdir -p ${CHECKPOINT_DIR} && \
31
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
32
+ --launcher pytorch \
33
+ --checkpoint_dir ${CHECKPOINT_DIR} \
34
+ --resume checkpoints/chairs-gmflow/step_100000.pth \
35
+ --stage things \
36
+ --batch_size 8 \
37
+ --val_dataset things sintel kitti \
38
+ --lr 2e-4 \
39
+ --image_size 384 768 \
40
+ --padding_factor 16 \
41
+ --upsample_factor 8 \
42
+ --with_speed_metric \
43
+ --val_freq 40000 \
44
+ --save_ckpt_freq 50000 \
45
+ --num_steps 800000 \
46
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
47
+
48
+ # sintel
49
+ CHECKPOINT_DIR=checkpoints/sintel-gmflow && \
50
+ mkdir -p ${CHECKPOINT_DIR} && \
51
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
52
+ --launcher pytorch \
53
+ --checkpoint_dir ${CHECKPOINT_DIR} \
54
+ --resume checkpoints/things-gmflow/step_800000.pth \
55
+ --stage sintel \
56
+ --batch_size 8 \
57
+ --val_dataset sintel kitti \
58
+ --lr 2e-4 \
59
+ --image_size 320 896 \
60
+ --padding_factor 16 \
61
+ --upsample_factor 8 \
62
+ --with_speed_metric \
63
+ --val_freq 20000 \
64
+ --save_ckpt_freq 20000 \
65
+ --num_steps 200000 \
66
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
67
+
68
+ # kitti
69
+ CHECKPOINT_DIR=checkpoints/kitti-gmflow && \
70
+ mkdir -p ${CHECKPOINT_DIR} && \
71
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
72
+ --launcher pytorch \
73
+ --checkpoint_dir ${CHECKPOINT_DIR} \
74
+ --resume checkpoints/sintel-gmflow/step_200000.pth \
75
+ --stage kitti \
76
+ --batch_size 8 \
77
+ --val_dataset kitti \
78
+ --lr 2e-4 \
79
+ --image_size 320 1152 \
80
+ --padding_factor 16 \
81
+ --upsample_factor 8 \
82
+ --with_speed_metric \
83
+ --val_freq 10000 \
84
+ --save_ckpt_freq 10000 \
85
+ --num_steps 100000 \
86
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
87
+
88
+
89
+ # a final note: if your training is terminated unexpectedly, you can resume from the latest checkpoint
90
+ # an example: resume chairs training
91
+ # CHECKPOINT_DIR=checkpoints/chairs-gmflow && \
92
+ # mkdir -p ${CHECKPOINT_DIR} && \
93
+ # python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
94
+ # --launcher pytorch \
95
+ # --checkpoint_dir ${CHECKPOINT_DIR} \
96
+ # --resume checkpoints/chairs-gmflow/checkpoint_latest.pth \
97
+ # --batch_size 16 \
98
+ # --val_dataset chairs sintel kitti \
99
+ # --lr 4e-4 \
100
+ # --image_size 384 512 \
101
+ # --padding_factor 16 \
102
+ # --upsample_factor 8 \
103
+ # --with_speed_metric \
104
+ # --val_freq 10000 \
105
+ # --save_ckpt_freq 10000 \
106
+ # --num_steps 100000 \
107
+ # 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
108
+
basicsr/archs/gmflow/scripts/train_gmflow_with_refine.sh ADDED
@@ -0,0 +1,128 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # GMFlow with refinement
4
+
5
+ # number of gpus for training, please set according to your hardware
6
+ # by default use all gpus on a machine
7
+ # can be trained on 4x 32GB V100 or 4x 40GB A100 or 8x 16GB V100 gpus
8
+ NUM_GPUS=4
9
+
10
+ # chairs
11
+ CHECKPOINT_DIR=checkpoints/chairs-gmflow_with_refine && \
12
+ mkdir -p ${CHECKPOINT_DIR} && \
13
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
14
+ --launcher pytorch \
15
+ --checkpoint_dir ${CHECKPOINT_DIR} \
16
+ --batch_size 16 \
17
+ --val_dataset chairs sintel kitti \
18
+ --lr 4e-4 \
19
+ --image_size 384 512 \
20
+ --padding_factor 32 \
21
+ --upsample_factor 4 \
22
+ --num_scales 2 \
23
+ --attn_splits_list 2 8 \
24
+ --corr_radius_list -1 4 \
25
+ --prop_radius_list -1 1 \
26
+ --with_speed_metric \
27
+ --val_freq 10000 \
28
+ --save_ckpt_freq 10000 \
29
+ --num_steps 100000 \
30
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
31
+
32
+ # things (our final model is trained for 800K iterations; for an ablation study, you can train for 200K)
33
+ CHECKPOINT_DIR=checkpoints/things-gmflow_with_refine && \
34
+ mkdir -p ${CHECKPOINT_DIR} && \
35
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
36
+ --launcher pytorch \
37
+ --checkpoint_dir ${CHECKPOINT_DIR} \
38
+ --resume checkpoints/chairs-gmflow_with_refine/step_100000.pth \
39
+ --stage things \
40
+ --batch_size 8 \
41
+ --val_dataset things sintel kitti \
42
+ --lr 2e-4 \
43
+ --image_size 384 768 \
44
+ --padding_factor 32 \
45
+ --upsample_factor 4 \
46
+ --num_scales 2 \
47
+ --attn_splits_list 2 8 \
48
+ --corr_radius_list -1 4 \
49
+ --prop_radius_list -1 1 \
50
+ --with_speed_metric \
51
+ --val_freq 40000 \
52
+ --save_ckpt_freq 50000 \
53
+ --num_steps 800000 \
54
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
55
+
56
+ # sintel
57
+ CHECKPOINT_DIR=checkpoints/sintel-gmflow_with_refine && \
58
+ mkdir -p ${CHECKPOINT_DIR} && \
59
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
60
+ --launcher pytorch \
61
+ --checkpoint_dir ${CHECKPOINT_DIR} \
62
+ --resume checkpoints/things-gmflow_with_refine/step_800000.pth \
63
+ --stage sintel \
64
+ --batch_size 8 \
65
+ --val_dataset sintel kitti \
66
+ --lr 2e-4 \
67
+ --image_size 320 896 \
68
+ --padding_factor 32 \
69
+ --upsample_factor 4 \
70
+ --num_scales 2 \
71
+ --attn_splits_list 2 8 \
72
+ --corr_radius_list -1 4 \
73
+ --prop_radius_list -1 1 \
74
+ --with_speed_metric \
75
+ --val_freq 20000 \
76
+ --save_ckpt_freq 20000 \
77
+ --num_steps 200000 \
78
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
79
+
80
+ # kitti
81
+ CHECKPOINT_DIR=checkpoints/kitti-gmflow_with_refine && \
82
+ mkdir -p ${CHECKPOINT_DIR} && \
83
+ python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
84
+ --launcher pytorch \
85
+ --checkpoint_dir ${CHECKPOINT_DIR} \
86
+ --resume checkpoints/sintel-gmflow_with_refine/step_200000.pth \
87
+ --stage kitti \
88
+ --batch_size 8 \
89
+ --val_dataset kitti \
90
+ --lr 2e-4 \
91
+ --image_size 320 1152 \
92
+ --padding_factor 32 \
93
+ --upsample_factor 4 \
94
+ --num_scales 2 \
95
+ --attn_splits_list 2 8 \
96
+ --corr_radius_list -1 4 \
97
+ --prop_radius_list -1 1 \
98
+ --with_speed_metric \
99
+ --val_freq 10000 \
100
+ --save_ckpt_freq 10000 \
101
+ --num_steps 100000 \
102
+ 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
103
+
104
+
105
+
106
+ # a final note: if your training is terminated unexpectedly, you can resume from the latest checkpoint
107
+ # an example: resume chairs training
108
+ # CHECKPOINT_DIR=checkpoints/chairs-gmflow_with_refine && \
109
+ # mkdir -p ${CHECKPOINT_DIR} && \
110
+ # python -m torch.distributed.launch --nproc_per_node=${NUM_GPUS} --master_port=9989 main.py \
111
+ # --launcher pytorch \
112
+ # --checkpoint_dir ${CHECKPOINT_DIR} \
113
+ # --resume checkpoints/chairs-gmflow_with_refine/checkpoint_latest.pth \
114
+ # --batch_size 16 \
115
+ # --val_dataset chairs sintel kitti \
116
+ # --lr 4e-4 \
117
+ # --image_size 384 512 \
118
+ # --padding_factor 32 \
119
+ # --upsample_factor 4 \
120
+ # --num_scales 2 \
121
+ # --attn_splits_list 2 8 \
122
+ # --corr_radius_list -1 4 \
123
+ # --prop_radius_list -1 1 \
124
+ # --with_speed_metric \
125
+ # --val_freq 10000 \
126
+ # --save_ckpt_freq 10000 \
127
+ # --num_steps 100000 \
128
+ # 2>&1 | tee -a ${CHECKPOINT_DIR}/train.log
basicsr/archs/gmflow/utils/dist_utils.py ADDED
@@ -0,0 +1,99 @@
1
+ # Copyright (c) OpenMMLab. All rights reserved.
2
+ # https://github.com/open-mmlab/mmcv/blob/7540cf73ac7e5d1e14d0ffbd9b6759e83929ecfc/mmcv/runner/dist_utils.py
3
+
4
+ import os
5
+ import subprocess
6
+
7
+ import torch
8
+ import torch.multiprocessing as mp
9
+ from torch import distributed as dist
10
+
11
+
12
+ def init_dist(launcher, backend='nccl', **kwargs):
13
+ if mp.get_start_method(allow_none=True) is None:
14
+ mp.set_start_method('spawn')
15
+ if launcher == 'pytorch':
16
+ _init_dist_pytorch(backend, **kwargs)
17
+ elif launcher == 'mpi':
18
+ _init_dist_mpi(backend, **kwargs)
19
+ elif launcher == 'slurm':
20
+ _init_dist_slurm(backend, **kwargs)
21
+ else:
22
+ raise ValueError(f'Invalid launcher type: {launcher}')
23
+
24
+
25
+ def _init_dist_pytorch(backend, **kwargs):
26
+ # TODO: use local_rank instead of rank % num_gpus
27
+ rank = int(os.environ['RANK'])
28
+ num_gpus = torch.cuda.device_count()
29
+ torch.cuda.set_device(rank % num_gpus)
30
+ dist.init_process_group(backend=backend, **kwargs)
31
+
32
+
33
+ def _init_dist_mpi(backend, **kwargs):
34
+ rank = int(os.environ['OMPI_COMM_WORLD_RANK'])
35
+ num_gpus = torch.cuda.device_count()
36
+ torch.cuda.set_device(rank % num_gpus)
37
+ dist.init_process_group(backend=backend, **kwargs)
38
+
39
+
40
+ def _init_dist_slurm(backend, port=None):
41
+ """Initialize slurm distributed training environment.
42
+ If argument ``port`` is not specified, then the master port will be system
43
+ environment variable ``MASTER_PORT``. If ``MASTER_PORT`` is not in system
44
+ environment variable, then a default port ``29500`` will be used.
45
+ Args:
46
+ backend (str): Backend of torch.distributed.
47
+ port (int, optional): Master port. Defaults to None.
48
+ """
49
+ proc_id = int(os.environ['SLURM_PROCID'])
50
+ ntasks = int(os.environ['SLURM_NTASKS'])
51
+ node_list = os.environ['SLURM_NODELIST']
52
+ num_gpus = torch.cuda.device_count()
53
+ torch.cuda.set_device(proc_id % num_gpus)
54
+ addr = subprocess.getoutput(
55
+ f'scontrol show hostname {node_list} | head -n1')
56
+ # specify master port
57
+ if port is not None:
58
+ os.environ['MASTER_PORT'] = str(port)
59
+ elif 'MASTER_PORT' in os.environ:
60
+ pass # use MASTER_PORT in the environment variable
61
+ else:
62
+ # 29500 is torch.distributed default port
63
+ os.environ['MASTER_PORT'] = '29500'
64
+ # use MASTER_ADDR in the environment variable if it already exists
65
+ if 'MASTER_ADDR' not in os.environ:
66
+ os.environ['MASTER_ADDR'] = addr
67
+ os.environ['WORLD_SIZE'] = str(ntasks)
68
+ os.environ['LOCAL_RANK'] = str(proc_id % num_gpus)
69
+ os.environ['RANK'] = str(proc_id)
70
+ dist.init_process_group(backend=backend)
71
+
72
+
73
+ def get_dist_info():
74
+ if dist.is_available():
75
+ initialized = dist.is_initialized()
76
+ else:
77
+ initialized = False
78
+ if initialized:
79
+ rank = dist.get_rank()
80
+ world_size = dist.get_world_size()
81
+ else:
82
+ rank = 0
83
+ world_size = 1
84
+ return rank, world_size
85
+
86
+
87
+ def setup_for_distributed(is_master):
88
+ """
89
+ This function disables printing when not in master process
90
+ """
91
+ import builtins as __builtin__
92
+ builtin_print = __builtin__.print
93
+
94
+ def print(*args, **kwargs):
95
+ force = kwargs.pop('force', False)
96
+ if is_master or force:
97
+ builtin_print(*args, **kwargs)
98
+
99
+ __builtin__.print = print
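These helpers mirror the launch flow used by the training scripts above: torch.distributed.launch (or Slurm) populates the rank-related environment variables, init_dist binds each process to a GPU and creates the process group, and get_dist_info / setup_for_distributed then restrict logging to rank 0. A minimal usage sketch (the entry-point file and argument names are illustrative, not part of this repo):

import argparse
from utils.dist_utils import init_dist, get_dist_info, setup_for_distributed

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--launcher', default='pytorch', choices=['pytorch', 'mpi', 'slurm'])
    parser.add_argument('--local_rank', type=int, default=0)  # injected by torch.distributed.launch
    args = parser.parse_args()

    init_dist(args.launcher)             # sets the CUDA device and calls dist.init_process_group
    rank, world_size = get_dist_info()
    setup_for_distributed(rank == 0)     # silence print() on non-master ranks

    print(f'rank {rank} / world size {world_size} ready')  # effectively printed by rank 0 only

if __name__ == '__main__':
    main()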
basicsr/archs/gmflow/utils/flow_viz.py ADDED
@@ -0,0 +1,291 @@
1
+ # MIT License
2
+ #
3
+ # Copyright (c) 2018 Tom Runia
4
+ #
5
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ # of this software and associated documentation files (the "Software"), to deal
7
+ # in the Software without restriction, including without limitation the rights
8
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ # copies of the Software, and to permit persons to whom the Software is
10
+ # furnished to do so, subject to conditions.
11
+ #
12
+ # Author: Tom Runia
13
+ # Date Created: 2018-08-03
14
+
15
+ from __future__ import absolute_import
16
+ from __future__ import division
17
+ from __future__ import print_function
18
+
19
+ import numpy as np
20
+
21
+
22
+ def make_colorwheel():
23
+ '''
24
+ Generates a color wheel for optical flow visualization as presented in:
25
+ Baker et al. "A Database and Evaluation Methodology for Optical Flow" (ICCV, 2007)
26
+ URL: http://vision.middlebury.edu/flow/flowEval-iccv07.pdf
27
+ According to the C++ source code of Daniel Scharstein
28
+ According to the Matlab source code of Deqing Sun
29
+ '''
30
+
31
+ RY = 15
32
+ YG = 6
33
+ GC = 4
34
+ CB = 11
35
+ BM = 13
36
+ MR = 6
37
+
38
+ ncols = RY + YG + GC + CB + BM + MR
39
+ colorwheel = np.zeros((ncols, 3))
40
+ col = 0
41
+
42
+ # RY
43
+ colorwheel[0:RY, 0] = 255
44
+ colorwheel[0:RY, 1] = np.floor(255 * np.arange(0, RY) / RY)
45
+ col = col + RY
46
+ # YG
47
+ colorwheel[col:col + YG, 0] = 255 - np.floor(255 * np.arange(0, YG) / YG)
48
+ colorwheel[col:col + YG, 1] = 255
49
+ col = col + YG
50
+ # GC
51
+ colorwheel[col:col + GC, 1] = 255
52
+ colorwheel[col:col + GC, 2] = np.floor(255 * np.arange(0, GC) / GC)
53
+ col = col + GC
54
+ # CB
55
+ colorwheel[col:col + CB, 1] = 255 - np.floor(255 * np.arange(CB) / CB)
56
+ colorwheel[col:col + CB, 2] = 255
57
+ col = col + CB
58
+ # BM
59
+ colorwheel[col:col + BM, 2] = 255
60
+ colorwheel[col:col + BM, 0] = np.floor(255 * np.arange(0, BM) / BM)
61
+ col = col + BM
62
+ # MR
63
+ colorwheel[col:col + MR, 2] = 255 - np.floor(255 * np.arange(MR) / MR)
64
+ colorwheel[col:col + MR, 0] = 255
65
+ return colorwheel
66
+
67
+
68
+ def flow_compute_color(u, v, convert_to_bgr=False):
69
+ '''
70
+ Applies the flow color wheel to (possibly clipped) flow components u and v.
71
+ According to the C++ source code of Daniel Scharstein
72
+ According to the Matlab source code of Deqing Sun
73
+ :param u: np.ndarray, input horizontal flow
74
+ :param v: np.ndarray, input vertical flow
75
+ :param convert_to_bgr: bool, whether to change ordering and output BGR instead of RGB
76
+ :return:
77
+ '''
78
+
79
+ flow_image = np.zeros((u.shape[0], u.shape[1], 3), np.uint8)
80
+
81
+ colorwheel = make_colorwheel() # shape [55x3]
82
+ ncols = colorwheel.shape[0]
83
+
84
+ rad = np.sqrt(np.square(u) + np.square(v))
85
+ a = np.arctan2(-v, -u) / np.pi
86
+
87
+ fk = (a + 1) / 2 * (ncols - 1) + 1
88
+ k0 = np.floor(fk).astype(np.int32)
89
+ k1 = k0 + 1
90
+ k1[k1 == ncols] = 1
91
+ f = fk - k0
92
+
93
+ for i in range(colorwheel.shape[1]):
94
+ tmp = colorwheel[:, i]
95
+ col0 = tmp[k0] / 255.0
96
+ col1 = tmp[k1] / 255.0
97
+ col = (1 - f) * col0 + f * col1
98
+
99
+ idx = (rad <= 1)
100
+ col[idx] = 1 - rad[idx] * (1 - col[idx])
101
+ col[~idx] = col[~idx] * 0.75 # out of range?
102
+
103
+ # Note the 2-i => BGR instead of RGB
104
+ ch_idx = 2 - i if convert_to_bgr else i
105
+ flow_image[:, :, ch_idx] = np.floor(255 * col)
106
+
107
+ return flow_image
108
+
109
+
110
+ def flow_to_color(flow_uv, clip_flow=None, convert_to_bgr=False):
111
+ '''
112
+ Expects a two dimensional flow image of shape [H,W,2]
113
+ According to the C++ source code of Daniel Scharstein
114
+ According to the Matlab source code of Deqing Sun
115
+ :param flow_uv: np.ndarray of shape [H,W,2]
116
+ :param clip_flow: float, maximum clipping value for flow
117
+ :return:
118
+ '''
119
+
120
+ assert flow_uv.ndim == 3, 'input flow must have three dimensions'
121
+ assert flow_uv.shape[2] == 2, 'input flow must have shape [H,W,2]'
122
+
123
+ if clip_flow is not None:
124
+ flow_uv = np.clip(flow_uv, 0, clip_flow)
125
+
126
+ u = flow_uv[:, :, 0]
127
+ v = flow_uv[:, :, 1]
128
+
129
+ rad = np.sqrt(np.square(u) + np.square(v))
130
+ rad_max = np.max(rad)
131
+
132
+ epsilon = 1e-5
133
+ u = u / (rad_max + epsilon)
134
+ v = v / (rad_max + epsilon)
135
+
136
+ return flow_compute_color(u, v, convert_to_bgr)
137
+
138
+
139
+ UNKNOWN_FLOW_THRESH = 1e7
140
+ SMALLFLOW = 0.0
141
+ LARGEFLOW = 1e8
142
+
143
+
144
+ def make_color_wheel():
145
+ """
146
+ Generate color wheel according Middlebury color code
147
+ :return: Color wheel
148
+ """
149
+ RY = 15
150
+ YG = 6
151
+ GC = 4
152
+ CB = 11
153
+ BM = 13
154
+ MR = 6
155
+
156
+ ncols = RY + YG + GC + CB + BM + MR
157
+
158
+ colorwheel = np.zeros([ncols, 3])
159
+
160
+ col = 0
161
+
162
+ # RY
163
+ colorwheel[0:RY, 0] = 255
164
+ colorwheel[0:RY, 1] = np.transpose(np.floor(255 * np.arange(0, RY) / RY))
165
+ col += RY
166
+
167
+ # YG
168
+ colorwheel[col:col + YG, 0] = 255 - np.transpose(np.floor(255 * np.arange(0, YG) / YG))
169
+ colorwheel[col:col + YG, 1] = 255
170
+ col += YG
171
+
172
+ # GC
173
+ colorwheel[col:col + GC, 1] = 255
174
+ colorwheel[col:col + GC, 2] = np.transpose(np.floor(255 * np.arange(0, GC) / GC))
175
+ col += GC
176
+
177
+ # CB
178
+ colorwheel[col:col + CB, 1] = 255 - np.transpose(np.floor(255 * np.arange(0, CB) / CB))
179
+ colorwheel[col:col + CB, 2] = 255
180
+ col += CB
181
+
182
+ # BM
183
+ colorwheel[col:col + BM, 2] = 255
184
+ colorwheel[col:col + BM, 0] = np.transpose(np.floor(255 * np.arange(0, BM) / BM))
185
+ col += + BM
186
+
187
+ # MR
188
+ colorwheel[col:col + MR, 2] = 255 - np.transpose(np.floor(255 * np.arange(0, MR) / MR))
189
+ colorwheel[col:col + MR, 0] = 255
190
+
191
+ return colorwheel
192
+
193
+
194
+ def compute_color(u, v):
195
+ """
196
+ compute optical flow color map
197
+ :param u: optical flow horizontal map
198
+ :param v: optical flow vertical map
199
+ :return: optical flow in color code
200
+ """
201
+ [h, w] = u.shape
202
+ img = np.zeros([h, w, 3])
203
+ nanIdx = np.isnan(u) | np.isnan(v)
204
+ u[nanIdx] = 0
205
+ v[nanIdx] = 0
206
+
207
+ colorwheel = make_color_wheel()
208
+ ncols = np.size(colorwheel, 0)
209
+
210
+ rad = np.sqrt(u ** 2 + v ** 2)
211
+
212
+ a = np.arctan2(-v, -u) / np.pi
213
+
214
+ fk = (a + 1) / 2 * (ncols - 1) + 1
215
+
216
+ k0 = np.floor(fk).astype(int)
217
+
218
+ k1 = k0 + 1
219
+ k1[k1 == ncols + 1] = 1
220
+ f = fk - k0
221
+
222
+ for i in range(0, np.size(colorwheel, 1)):
223
+ tmp = colorwheel[:, i]
224
+ col0 = tmp[k0 - 1] / 255
225
+ col1 = tmp[k1 - 1] / 255
226
+ col = (1 - f) * col0 + f * col1
227
+
228
+ idx = rad <= 1
229
+ col[idx] = 1 - rad[idx] * (1 - col[idx])
230
+ notidx = np.logical_not(idx)
231
+
232
+ col[notidx] *= 0.75
233
+ img[:, :, i] = np.uint8(np.floor(255 * col * (1 - nanIdx)))
234
+
235
+ return img
236
+
237
+
238
+ # from https://github.com/gengshan-y/VCN
239
+ def flow_to_image(flow):
240
+ """
241
+ Convert flow into middlebury color code image
242
+ :param flow: optical flow map
243
+ :return: optical flow image in middlebury color
244
+ """
245
+ u = flow[:, :, 0]
246
+ v = flow[:, :, 1]
247
+
248
+ maxu = -999.
249
+ maxv = -999.
250
+ minu = 999.
251
+ minv = 999.
252
+
253
+ idxUnknow = (abs(u) > UNKNOWN_FLOW_THRESH) | (abs(v) > UNKNOWN_FLOW_THRESH)
254
+ u[idxUnknow] = 0
255
+ v[idxUnknow] = 0
256
+
257
+ maxu = max(maxu, np.max(u))
258
+ minu = min(minu, np.min(u))
259
+
260
+ maxv = max(maxv, np.max(v))
261
+ minv = min(minv, np.min(v))
262
+
263
+ rad = np.sqrt(u ** 2 + v ** 2)
264
+ maxrad = max(-1, np.max(rad))
265
+
266
+ u = u / (maxrad + np.finfo(float).eps)
267
+ v = v / (maxrad + np.finfo(float).eps)
268
+
269
+ img = compute_color(u, v)
270
+
271
+ idx = np.repeat(idxUnknow[:, :, np.newaxis], 3, axis=2)
272
+ img[idx] = 0
273
+
274
+ return np.uint8(img)
275
+
276
+
277
+ def save_vis_flow_tofile(flow, output_path):
278
+ vis_flow = flow_to_image(flow)
279
+ from PIL import Image
280
+ img = Image.fromarray(vis_flow)
281
+ img.save(output_path)
282
+
283
+
284
+ def flow_tensor_to_image(flow):
285
+ """Used for tensorboard visualization"""
286
+ flow = flow.permute(1, 2, 0) # [H, W, 2]
287
+ flow = flow.detach().cpu().numpy()
288
+ flow = flow_to_image(flow) # [H, W, 3]
289
+ flow = np.transpose(flow, (2, 0, 1)) # [3, H, W]
290
+
291
+ return flow
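Both color-coding paths above (flow_to_color and the VCN-derived flow_to_image) follow the Middlebury convention: flow direction is mapped to hue via the color wheel and magnitude to saturation. A small sketch that visualizes a synthetic flow field (the output file name is just an example):

import numpy as np
from utils.flow_viz import flow_to_color, save_vis_flow_tofile

h, w = 128, 128
flow = np.zeros((h, w, 2), dtype=np.float32)
flow[..., 0] = 5.0                              # u: uniform rightward motion
flow[..., 1] = np.linspace(-5, 5, h)[:, None]   # v: varies smoothly from up to down

rgb = flow_to_color(flow)                       # [H, W, 3] uint8 color coding
save_vis_flow_tofile(flow, 'toy_flow.png')      # same mapping, written to disk via PIL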
basicsr/archs/gmflow/utils/frame_utils.py ADDED
@@ -0,0 +1,131 @@
1
+ import numpy as np
2
+ from PIL import Image
3
+ from os.path import splitext
4
+ import re
5
+ import cv2
6
+
7
+ TAG_CHAR = np.array([202021.25], np.float32)
8
+
9
+
10
+ def readFlow(fn):
11
+ """ Read .flo file in Middlebury format"""
12
+ # Code adapted from:
13
+ # http://stackoverflow.com/questions/28013200/reading-middlebury-flow-files-with-python-bytes-array-numpy
14
+
15
+ # WARNING: this will work on little-endian architectures (eg Intel x86) only!
16
+ # print 'fn = %s'%(fn)
17
+ with open(fn, 'rb') as f:
18
+ magic = np.fromfile(f, np.float32, count=1)
19
+ if 202021.25 != magic:
20
+ print('Magic number incorrect. Invalid .flo file')
21
+ return None
22
+ else:
23
+ w = np.fromfile(f, np.int32, count=1)
24
+ h = np.fromfile(f, np.int32, count=1)
25
+ # print 'Reading %d x %d flo file\n' % (w, h)
26
+ data = np.fromfile(f, np.float32, count=2 * int(w) * int(h))
27
+ # Reshape testdata into 3D array (columns, rows, bands)
28
+ # The reshape here is for visualization, the original code is (w,h,2)
29
+ return np.resize(data, (int(h), int(w), 2))
30
+
31
+
32
+ def readPFM(file):
33
+ file = open(file, 'rb')
34
+
35
+ color = None
36
+ width = None
37
+ height = None
38
+ scale = None
39
+ endian = None
40
+
41
+ header = file.readline().rstrip()
42
+ if header == b'PF':
43
+ color = True
44
+ elif header == b'Pf':
45
+ color = False
46
+ else:
47
+ raise Exception('Not a PFM file.')
48
+
49
+ dim_match = re.match(rb'^(\d+)\s(\d+)\s$', file.readline())
50
+ if dim_match:
51
+ width, height = map(int, dim_match.groups())
52
+ else:
53
+ raise Exception('Malformed PFM header.')
54
+
55
+ scale = float(file.readline().rstrip())
56
+ if scale < 0: # little-endian
57
+ endian = '<'
58
+ scale = -scale
59
+ else:
60
+ endian = '>' # big-endian
61
+
62
+ data = np.fromfile(file, endian + 'f')
63
+ shape = (height, width, 3) if color else (height, width)
64
+
65
+ data = np.reshape(data, shape)
66
+ data = np.flipud(data)
67
+ return data
68
+
69
+
70
+ def writeFlow(filename, uv, v=None):
71
+ """ Write optical flow to file.
72
+
73
+ If v is None, uv is assumed to contain both u and v channels,
74
+ stacked in depth.
75
+ Original code by Deqing Sun, adapted from Daniel Scharstein.
76
+ """
77
+ nBands = 2
78
+
79
+ if v is None:
80
+ assert (uv.ndim == 3)
81
+ assert (uv.shape[2] == 2)
82
+ u = uv[:, :, 0]
83
+ v = uv[:, :, 1]
84
+ else:
85
+ u = uv
86
+
87
+ assert (u.shape == v.shape)
88
+ height, width = u.shape
89
+ f = open(filename, 'wb')
90
+ # write the header
91
+ f.write(TAG_CHAR)
92
+ np.array(width).astype(np.int32).tofile(f)
93
+ np.array(height).astype(np.int32).tofile(f)
94
+ # arrange into matrix form
95
+ tmp = np.zeros((height, width * nBands))
96
+ tmp[:, np.arange(width) * 2] = u
97
+ tmp[:, np.arange(width) * 2 + 1] = v
98
+ tmp.astype(np.float32).tofile(f)
99
+ f.close()
100
+
101
+
102
+ def readFlowKITTI(filename):
103
+ flow = cv2.imread(filename, cv2.IMREAD_ANYDEPTH | cv2.IMREAD_COLOR)
104
+ flow = flow[:, :, ::-1].astype(np.float32)
105
+ flow, valid = flow[:, :, :2], flow[:, :, 2]
106
+ flow = (flow - 2 ** 15) / 64.0
107
+ return flow, valid
108
+
109
+
110
+ def writeFlowKITTI(filename, uv):
111
+ uv = 64.0 * uv + 2 ** 15
112
+ valid = np.ones([uv.shape[0], uv.shape[1], 1])
113
+ uv = np.concatenate([uv, valid], axis=-1).astype(np.uint16)
114
+ cv2.imwrite(filename, uv[..., ::-1])
115
+
116
+
117
+ def read_gen(file_name, pil=False):
118
+ ext = splitext(file_name)[-1]
119
+ if ext == '.png' or ext == '.jpeg' or ext == '.ppm' or ext == '.jpg':
120
+ return Image.open(file_name)
121
+ elif ext == '.bin' or ext == '.raw':
122
+ return np.load(file_name)
123
+ elif ext == '.flo':
124
+ return readFlow(file_name).astype(np.float32)
125
+ elif ext == '.pfm':
126
+ flow = readPFM(file_name).astype(np.float32)
127
+ if len(flow.shape) == 2:
128
+ return flow
129
+ else:
130
+ return flow[:, :, :-1]
131
+ return []
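writeFlow and readFlow implement the Middlebury .flo layout: a float32 magic number (202021.25), int32 width and height, then interleaved u/v values. A quick round-trip sketch (the file name is illustrative):

import numpy as np
from utils.frame_utils import writeFlow, readFlow

flow = np.random.randn(64, 96, 2).astype(np.float32)  # synthetic [H, W, 2] flow
writeFlow('example.flo', flow)                         # header: magic, width, height, then data
restored = readFlow('example.flo')

assert restored.shape == flow.shape
assert np.allclose(restored, flow)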
basicsr/archs/gmflow/utils/logger.py ADDED
@@ -0,0 +1,68 @@
1
+ import torch
2
+
3
+ from utils.flow_viz import flow_tensor_to_image
4
+
5
+
6
+ class Logger:
7
+ def __init__(self, lr_scheduler,
8
+ summary_writer,
9
+ summary_freq=100,
10
+ start_step=0,
11
+ ):
12
+ self.lr_scheduler = lr_scheduler
13
+ self.total_steps = start_step
14
+ self.running_loss = {}
15
+ self.summary_writer = summary_writer
16
+ self.summary_freq = summary_freq
17
+
18
+ def print_training_status(self, mode='train'):
19
+
20
+ print('step: %06d \t epe: %.3f' % (self.total_steps, self.running_loss['epe'] / self.summary_freq))
21
+
22
+ for k in self.running_loss:
23
+ self.summary_writer.add_scalar(mode + '/' + k,
24
+ self.running_loss[k] / self.summary_freq, self.total_steps)
25
+ self.running_loss[k] = 0.0
26
+
27
+ def lr_summary(self):
28
+ lr = self.lr_scheduler.get_last_lr()[0]
29
+ self.summary_writer.add_scalar('lr', lr, self.total_steps)
30
+
31
+ def add_image_summary(self, img1, img2, flow_preds, flow_gt, mode='train',
32
+ ):
33
+ if self.total_steps % self.summary_freq == 0:
34
+ img_concat = torch.cat((img1[0].detach().cpu(), img2[0].detach().cpu()), dim=-1)
35
+ img_concat = img_concat.type(torch.uint8) # convert to uint8 to visualize in tensorboard
36
+
37
+ flow_pred = flow_tensor_to_image(flow_preds[-1][0])
38
+ forward_flow_gt = flow_tensor_to_image(flow_gt[0])
39
+ flow_concat = torch.cat((torch.from_numpy(flow_pred),
40
+ torch.from_numpy(forward_flow_gt)), dim=-1)
41
+
42
+ concat = torch.cat((img_concat, flow_concat), dim=-2)
43
+
44
+ self.summary_writer.add_image(mode + '/img_pred_gt', concat, self.total_steps)
45
+
46
+ def push(self, metrics, mode='train'):
47
+ self.total_steps += 1
48
+
49
+ self.lr_summary()
50
+
51
+ for key in metrics:
52
+ if key not in self.running_loss:
53
+ self.running_loss[key] = 0.0
54
+
55
+ self.running_loss[key] += metrics[key]
56
+
57
+ if self.total_steps % self.summary_freq == 0:
58
+ self.print_training_status(mode)
59
+ self.running_loss = {}
60
+
61
+ def write_dict(self, results):
62
+ for key in results:
63
+ tag = key.split('_')[0]
64
+ tag = tag + '/' + key
65
+ self.summary_writer.add_scalar(tag, results[key], self.total_steps)
66
+
67
+ def close(self):
68
+ self.summary_writer.close()
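Logger is driven once per training step via push() with a metrics dict that must contain an 'epe' entry; every summary_freq steps the averaged scalars are written to TensorBoard and the running sums are reset. A short usage sketch (the model, scheduler and metric values are placeholders):

import torch
from torch.utils.tensorboard import SummaryWriter
from utils.logger import Logger

model = torch.nn.Linear(2, 2)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1000)

logger = Logger(lr_scheduler, SummaryWriter('runs/example'), summary_freq=100)

for step in range(1000):
    metrics = {'epe': 1.0, 'flow_loss': 0.5}  # would come from the loss computation in practice
    logger.push(metrics)                      # logs lr every step, flushes averaged scalars every 100 steps
logger.close()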
basicsr/archs/gmflow/utils/misc.py ADDED
@@ -0,0 +1,42 @@
1
+ import os
2
+ import numpy as np
3
+ import sys
4
+ import json
5
+
6
+
7
+ def read_text_lines(filepath):
8
+ with open(filepath, 'r') as f:
9
+ lines = f.readlines()
10
+ lines = [l.rstrip() for l in lines]
11
+ return lines
12
+
13
+
14
+ def check_path(path):
15
+ if not os.path.exists(path):
16
+ os.makedirs(path, exist_ok=True) # explicitly set exist_ok when multi-processing
17
+
18
+
19
+ def save_command(save_path, filename='command_train.txt'):
20
+ check_path(save_path)
21
+ command = sys.argv
22
+ save_file = os.path.join(save_path, filename)
23
+ # Save all training commands when resuming training
24
+ with open(save_file, 'a') as f:
25
+ f.write(' '.join(command))
26
+ f.write('\n\n')
27
+
28
+
29
+ def save_args(args, filename='args.json'):
30
+ args_dict = vars(args)
31
+ check_path(args.checkpoint_dir)
32
+ save_path = os.path.join(args.checkpoint_dir, filename)
33
+
34
+ # Save all training args when resuming training
35
+ with open(save_path, 'a') as f:
36
+ json.dump(args_dict, f, indent=4, sort_keys=False)
37
+ f.write('\n\n')
38
+
39
+
40
+ def int_list(s):
41
+ """Convert string to int list"""
42
+ return [int(x) for x in s.split(',')]
basicsr/archs/gmflow/utils/utils.py ADDED
@@ -0,0 +1,58 @@
1
+ import torch
2
+ import torch.nn.functional as F
3
+
4
+
5
+ class InputPadder:
6
+ """ Pads images such that their dimensions are divisible by padding_factor (default: 8) """
7
+
8
+ def __init__(self, dims, mode='sintel', padding_factor=8):
9
+ self.ht, self.wd = dims[-2:]
10
+ pad_ht = (((self.ht // padding_factor) + 1) * padding_factor - self.ht) % padding_factor
11
+ pad_wd = (((self.wd // padding_factor) + 1) * padding_factor - self.wd) % padding_factor
12
+ if mode == 'sintel':
13
+ self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, pad_ht // 2, pad_ht - pad_ht // 2]
14
+ else:
15
+ self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, 0, pad_ht]
16
+
17
+ def pad(self, *inputs):
18
+ return [F.pad(x, self._pad, mode='replicate') for x in inputs]
19
+
20
+ def unpad(self, x):
21
+ ht, wd = x.shape[-2:]
22
+ c = [self._pad[2], ht - self._pad[3], self._pad[0], wd - self._pad[1]]
23
+ return x[..., c[0]:c[1], c[2]:c[3]]
24
+
25
+
26
+ def coords_grid(batch, ht, wd, normalize=False):
27
+ if normalize: # [-1, 1]
28
+ coords = torch.meshgrid(2 * torch.arange(ht) / (ht - 1) - 1,
29
+ 2 * torch.arange(wd) / (wd - 1) - 1)
30
+ else:
31
+ coords = torch.meshgrid(torch.arange(ht), torch.arange(wd))
32
+ coords = torch.stack(coords[::-1], dim=0).float()
33
+ return coords[None].repeat(batch, 1, 1, 1) # [B, 2, H, W]
34
+
35
+
36
+ def compute_out_of_boundary_mask(flow):
37
+ # flow: [B, 2, H, W]
38
+ assert flow.dim() == 4 and flow.size(1) == 2
39
+ b, _, h, w = flow.shape
40
+ init_coords = coords_grid(b, h, w).to(flow.device)
41
+ corres = init_coords + flow # [B, 2, H, W]
42
+
43
+ max_w = w - 1
44
+ max_h = h - 1
45
+
46
+ valid_mask = (corres[:, 0] >= 0) & (corres[:, 0] <= max_w) & (corres[:, 1] >= 0) & (corres[:, 1] <= max_h)
47
+
48
+ # in case very large flow
49
+ flow_mask = (flow[:, 0].abs() <= max_w) & (flow[:, 1].abs() <= max_h)
50
+
51
+ valid_mask = valid_mask & flow_mask
52
+
53
+ return valid_mask # [B, H, W]
54
+
55
+
56
+ def count_parameters(model):
57
+ num = sum(p.numel() for p in model.parameters() if p.requires_grad)
58
+ return num
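InputPadder is the counterpart of the --padding_factor flag used in the scripts above: images are padded up to the next multiple of the factor before inference and predictions are cropped back afterwards. A minimal sketch with a placeholder prediction instead of a real model call:

import torch
from utils.utils import InputPadder

img1 = torch.randn(1, 3, 436, 1024)   # Sintel-sized frames; height is not divisible by 32
img2 = torch.randn(1, 3, 436, 1024)

padder = InputPadder(img1.shape, padding_factor=32)
img1_p, img2_p = padder.pad(img1, img2)        # both padded to 448 x 1024

flow = torch.zeros(1, 2, *img1_p.shape[-2:])   # stand-in for model(img1_p, img2_p)
flow = padder.unpad(flow)                      # cropped back to 436 x 1024

print(img1_p.shape, flow.shape)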
basicsr/archs/gmflow_arch.py ADDED
@@ -0,0 +1,82 @@
1
+ import numpy as np
2
+ import torch
3
+ import torch.nn as nn
4
+ import torch.nn.functional as F
5
+ import pdb
6
+
7
+ from basicsr.archs.gmflow.gmflow.gmflow import GMFlow
8
+
9
+
10
+ class FlowGenerator(nn.Module):
11
+ """GM flow generation.
12
+
13
+ Args:
14
+ path (str): Pre-trained path. Default: None.
15
+ requires_grad (bool): If True, the parameters of the flow network will be
16
+ optimized. Default: False.
17
+ """
18
+
19
+ def __init__(self,
20
+ path=None,
21
+ requires_grad=False,):
22
+ super().__init__()
23
+
24
+ self.model = GMFlow()
25
+
26
+ if path is not None:
27
+ weights = torch.load(
28
+ path, map_location=lambda storage, loc: storage)['model']
29
+ self.model.load_state_dict(weights, strict=True)
30
+
31
+ if not requires_grad:
32
+ self.model.eval()
33
+ for param in self.parameters():
34
+ param.requires_grad = False
35
+ else:
36
+ self.model.train()
37
+ for param in self.parameters():
38
+ param.requires_grad = True
39
+
40
+ def forward(self, im1, im2,
41
+ attn_splits_list=[2],
42
+ corr_radius_list=[-1],
43
+ prop_radius_list=[-1]):
44
+ """Forward function.
45
+
46
+ Args:
47
+ im1 (Tensor): Input tensor with shape (n, c, h, w).
48
+ im2 (Tensor): Input tensor with shape (n, c, h, w).
49
+
50
+ Returns:
51
+ Tensor: Forward results.
52
+ """
53
+ assert im1.shape == im2.shape
54
+ N, C, H, W = im1.shape
55
+
56
+ im1 = (im1 + 1) / 2 * 255
57
+ im2 = (im2 + 1) / 2 * 255
58
+
59
+ flow = self.model(im1, im2,
60
+ attn_splits_list=attn_splits_list,
61
+ corr_radius_list=corr_radius_list,
62
+ prop_radius_list=prop_radius_list,
63
+ pred_bidir_flow=False)['flow_preds'][-1]
64
+ # backward_flow = flow[N:]
65
+
66
+ return flow
67
+
68
+
69
+ if __name__ == '__main__':
70
+ h, w = 512, 512
71
+ # model = RAFT().cuda()
72
+ model = FlowGenerator(
73
+ path='../../weights/GMFlow/gmflow_sintel-0c07dcb3.pth').cuda()
74
+ model.eval()
75
+ print(model)
76
+
77
+ x = torch.randn((1, 3, h, w)).cuda()
78
+ y = torch.randn((1, 3, h, w)).cuda()
79
+ with torch.no_grad():
80
+ out = model(x, y)
81
+ pdb.set_trace()
82
+ print(out.shape)
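In the KEEP architecture below, this wrapper estimates flow between neighbouring frames so that features from the previous frame can be warped onto the current one. A rough usage sketch (the weights path is only an example; note that forward() expects inputs in [-1, 1], since it rescales them to [0, 255] internally):

import torch
from basicsr.archs.gmflow_arch import FlowGenerator
from basicsr.archs.arch_util import flow_warp

flownet = FlowGenerator(path='weights/GMFlow/gmflow_sintel-0c07dcb3.pth').cuda().eval()

curr = torch.rand(1, 3, 512, 512).cuda() * 2 - 1   # current frame in [-1, 1]
prev = torch.rand(1, 3, 512, 512).cuda() * 2 - 1   # previous frame in [-1, 1]

with torch.no_grad():
    flow = flownet(curr, prev)                     # [N, 2, H, W], flow from curr to prev
    # flow_warp expects the flow as [N, H, W, 2]
    prev_aligned = flow_warp(prev, flow.permute(0, 2, 3, 1))

print(flow.shape, prev_aligned.shape)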
basicsr/archs/keep_arch.py ADDED
@@ -0,0 +1,936 @@
1
+ import math
2
+ from re import T
3
+ import numpy as np
4
+ import pdb
5
+ import torch
6
+ from torch import nn, Tensor
7
+ import torch.nn.functional as F
8
+ from typing import Optional, List
9
+ from torch.profiler import profile, record_function, ProfilerActivity
10
+ from collections import defaultdict
11
+
12
+ # from gpu_mem_track import MemTracker
13
+ from einops import rearrange, repeat
14
+
15
+ from basicsr.archs.vqgan_arch import Encoder, VectorQuantizer, GumbelQuantizer, Generator, ResBlock
16
+ from basicsr.archs.arch_util import flow_warp, resize_flow
17
+ from basicsr.archs.gmflow_arch import FlowGenerator
18
+ from basicsr.utils import get_root_logger
19
+ from basicsr.utils.registry import ARCH_REGISTRY
20
+
21
+ from diffusers.models.attention import CrossAttention, FeedForward, AdaLayerNorm
+ from diffusers.utils.import_utils import is_xformers_available
+
+ # xformers is optional; it is only touched inside set_use_memory_efficient_attention_xformers
+ if is_xformers_available():
+     import xformers
+     import xformers.ops
+ else:
+     xformers = None
22
+
23
+ # gpu_tracker = MemTracker()
24
+
25
+
26
+ def calc_mean_std(feat, eps=1e-5):
27
+ """Calculate mean and std for adaptive_instance_normalization.
28
+
29
+ Args:
30
+ feat (Tensor): 4D tensor.
31
+ eps (float): A small value added to the variance to avoid
32
+ divide-by-zero. Default: 1e-5.
33
+ """
34
+ size = feat.size()
35
+ assert len(size) == 4, 'The input feature should be 4D tensor.'
36
+ b, c = size[:2]
37
+ feat_var = feat.view(b, c, -1).var(dim=2) + eps
38
+ feat_std = feat_var.sqrt().view(b, c, 1, 1)
39
+ feat_mean = feat.view(b, c, -1).mean(dim=2).view(b, c, 1, 1)
40
+ return feat_mean, feat_std
41
+
42
+
43
+ def adaptive_instance_normalization(content_feat, style_feat):
44
+ """Adaptive instance normalization.
45
+
46
+ Adjust the reference features to have similar color and illumination
47
+ to those of the degraded features.
48
+
49
+ Args:
50
+ content_feat (Tensor): The reference feature.
51
+ style_feat (Tensor): The degraded features.
52
+ """
53
+ size = content_feat.size()
54
+ style_mean, style_std = calc_mean_std(style_feat)
55
+ content_mean, content_std = calc_mean_std(content_feat)
56
+ normalized_feat = (content_feat - content_mean.expand(size)
57
+ ) / content_std.expand(size)
58
+ return normalized_feat * style_std.expand(size) + style_mean.expand(size)
59
+
60
+
61
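# A quick sanity check of what the two helpers above do: adaptive_instance_normalization
# keeps the spatial structure of content_feat but transfers the per-channel mean/std of
# style_feat onto it. Illustrative snippet only (random tensors, not part of the KEEP model):
#
#   content = torch.randn(2, 256, 16, 16)
#   style = torch.randn(2, 256, 16, 16) * 3.0 + 1.0
#   out = adaptive_instance_normalization(content, style)
#   out_mean, out_std = calc_mean_std(out)
#   style_mean, style_std = calc_mean_std(style)
#   assert torch.allclose(out_mean, style_mean, atol=1e-4)
#   assert torch.allclose(out_std, style_std, atol=1e-4)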
+ class PositionEmbeddingSine(nn.Module):
62
+ """
63
+ This is a more standard version of the position embedding, very similar to the one
64
+ used by the Attention is all you need paper, generalized to work on images.
65
+ """
66
+
67
+ def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None):
68
+ super().__init__()
69
+ self.num_pos_feats = num_pos_feats
70
+ self.temperature = temperature
71
+ self.normalize = normalize
72
+ if scale is not None and normalize is False:
73
+ raise ValueError("normalize should be True if scale is passed")
74
+ if scale is None:
75
+ scale = 2 * math.pi
76
+ self.scale = scale
77
+
78
+ def forward(self, x, mask=None):
79
+ if mask is None:
80
+ mask = torch.zeros((x.size(0), x.size(2), x.size(3)),
81
+ device=x.device, dtype=torch.bool)
82
+ not_mask = ~mask
83
+ y_embed = not_mask.cumsum(1, dtype=torch.float32)
84
+ x_embed = not_mask.cumsum(2, dtype=torch.float32)
85
+ if self.normalize:
86
+ eps = 1e-6
87
+ y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
88
+ x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
89
+
90
+ dim_t = torch.arange(self.num_pos_feats,
91
+ dtype=torch.float32, device=x.device)
92
+ dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats)
93
+
94
+ pos_x = x_embed[:, :, :, None] / dim_t
95
+ pos_y = y_embed[:, :, :, None] / dim_t
96
+ pos_x = torch.stack(
97
+ (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4
98
+ ).flatten(3)
99
+ pos_y = torch.stack(
100
+ (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4
101
+ ).flatten(3)
102
+ pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
103
+ return pos
104
+
105
+
106
+ def _get_activation_fn(activation):
107
+ """Return an activation function given a string"""
108
+ if activation == "relu":
109
+ return F.relu
110
+ if activation == "gelu":
111
+ return F.gelu
112
+ if activation == "glu":
113
+ return F.glu
114
+ raise RuntimeError(F"activation should be relu/gelu, not {activation}.")
115
+
116
+
117
+ class TransformerSALayer(nn.Module):
118
+ def __init__(self, embed_dim, nhead=8, dim_mlp=2048, dropout=0.0, activation="gelu"):
119
+ super().__init__()
120
+ self.self_attn = nn.MultiheadAttention(
121
+ embed_dim, nhead, dropout=dropout)
122
+ # Implementation of Feedforward model - MLP
123
+ self.linear1 = nn.Linear(embed_dim, dim_mlp)
124
+ self.dropout = nn.Dropout(dropout)
125
+ self.linear2 = nn.Linear(dim_mlp, embed_dim)
126
+
127
+ self.norm1 = nn.LayerNorm(embed_dim)
128
+ self.norm2 = nn.LayerNorm(embed_dim)
129
+ self.dropout1 = nn.Dropout(dropout)
130
+ self.dropout2 = nn.Dropout(dropout)
131
+
132
+ self.activation = _get_activation_fn(activation)
133
+
134
+ # self.apply(self._init_weights)
135
+
136
+ def _init_weights(self, module):
137
+ if isinstance(module, nn.MultiheadAttention):
138
+ nn.init.xavier_uniform_(module.in_proj_weight)
139
+ nn.init.xavier_uniform_(module.out_proj.weight)
140
+ if module.in_proj_bias is not None:
141
+ nn.init.constant_(module.in_proj_bias, 0.)
142
+ nn.init.constant_(module.out_proj.bias, 0.)
143
+ elif isinstance(module, nn.Linear):
144
+ module.weight.data.normal_(mean=0.0, std=0.02)
145
+ if module.bias is not None:
146
+ module.bias.data.zero_()
147
+ elif isinstance(module, nn.LayerNorm):
148
+ module.bias.data.zero_()
149
+ module.weight.data.fill_(1.0)
150
+
151
+
152
+ def with_pos_embed(self, tensor, pos: Optional[Tensor]):
153
+ return tensor if pos is None else tensor + pos
154
+
155
+ def forward(self, tgt,
156
+ tgt_mask: Optional[Tensor] = None,
157
+ tgt_key_padding_mask: Optional[Tensor] = None,
158
+ query_pos: Optional[Tensor] = None):
159
+
160
+ # self attention
161
+ tgt2 = self.norm1(tgt)
162
+ q = k = self.with_pos_embed(tgt2, query_pos)
163
+ tgt2 = self.self_attn(q, k, value=tgt2, attn_mask=tgt_mask,
164
+ key_padding_mask=tgt_key_padding_mask)[0]
165
+ tgt = tgt + self.dropout1(tgt2)
166
+
167
+ # ffn
168
+ tgt2 = self.norm2(tgt)
169
+ tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt2))))
170
+ tgt = tgt + self.dropout2(tgt2)
171
+ return tgt
172
+
173
+
174
+ class Fuse_sft_block(nn.Module):
175
+ def __init__(self, in_ch, out_ch):
176
+ super().__init__()
177
+ self.encode_enc = ResBlock(2*in_ch, out_ch)
178
+
179
+ self.scale = nn.Sequential(
180
+ nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),
181
+ nn.LeakyReLU(0.2, True),
182
+ nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1))
183
+
184
+ self.shift = nn.Sequential(
185
+ nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),
186
+ nn.LeakyReLU(0.2, True),
187
+ nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1))
188
+
189
+ self.apply(self._init_weights)
190
+
191
+ def _init_weights(self, module):
192
+ if isinstance(module, nn.Conv2d):
193
+ module.weight.data.zero_()
194
+ if module.bias is not None:
195
+ module.bias.data.zero_()
196
+
197
+ def forward(self, enc_feat, dec_feat, w=1):
198
+ # print(enc_feat.shape, dec_feat.shape)
199
+ enc_feat = self.encode_enc(torch.cat([enc_feat, dec_feat], dim=1))
200
+ scale = self.scale(enc_feat)
201
+ shift = self.shift(enc_feat)
202
+ residual = w * (dec_feat * scale + shift)
203
+ out = dec_feat + residual
204
+ return out
205
+
206
+
207
+ class CrossFrameFusionLayer(nn.Module):
208
+ def __init__(
209
+ self,
210
+ dim: int,
211
+ num_attention_heads: int,
212
+ attention_head_dim: int,
213
+ dropout=0.0,
214
+ cross_attention_dim: Optional[int] = None,
215
+ activation_fn: str = "geglu",
216
+ num_embeds_ada_norm: Optional[int] = None,
217
+ attention_bias: bool = False,
218
+ upcast_attention: bool = False,
219
+ ):
220
+ super().__init__()
221
+ self.use_ada_layer_norm = num_embeds_ada_norm is not None
222
+
223
+ self.norm1 = nn.LayerNorm(dim)
224
+ self.norm2 = nn.LayerNorm(dim)
225
+
226
+ # Feed-forward
227
+ self.ff = FeedForward(dim, dropout=dropout,
228
+ activation_fn=activation_fn)
229
+
230
+ # Cross Frame Attention
231
+ self.attn = CrossAttention(
232
+ query_dim=dim,
233
+ heads=num_attention_heads,
234
+ dim_head=attention_head_dim,
235
+ dropout=dropout,
236
+ bias=attention_bias,
237
+ upcast_attention=upcast_attention,
238
+ )
239
+ nn.init.zeros_(self.attn.to_out[0].weight.data)
240
+ self.apply(self._init_weights)
241
+
242
+ def _init_weights(self, module):
243
+ if isinstance(module, nn.Linear):
244
+ module.weight.data.zero_()
245
+ if module.bias is not None:
246
+ module.bias.data.zero_()
247
+ elif isinstance(module, nn.LayerNorm):
248
+ module.weight.data.fill_(1.0)
249
+ module.bias.data.zero_()
250
+
251
+ def forward(self, curr_states, prev_states, residual=True):
252
+ B, C, H, W = curr_states.shape
253
+ curr_states = rearrange(curr_states, "b c h w -> b (h w) c")
254
+ prev_states = rearrange(prev_states, "b c h w -> b (h w) c")
255
+
256
+ if residual:
257
+ res = curr_states
258
+
259
+ curr_states = self.attn(curr_states, prev_states)
260
+ curr_states = self.norm1(curr_states)
261
+
262
+ if residual:
263
+ curr_states = curr_states + res
264
+ res = curr_states
265
+
266
+ curr_states = self.ff(curr_states)
267
+ curr_states = self.norm2(curr_states)
268
+
269
+ if residual:
270
+ curr_states = curr_states + res
271
+
272
+ curr_states = rearrange(curr_states, "b (h w) c -> b c h w", h=H)
273
+ return curr_states
274
+
275
+
276
+ class BasicTransformerBlock(nn.Module):
277
+ def __init__(
278
+ self,
279
+ dim: int,
280
+ num_attention_heads: int,
281
+ attention_head_dim: int,
282
+ dropout=0.0,
283
+ cross_attention_dim: Optional[int] = None,
284
+ activation_fn: str = "geglu",
285
+ num_embeds_ada_norm: Optional[int] = None,
286
+ attention_bias: bool = False,
287
+ only_cross_attention: bool = False,
288
+ upcast_attention: bool = False,
289
+ ):
290
+ super().__init__()
291
+ self.only_cross_attention = only_cross_attention
292
+ self.use_ada_layer_norm = num_embeds_ada_norm is not None
293
+
294
+ # SC-Attn
295
+ self.attn1 = SparseCausalAttention(
296
+ query_dim=dim,
297
+ heads=num_attention_heads,
298
+ dim_head=attention_head_dim,
299
+ dropout=dropout,
300
+ bias=attention_bias,
301
+ cross_attention_dim=cross_attention_dim if only_cross_attention else None,
302
+ upcast_attention=upcast_attention,
303
+ )
304
+ self.norm1 = AdaLayerNorm(
305
+ dim, num_embeds_ada_norm) if self.use_ada_layer_norm else nn.LayerNorm(dim)
306
+
307
+ # # Cross-Attn
308
+ # if cross_attention_dim is not None:
309
+ # self.attn2 = CrossAttention(
310
+ # query_dim=dim,
311
+ # cross_attention_dim=cross_attention_dim,
312
+ # heads=num_attention_heads,
313
+ # dim_head=attention_head_dim,
314
+ # dropout=dropout,
315
+ # bias=attention_bias,
316
+ # upcast_attention=upcast_attention,
317
+ # )
318
+ # else:
319
+ # self.attn2 = None
320
+
321
+ # if cross_attention_dim is not None:
322
+ # self.norm2 = AdaLayerNorm(dim, num_embeds_ada_norm) if self.use_ada_layer_norm else nn.LayerNorm(dim)
323
+ # else:
324
+ # self.norm2 = None
325
+
326
+ # Feed-forward
327
+ self.ff = FeedForward(dim, dropout=dropout,
328
+ activation_fn=activation_fn)
329
+ self.norm3 = nn.LayerNorm(dim)
330
+
331
+ # Temp-Attn
332
+ self.attn_temp = CrossAttention(
333
+ query_dim=dim,
334
+ heads=num_attention_heads,
335
+ dim_head=attention_head_dim,
336
+ dropout=dropout,
337
+ bias=attention_bias,
338
+ upcast_attention=upcast_attention,
339
+ )
340
+ nn.init.zeros_(self.attn_temp.to_out[0].weight.data)
341
+ self.norm_temp = AdaLayerNorm(
342
+ dim, num_embeds_ada_norm) if self.use_ada_layer_norm else nn.LayerNorm(dim)
343
+
344
+ def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
345
+ if not is_xformers_available():
346
+ print("Here is how to install it")
347
+ raise ModuleNotFoundError(
348
+ "Refer to https://github.com/facebookresearch/xformers for more information on how to install"
349
+ " xformers",
350
+ name="xformers",
351
+ )
352
+ elif not torch.cuda.is_available():
353
+ raise ValueError(
354
+ "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is only"
355
+ " available for GPU "
356
+ )
357
+ else:
358
+ try:
359
+ # Make sure we can run the memory efficient attention
360
+ _ = xformers.ops.memory_efficient_attention(
361
+ torch.randn((1, 2, 40), device="cuda"),
362
+ torch.randn((1, 2, 40), device="cuda"),
363
+ torch.randn((1, 2, 40), device="cuda"),
364
+ )
365
+ except Exception as e:
366
+ raise e
367
+ self.attn1._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers
368
+ # attn2 is not created in __init__ (cross-attention is disabled), so guard the access
+ if getattr(self, 'attn2', None) is not None:
369
+ self.attn2._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers
370
+ # self.attn_temp._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers
371
+
372
+ def forward(self, hidden_states, encoder_hidden_states=None, timestep=None, attention_mask=None, video_length=None):
373
+ # SparseCausal-Attention
374
+ norm_hidden_states = (
375
+ self.norm1(hidden_states, timestep) if self.use_ada_layer_norm else self.norm1(
376
+ hidden_states)
377
+ )
378
+
379
+ if self.only_cross_attention:
380
+ hidden_states = (
381
+ self.attn1(norm_hidden_states, encoder_hidden_states,
382
+ attention_mask=attention_mask) + hidden_states
383
+ )
384
+ else:
385
+ hidden_states = self.attn1(
386
+ norm_hidden_states, attention_mask=attention_mask, video_length=video_length) + hidden_states
387
+
388
+ # if self.attn2 is not None:
389
+ # # Cross-Attention
390
+ # norm_hidden_states = (
391
+ # self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states)
392
+ # )
393
+ # hidden_states = (
394
+ # self.attn2(
395
+ # norm_hidden_states, encoder_hidden_states=encoder_hidden_states, attention_mask=attention_mask
396
+ # )
397
+ # + hidden_states
398
+ # )
399
+
400
+ # Feed-forward
401
+ hidden_states = self.ff(self.norm3(hidden_states)) + hidden_states
402
+
403
+ # Temporal-Attention
404
+ d = hidden_states.shape[1]
405
+ hidden_states = rearrange(
406
+ hidden_states, "(b f) d c -> (b d) f c", f=video_length)
407
+ norm_hidden_states = (
408
+ self.norm_temp(hidden_states, timestep) if self.use_ada_layer_norm else self.norm_temp(
409
+ hidden_states)
410
+ )
411
+ hidden_states = self.attn_temp(norm_hidden_states) + hidden_states
412
+ hidden_states = rearrange(hidden_states, "(b d) f c -> (b f) d c", d=d)
413
+
414
+ return hidden_states
415
+
416
+
417
+ class SparseCausalAttention(CrossAttention):
418
+ def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None, video_length=None):
419
+ batch_size, sequence_length, _ = hidden_states.shape
420
+
421
+ if self.group_norm is not None:
422
+ hidden_states = self.group_norm(
423
+ hidden_states.transpose(1, 2)).transpose(1, 2)
424
+
425
+ query = self.to_q(hidden_states)
426
+ dim = query.shape[-1]
427
+ query = self.reshape_heads_to_batch_dim(query)
428
+
429
+ if self.added_kv_proj_dim is not None:
430
+ raise NotImplementedError
431
+
432
+ encoder_hidden_states = encoder_hidden_states if encoder_hidden_states is not None else hidden_states
433
+ key = self.to_k(encoder_hidden_states)
434
+ value = self.to_v(encoder_hidden_states)
435
+
436
+ former_frame_index = torch.arange(video_length) - 1
437
+ former_frame_index[0] = 0
438
+
439
+ # d = h*w
440
+ key = rearrange(key, "(b f) d c -> b f d c", f=video_length)
441
+ key = torch.cat([key[:, [0] * video_length],
442
+ key[:, former_frame_index]], dim=2)
443
+ key = rearrange(key, "b f d c -> (b f) d c")
444
+
445
+ value = rearrange(value, "(b f) d c -> b f d c", f=video_length)
446
+ value = torch.cat([value[:, [0] * video_length],
447
+ value[:, former_frame_index]], dim=2)
448
+ value = rearrange(value, "b f d c -> (b f) d c")
449
+
450
+ key = self.reshape_heads_to_batch_dim(key)
451
+ value = self.reshape_heads_to_batch_dim(value)
452
+
453
+ if attention_mask is not None:
454
+ if attention_mask.shape[-1] != query.shape[1]:
455
+ target_length = query.shape[1]
456
+ attention_mask = F.pad(
457
+ attention_mask, (0, target_length), value=0.0)
458
+ attention_mask = attention_mask.repeat_interleave(
459
+ self.heads, dim=0)
460
+
461
+ # attention, what we cannot get enough of
462
+ if self._use_memory_efficient_attention_xformers:
463
+ hidden_states = self._memory_efficient_attention_xformers(
464
+ query, key, value, attention_mask)
465
+ # Some versions of xformers return output in fp32, cast it back to the dtype of the input
466
+ hidden_states = hidden_states.to(query.dtype)
467
+ else:
468
+ if self._slice_size is None or query.shape[0] // self._slice_size == 1:
469
+ hidden_states = self._attention(
470
+ query, key, value, attention_mask)
471
+ else:
472
+ hidden_states = self._sliced_attention(
473
+ query, key, value, sequence_length, dim, attention_mask)
474
+
475
+ # linear proj
476
+ hidden_states = self.to_out[0](hidden_states)
477
+
478
+ # dropout
479
+ hidden_states = self.to_out[1](hidden_states)
480
+ return hidden_states
481
+
482
+
483
+ class KalmanFilter(nn.Module):
484
+ def __init__(self, emb_dim, num_attention_heads,
485
+ attention_head_dim, num_uncertainty_layers):
486
+ super().__init__()
487
+ self.uncertainty_estimator = nn.ModuleList(
488
+ [
489
+ BasicTransformerBlock(
490
+ emb_dim,
491
+ num_attention_heads,
492
+ attention_head_dim,
493
+ )
494
+ for d in range(num_uncertainty_layers)
495
+ ]
496
+ )
497
+
498
+ self.kalman_gain_calculator = nn.Sequential(
499
+ ResBlock(emb_dim, emb_dim),
500
+ ResBlock(emb_dim, emb_dim),
501
+ ResBlock(emb_dim, emb_dim),
502
+ nn.Conv2d(emb_dim, 1, kernel_size=1, padding=0),
503
+ nn.Sigmoid()
504
+ )
505
+
506
+ self.apply(self._init_weights)
507
+
508
+ def _init_weights(self, module):
509
+ if isinstance(module, nn.Conv2d):
510
+ nn.init.kaiming_normal_(module.weight)
511
+ if module.bias is not None:
512
+ module.bias.data.zero_()
513
+ elif isinstance(module, (nn.Linear, nn.Embedding)):
514
+ module.weight.data.normal_(mean=0.0, std=0.02)
515
+ if isinstance(module, nn.Linear) and module.bias is not None:
516
+ module.bias.data.zero_()
517
+ elif isinstance(module, nn.LayerNorm):
518
+ module.bias.data.zero_()
519
+ module.weight.data.fill_(1.0)
520
+
521
+
522
+ def predict(self, z_hat, flow):
523
+ # Predict the next state based on the current state and flow (if available)
524
+ flow = rearrange(flow, "n c h w -> n h w c")
525
+ z_prime = flow_warp(z_hat, flow)
526
+ return z_prime
527
+
528
+ def update(self, z_code, z_prime, gain):
529
+ # Update the state and uncertainty based on the measurement and Kalman gain
530
+ z_hat = (1 - gain) * z_code + gain * z_prime
531
+ return z_hat
532
+
533
+ def calc_gain(self, z_codes):
534
+ assert z_codes.dim(
535
+ ) == 5, f"Expected z_codes to have ndim=5, but got ndim={z_codes.dim()}."
536
+ video_length = z_codes.shape[1]
537
+ height, width = z_codes.shape[3:5]
538
+
539
+ # Assume input shape of uncertainty_estimator to be [(b f) d c]
540
+ z_tmp = rearrange(z_codes, "b f c h w -> (b f) (h w) c")
541
+ h_codes = z_tmp
542
+ for block in self.uncertainty_estimator:
543
+ h_codes = block(h_codes, video_length=video_length)
544
+
545
+ h_codes = rearrange(
546
+ h_codes, "(b f) (h w) c -> (b f) c h w", h=height, f=video_length)
547
+ w_codes = self.kalman_gain_calculator(h_codes)
548
+
549
+ w_codes = rearrange(
550
+ w_codes, "(b f) c h w -> b f c h w", f=video_length)
551
+
552
+ # pdb.set_trace()
553
+ return w_codes
554
+
555
+
556
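# To summarize the recursion implemented by KalmanFilter: for each new frame, the previous
# state estimate is propagated with optical flow (predict) and then fused with the current
# encoder observation using the learned gain (update). Illustrative pseudo-loop only; the
# tensor names below are placeholders, not the exact training code:
#
#   gains = kalman_filter.calc_gain(z_codes)       # [B, F, 1, H, W], one gain map per frame
#   z_hat = z_codes[:, 0]                          # first frame: trust the observation
#   for t in range(1, num_frames):
#       z_prime = kalman_filter.predict(z_hat, flows[:, t])   # warp previous estimate, flow: [B, 2, H, W]
#       z_hat = kalman_filter.update(z_codes[:, t], z_prime, gains[:, t])
#       # i.e. z_hat = (1 - gain) * observation + gain * prediction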
+ def load_vqgan_checkpoint(model, vqgan_path, logger=None):
557
+ """Load VQGAN checkpoint into model components.
558
+
559
+ Args:
560
+ model: The model to load weights into
561
+ vqgan_path (str): Path to the VQGAN checkpoint
562
+ logger: Logger instance
563
+ """
564
+ if logger is None:
565
+ logger = get_root_logger()
566
+
567
+ # Load VQGAN checkpoint, load params_ema or params
568
+ ckpt = torch.load(vqgan_path, map_location='cpu', weights_only=True)
569
+ if 'params_ema' in ckpt:
570
+ state_dict = ckpt['params_ema']
571
+ logger.info(f'Loading VQGAN from: {vqgan_path} [params_ema]')
572
+ elif 'params' in ckpt:
573
+ state_dict = ckpt['params']
574
+ logger.info(f'Loading VQGAN from: {vqgan_path} [params]')
575
+ else:
576
+ raise ValueError(f'Wrong params in checkpoint: {vqgan_path}')
577
+
578
+ # Load encoder weights into both encoders
579
+ encoder_state_dict = {k.split('encoder.')[-1]: v for k, v in state_dict.items() if k.startswith('encoder.')}
580
+ model.encoder.load_state_dict(encoder_state_dict, strict=True)
581
+ model.hq_encoder.load_state_dict(encoder_state_dict, strict=True)
582
+
583
+ # Load quantizer weights
584
+ quantizer_state_dict = {k.split('quantize.')[-1]: v for k, v in state_dict.items() if k.startswith('quantize.')}
585
+ model.quantize.load_state_dict(quantizer_state_dict, strict=True)
586
+
587
+ # Load generator weights
588
+ generator_state_dict = {k.split('generator.')[-1]: v for k, v in state_dict.items() if k.startswith('generator.')}
589
+ model.generator.load_state_dict(generator_state_dict, strict=True)
590
+
591
+
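Note: the three dict comprehensions in load_vqgan_checkpoint all rely on the same prefix-splitting pattern. A toy sketch of that pattern (keys and values here are placeholders, not from a real checkpoint):

toy_state_dict = {'encoder.conv.weight': 0, 'generator.conv.weight': 1, 'quantize.embedding.weight': 2}
encoder_only = {k.split('encoder.')[-1]: v for k, v in toy_state_dict.items() if k.startswith('encoder.')}
print(encoder_only)  # {'conv.weight': 0}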
592
+ @ARCH_REGISTRY.register()
593
+ class KEEP(nn.Module):
594
+ def __init__(self, img_size=512, nf=64, ch_mult=[1, 2, 2, 4, 4, 8], quantizer_type="nearest",
595
+ res_blocks=2, attn_resolutions=[16], codebook_size=1024, emb_dim=256,
596
+ beta=0.25, gumbel_straight_through=False, gumbel_kl_weight=1e-8, vqgan_path=None,
597
+ dim_embd=512, n_head=8, n_layers=9, latent_size=256,
598
+ cft_list=['32', '64', '128', '256'], fix_modules=['quantize', 'generator'],
599
+ flownet_path=None, kalman_attn_head_dim=64, num_uncertainty_layers=4,
600
+ cond=1, cfa_list=[], cfa_nhead=4, cfa_dim=256,
601
+ cfa_nlayers=4, cross_residual=True,
602
+ temp_reg_list=[], mask_ratio=0.):
603
+ super().__init__()
604
+
605
+ self.cond = cond
606
+ self.cft_list = cft_list
607
+ self.cfa_list = cfa_list
608
+ self.temp_reg_list = temp_reg_list
609
+ self.use_residual = cross_residual
610
+ self.mask_ratio = mask_ratio
611
+ self.latent_size = latent_size
612
+ logger = get_root_logger()
613
+
614
+ # alignment
615
+ self.flownet = FlowGenerator(path=flownet_path)
616
+
617
+ # Kalman Filter
618
+ self.kalman_filter = KalmanFilter(
619
+ emb_dim=emb_dim,
620
+ num_attention_heads=n_head,
621
+ attention_head_dim=kalman_attn_head_dim,
622
+ num_uncertainty_layers=num_uncertainty_layers,
623
+ )
624
+
625
+ # Create encoders with same architecture
626
+ encoder_config = dict(
627
+ in_channels=3,
628
+ nf=nf,
629
+ emb_dim=emb_dim,
630
+ ch_mult=ch_mult,
631
+ num_res_blocks=res_blocks,
632
+ resolution=img_size,
633
+ attn_resolutions=attn_resolutions
634
+ )
635
+
636
+ self.hq_encoder = Encoder(**encoder_config)
637
+ self.encoder = Encoder(**encoder_config)
638
+
639
+ # VQGAN components
640
+ if quantizer_type == "nearest":
641
+ self.quantize = VectorQuantizer(codebook_size, emb_dim, beta)
642
+ elif quantizer_type == "gumbel":
643
+ self.quantize = GumbelQuantizer(
644
+ codebook_size, emb_dim, emb_dim, gumbel_straight_through, gumbel_kl_weight
645
+ )
646
+
647
+ self.generator = Generator(
648
+ nf=nf,
649
+ emb_dim=emb_dim,
650
+ ch_mult=ch_mult,
651
+ res_blocks=res_blocks,
652
+ img_size=img_size,
653
+ attn_resolutions=attn_resolutions
654
+ )
655
+
656
+ # Load VQGAN checkpoint if provided
657
+ if vqgan_path is not None:
658
+ load_vqgan_checkpoint(self, vqgan_path, logger)
659
+
660
+ self.position_emb = nn.Parameter(torch.zeros(latent_size, dim_embd))
661
+ self.feat_emb = nn.Linear(emb_dim, dim_embd)
662
+
663
+ # transformer
664
+ self.ft_layers = nn.Sequential(*[TransformerSALayer(embed_dim=dim_embd, nhead=n_head,
665
+ dim_mlp=dim_embd*2, dropout=0.0) for _ in range(n_layers)])
666
+
667
+ # logits_predict head
668
+ self.idx_pred_layer = nn.Sequential(
669
+ nn.LayerNorm(dim_embd),
670
+ nn.Linear(dim_embd, codebook_size, bias=False))
671
+
672
+ self.channels = {
673
+ '16': 512,
674
+ '32': 256,
675
+ '64': 256,
676
+ '128': 128,
677
+ '256': 128,
678
+ '512': 64,
679
+ }
680
+
681
+ # after second residual block for > 16, before attn layer for ==16
682
+ self.fuse_encoder_block = {
683
+ '512': 2, '256': 5, '128': 8, '64': 11, '32': 14, '16': 18}
684
+ # after first residual block for > 16, before attn layer for ==16
685
+ self.fuse_generator_block = {
686
+ '16': 6, '32': 9, '64': 12, '128': 15, '256': 18, '512': 21}
687
+
688
+ # cross frame attention fusion
689
+ self.cfa = nn.ModuleDict()
690
+ for f_size in self.cfa_list:
691
+ in_ch = self.channels[f_size]
692
+ self.cfa[f_size] = CrossFrameFusionLayer(dim=in_ch,
693
+ num_attention_heads=cfa_nhead,
694
+ attention_head_dim=cfa_dim)
695
+
696
+ # Controllable Feature Transformation (CFT)
697
+ self.cft = nn.ModuleDict()
698
+ for f_size in self.cft_list:
699
+ in_ch = self.channels[f_size]
700
+ self.cft[f_size] = Fuse_sft_block(in_ch, in_ch)
701
+
702
+ if fix_modules is not None:
703
+ for module in fix_modules:
704
+ for param in getattr(self, module).parameters():
705
+ param.requires_grad = False
706
+
707
+
708
+ def get_flow(self, x):
709
+ b, t, c, h, w = x.size()
710
+
711
+ x_1 = x[:, :-1, :, :, :].reshape(-1, c, h, w)
712
+ x_2 = x[:, 1:, :, :, :].reshape(-1, c, h, w)
713
+
714
+ # Forward flow
715
+ with torch.no_grad():
716
+ flows = self.flownet(x_2, x_1).view(b, t - 1, 2, h, w)
717
+
718
+ return flows.detach()
719
+
720
+ def mask_by_ratio(self, x, mask_ratio=0.):
721
+ if mask_ratio == 0:
722
+ return x
723
+
724
+ # B F C H W
725
+ b, t, c, h, w = x.size()
726
+ d = h * w
727
+ x = rearrange(x, "b f c h w -> b f (h w) c")
728
+
729
+ len_keep = int(d * (1 - mask_ratio))
730
+ sample = torch.rand((b, t, d, 1), device=x.device).topk(
731
+ len_keep, dim=2).indices
732
+ mask = torch.zeros((b, t, d, 1), dtype=torch.bool, device=x.device)
733
+ mask.scatter_(dim=2, index=sample, value=True)
734
+
735
+ x = mask * x
736
+ x = rearrange(x, "b f (h w) c -> b f c h w", h=h)
737
+
738
+ return x
739
+
740
+ def forward(self, x, detach_16=True, early_feat=True, need_upscale=True):
741
+ """Forward function for KEEP.
742
+
743
+ Args:
744
+ x (Tensor): Input low quality (LQ) sequence of
745
+ shape (b, t, c, h, w).
746
+
747
+ Returns:
748
+ Tensor: Output HR sequence with shape (b, t, c, 4h, 4w).
749
+ """
750
+ video_length = x.shape[1]
751
+
752
+ if need_upscale:
753
+ x = rearrange(x, "b f c h w -> (b f) c h w")
754
+ x = F.interpolate(x, scale_factor=4, mode='bilinear')
755
+ x = rearrange(x, "(b f) c h w -> b f c h w", f=video_length)
756
+
757
+ b, t, c, h, w = x.size()
758
+ flows = self.get_flow(x) # (B, t-1, 2, H , W)
759
+
760
+ # ################### Encoder #####################
761
+ # BTCHW -> (BT)CHW
762
+ x = x.reshape(-1, c, h, w)
763
+ enc_feat_dict = {}
764
+ out_list = [self.fuse_encoder_block[f_size]
765
+ for f_size in self.cft_list]
766
+ for i, block in enumerate(self.encoder.blocks):
767
+ x = block(x)
768
+ if i in out_list:
769
+ enc_feat_dict[str(x.shape[-1])] = rearrange(x, "(b f) c h w -> b f c h w", f=t).detach()
770
+
771
+ lq_feat = x
772
+
773
+ # gpu_tracker.track('After encoder')
774
+ # ################### Kalman Filter ###############
775
+ z_codes = rearrange(x, "(b f) c h w -> b f c h w", f=t)
776
+ if self.training:
777
+ z_codes = self.mask_by_ratio(z_codes, self.mask_ratio)
778
+ gains = self.kalman_filter.calc_gain(z_codes)
779
+
780
+ outs = []
781
+ logits = []
782
+ cross_prev_feat = {}
783
+ gen_feat_dict = defaultdict(list)
784
+
785
+ cft_list = [self.fuse_generator_block[f_size]
786
+ for f_size in self.cft_list]
787
+
788
+ cfa_list = [self.fuse_generator_block[f_size]
789
+ for f_size in self.cfa_list]
790
+
791
+ temp_reg_list = [self.fuse_generator_block[f_size]
792
+ for f_size in self.temp_reg_list]
793
+
794
+ for i in range(video_length):
795
+ # print(f'Frame {i} ...')
796
+ if i == 0:
797
+ z_hat = z_codes[:, i, ...]
798
+ else:
799
+ z_prime = self.hq_encoder(
800
+ self.kalman_filter.predict(prev_out.detach(), flows[:, i-1, ...]))
801
+ z_hat = self.kalman_filter.update(
802
+ z_codes[:, i, ...], z_prime, gains[:, i, ...])
803
+
804
+ # ################# Transformer ###################
805
+ pos_emb = self.position_emb.unsqueeze(1).repeat(1, b, 1)
806
+ # BCHW -> BC(HW) -> (HW)BC
807
+ query_emb = self.feat_emb(z_hat.flatten(2).permute(2, 0, 1))
808
+ for layer in self.ft_layers:
809
+ query_emb = layer(query_emb, query_pos=pos_emb)
810
+
811
+ # output logits
812
+ logit = self.idx_pred_layer(query_emb).permute(
813
+ 1, 0, 2) # (hw)bn -> b(hw)n
814
+ logits.append(logit)
815
+
816
+ # ################# Quantization ###################
817
+ code_h = int(np.sqrt(self.latent_size))
818
+ soft_one_hot = F.softmax(logit, dim=2)
819
+ _, top_idx = torch.topk(soft_one_hot, 1, dim=2)
820
+ quant_feat = self.quantize.get_codebook_feat(
821
+ top_idx, shape=[b, code_h, code_h, 256])
822
+
823
+ if detach_16:
824
+ # for training stage III
825
+ quant_feat = quant_feat.detach()
826
+ else:
827
+ # preserve gradients for stage II
828
+ quant_feat = query_emb + (quant_feat - query_emb).detach()
829
+
830
+ # ################## Generator ####################
831
+ x = quant_feat
832
+
833
+ for j, block in enumerate(self.generator.blocks):
834
+ x = block(x)
835
+
836
+ if j in cft_list: # fuse after i-th block
837
+ f_size = str(x.shape[-1])
838
+ # pdb.set_trace()
839
+ x = self.cft[f_size](
840
+ enc_feat_dict[f_size][:, i, ...], x, self.cond)
841
+
842
+ if j in cfa_list:
843
+ f_size = str(x.shape[-1])
844
+
845
+ if i == 0:
846
+ cross_prev_feat[f_size] = x
847
+ # print(f_size)
848
+ else:
849
+ # pdb.set_trace()
850
+ prev_fea = cross_prev_feat[f_size]
851
+ x = self.cfa[f_size](
852
+ x, prev_fea, residual=self.use_residual)
853
+ cross_prev_feat[f_size] = x
854
+
855
+ if j in temp_reg_list:
856
+ f_size = str(x.shape[-1])
857
+ gen_feat_dict[f_size].append(x)
858
+
859
+ prev_out = x # B C H W
860
+ outs.append(prev_out)
861
+
862
+ for f_size, feat in gen_feat_dict.items():
863
+ gen_feat_dict[f_size] = torch.stack(feat, dim=1) # bfchw
864
+
865
+ # Convert defaultdict to regular dict before returning
866
+ gen_feat_dict = dict(gen_feat_dict)
867
+
868
+ logits = torch.stack(logits, dim=1) # b(hw)n -> bf(hw)n
869
+ logits = rearrange(logits, "b f l n -> (b f) l n")
870
+ outs = torch.stack(outs, dim=1) # bfchw
871
+ if self.training:
872
+ if early_feat:
873
+ return outs, logits, lq_feat, gen_feat_dict
874
+ else:
875
+ return outs, gen_feat_dict
876
+ else:
877
+ return outs
878
+
879
+
880
+ def count_parameters(model):
881
+ # Initialize counters
882
+ total_params = 0
883
+ sub_module_params = {}
884
+
885
+ # Loop through all the modules in the model
886
+ for name, module in model.named_children():
887
+ # if len(list(module.children())) == 0: # Check if it's a leaf module
888
+ params = sum(p.numel() for p in module.parameters())
889
+ total_params += params
890
+ sub_module_params[name] = params
891
+
892
+ return total_params, sub_module_params
893
+
894
+
895
+ if __name__ == '__main__':
896
+ import time
897
+ batch_size = 1
898
+ video_length = 4
899
+ height = 128
900
+ width = 128
901
+
902
+ model = KEEP(
903
+ img_size=512,
904
+ emb_dim=256,
905
+ ch_mult=[1, 2, 2, 4, 4, 8],
906
+ dim_embd=512,
907
+ n_head=8,
908
+ n_layers=4,
909
+ codebook_size=1024,
910
+ cft_list=[],
911
+ fix_modules=['generator', 'quantize', 'flownet', 'cft', 'hq_encoder',
912
+ 'encoder', 'feat_emb', 'ft_layers', 'idx_pred_layer'],
913
+ flownet_path="../../weights/GMFlow/gmflow_sintel-0c07dcb3.pth",
914
+ kalman_attn_head_dim=32,
915
+ num_uncertainty_layers=3,
916
+ cond=0,
917
+ cfa_list=['32'],
918
+ cfa_nhead=4,
919
+ cfa_dim=256,
920
+ temp_reg_list=['64'],
921
+ ).cuda()
922
+
923
+ total_params = sum(map(lambda x: x.numel(), model.parameters()))
924
+ print(f"Total parameters in the model: {total_params / 1e6:.2f} M")
925
+
926
+ dummy_input = torch.randn((1, 20, 3, 128, 128)).cuda()
927
+
928
+ start_time = time.time()
929
+
930
+ with torch.no_grad():
931
+ for _ in range(100):
932
+ out = model(dummy_input)
933
+ elapsed_time = time.time() - start_time
934
+
935
+ print(f"Forward pass time: {elapsed_time / 100 / 20 * 1000:.2f} ms")
936
+ print(out.shape)
basicsr/archs/rrdbnet_arch.py ADDED
@@ -0,0 +1,119 @@
1
+ import torch
2
+ from torch import nn as nn
3
+ from torch.nn import functional as F
4
+
5
+ from basicsr.utils.registry import ARCH_REGISTRY
6
+ from .arch_util import default_init_weights, make_layer, pixel_unshuffle
7
+
8
+
9
+ class ResidualDenseBlock(nn.Module):
10
+ """Residual Dense Block.
11
+
12
+ Used in RRDB block in ESRGAN.
13
+
14
+ Args:
15
+ num_feat (int): Channel number of intermediate features.
16
+ num_grow_ch (int): Channels for each growth.
17
+ """
18
+
19
+ def __init__(self, num_feat=64, num_grow_ch=32):
20
+ super(ResidualDenseBlock, self).__init__()
21
+ self.conv1 = nn.Conv2d(num_feat, num_grow_ch, 3, 1, 1)
22
+ self.conv2 = nn.Conv2d(num_feat + num_grow_ch, num_grow_ch, 3, 1, 1)
23
+ self.conv3 = nn.Conv2d(num_feat + 2 * num_grow_ch, num_grow_ch, 3, 1, 1)
24
+ self.conv4 = nn.Conv2d(num_feat + 3 * num_grow_ch, num_grow_ch, 3, 1, 1)
25
+ self.conv5 = nn.Conv2d(num_feat + 4 * num_grow_ch, num_feat, 3, 1, 1)
26
+
27
+ self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
28
+
29
+ # initialization
30
+ default_init_weights([self.conv1, self.conv2, self.conv3, self.conv4, self.conv5], 0.1)
31
+
32
+ def forward(self, x):
33
+ x1 = self.lrelu(self.conv1(x))
34
+ x2 = self.lrelu(self.conv2(torch.cat((x, x1), 1)))
35
+ x3 = self.lrelu(self.conv3(torch.cat((x, x1, x2), 1)))
36
+ x4 = self.lrelu(self.conv4(torch.cat((x, x1, x2, x3), 1)))
37
+ x5 = self.conv5(torch.cat((x, x1, x2, x3, x4), 1))
38
+ # Empirically, we use 0.2 to scale the residual for better performance
39
+ return x5 * 0.2 + x
40
+
41
+
42
+ class RRDB(nn.Module):
43
+ """Residual in Residual Dense Block.
44
+
45
+ Used in RRDB-Net in ESRGAN.
46
+
47
+ Args:
48
+ num_feat (int): Channel number of intermediate features.
49
+ num_grow_ch (int): Channels for each growth.
50
+ """
51
+
52
+ def __init__(self, num_feat, num_grow_ch=32):
53
+ super(RRDB, self).__init__()
54
+ self.rdb1 = ResidualDenseBlock(num_feat, num_grow_ch)
55
+ self.rdb2 = ResidualDenseBlock(num_feat, num_grow_ch)
56
+ self.rdb3 = ResidualDenseBlock(num_feat, num_grow_ch)
57
+
58
+ def forward(self, x):
59
+ out = self.rdb1(x)
60
+ out = self.rdb2(out)
61
+ out = self.rdb3(out)
62
+ # Empirically, we use 0.2 to scale the residual for better performance
63
+ return out * 0.2 + x
64
+
65
+
66
+ @ARCH_REGISTRY.register()
67
+ class RRDBNet(nn.Module):
68
+ """Networks consisting of Residual in Residual Dense Block, which is used
69
+ in ESRGAN.
70
+
71
+ ESRGAN: Enhanced Super-Resolution Generative Adversarial Networks.
72
+
73
+ We extend ESRGAN for scale x2 and scale x1.
74
+ Note: This is one option for scale 1, scale 2 in RRDBNet.
75
+ We first employ the pixel-unshuffle (an inverse operation of pixelshuffle to reduce the spatial size
76
+ and enlarge the channel size) before feeding inputs into the main ESRGAN architecture.
77
+
78
+ Args:
79
+ num_in_ch (int): Channel number of inputs.
80
+ num_out_ch (int): Channel number of outputs.
81
+ num_feat (int): Channel number of intermediate features.
82
+ Default: 64
83
+ num_block (int): Block number in the trunk network. Defaults: 23
84
+ num_grow_ch (int): Channels for each growth. Default: 32.
85
+ """
86
+
87
+ def __init__(self, num_in_ch, num_out_ch, scale=4, num_feat=64, num_block=23, num_grow_ch=32):
88
+ super(RRDBNet, self).__init__()
89
+ self.scale = scale
90
+ if scale == 2:
91
+ num_in_ch = num_in_ch * 4
92
+ elif scale == 1:
93
+ num_in_ch = num_in_ch * 16
94
+ self.conv_first = nn.Conv2d(num_in_ch, num_feat, 3, 1, 1)
95
+ self.body = make_layer(RRDB, num_block, num_feat=num_feat, num_grow_ch=num_grow_ch)
96
+ self.conv_body = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
97
+ # upsample
98
+ self.conv_up1 = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
99
+ self.conv_up2 = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
100
+ self.conv_hr = nn.Conv2d(num_feat, num_feat, 3, 1, 1)
101
+ self.conv_last = nn.Conv2d(num_feat, num_out_ch, 3, 1, 1)
102
+
103
+ self.lrelu = nn.LeakyReLU(negative_slope=0.2, inplace=True)
104
+
105
+ def forward(self, x):
106
+ if self.scale == 2:
107
+ feat = pixel_unshuffle(x, scale=2)
108
+ elif self.scale == 1:
109
+ feat = pixel_unshuffle(x, scale=4)
110
+ else:
111
+ feat = x
112
+ feat = self.conv_first(feat)
113
+ body_feat = self.conv_body(self.body(feat))
114
+ feat = feat + body_feat
115
+ # upsample
116
+ feat = self.lrelu(self.conv_up1(F.interpolate(feat, scale_factor=2, mode='nearest')))
117
+ feat = self.lrelu(self.conv_up2(F.interpolate(feat, scale_factor=2, mode='nearest')))
118
+ out = self.conv_last(self.lrelu(self.conv_hr(feat)))
119
+ return out
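Shape sketch for the pixel-unshuffle path described in the RRDBNet docstring above (illustrative tensors only; pixel_unshuffle is the helper imported from basicsr.archs.arch_util at the top of this file):

import torch
from basicsr.archs.arch_util import pixel_unshuffle

x = torch.randn(1, 3, 64, 64)
print(pixel_unshuffle(x, scale=2).shape)  # torch.Size([1, 12, 32, 32]), used when scale == 2
print(pixel_unshuffle(x, scale=4).shape)  # torch.Size([1, 48, 16, 16]), used when scale == 1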
basicsr/archs/spectral_norm_arch.py ADDED
@@ -0,0 +1,288 @@
1
+ """
2
+ Spectral Normalization from https://arxiv.org/abs/1802.05957
3
+ """
4
+ import torch
5
+ from torch.nn.functional import normalize
6
+
7
+
8
+ class SpectralNorm(object):
9
+ # Invariant before and after each forward call:
10
+ # u = normalize(W @ v)
11
+ # NB: At initialization, this invariant is not enforced
12
+
13
+ _version = 1
14
+
15
+ # At version 1:
16
+ # made `W` not a buffer,
17
+ # added `v` as a buffer, and
18
+ # made eval mode use `W = u @ W_orig @ v` rather than the stored `W`.
19
+
20
+ def __init__(self, name='weight', n_power_iterations=1, dim=0, eps=1e-12):
21
+ self.name = name
22
+ self.dim = dim
23
+ if n_power_iterations <= 0:
24
+ raise ValueError(
25
+ 'Expected n_power_iterations to be positive, but '
26
+ 'got n_power_iterations={}'.format(n_power_iterations))
27
+ self.n_power_iterations = n_power_iterations
28
+ self.eps = eps
29
+
30
+ def reshape_weight_to_matrix(self, weight):
31
+ weight_mat = weight
32
+ if self.dim != 0:
33
+ # permute dim to front
34
+ weight_mat = weight_mat.permute(
35
+ self.dim,
36
+ *[d for d in range(weight_mat.dim()) if d != self.dim])
37
+ height = weight_mat.size(0)
38
+ return weight_mat.reshape(height, -1)
39
+
40
+ def compute_weight(self, module, do_power_iteration):
41
+ # NB: If `do_power_iteration` is set, the `u` and `v` vectors are
42
+ # updated in power iteration **in-place**. This is very important
43
+ # because in `DataParallel` forward, the vectors (being buffers) are
44
+ # broadcast from the parallelized module to each module replica,
45
+ # which is a new module object created on the fly. And each replica
46
+ # runs its own spectral norm power iteration. So simply assigning
47
+ # the updated vectors to the module this function runs on will cause
48
+ # the update to be lost forever. And the next time the parallelized
49
+ # module is replicated, the same randomly initialized vectors are
50
+ # broadcast and used!
51
+ #
52
+ # Therefore, to make the change propagate back, we rely on two
53
+ # important behaviors (also enforced via tests):
54
+ # 1. `DataParallel` doesn't clone storage if the broadcast tensor
55
+ # is already on correct device; and it makes sure that the
56
+ # parallelized module is already on `device[0]`.
57
+ # 2. If the out tensor in `out=` kwarg has correct shape, it will
58
+ # just fill in the values.
59
+ # Therefore, since the same power iteration is performed on all
60
+ # devices, simply updating the tensors in-place will make sure that
61
+ # the module replica on `device[0]` will update the _u vector on the
62
+ # parallelized module (by shared storage).
63
+ #
64
+ # However, after we update `u` and `v` in-place, we need to **clone**
65
+ # them before using them to normalize the weight. This is to support
66
+ # backproping through two forward passes, e.g., the common pattern in
67
+ # GAN training: loss = D(real) - D(fake). Otherwise, engine will
68
+ # complain that variables needed to do backward for the first forward
69
+ # (i.e., the `u` and `v` vectors) are changed in the second forward.
70
+ weight = getattr(module, self.name + '_orig')
71
+ u = getattr(module, self.name + '_u')
72
+ v = getattr(module, self.name + '_v')
73
+ weight_mat = self.reshape_weight_to_matrix(weight)
74
+
75
+ if do_power_iteration:
76
+ with torch.no_grad():
77
+ for _ in range(self.n_power_iterations):
78
+ # Spectral norm of weight equals to `u^T W v`, where `u` and `v`
79
+ # are the first left and right singular vectors.
80
+ # This power iteration produces approximations of `u` and `v`.
81
+ v = normalize(torch.mv(weight_mat.t(), u),
82
+ dim=0,
83
+ eps=self.eps,
84
+ out=v)
85
+ u = normalize(torch.mv(weight_mat, v),
86
+ dim=0,
87
+ eps=self.eps,
88
+ out=u)
89
+ if self.n_power_iterations > 0:
90
+ # See above on why we need to clone
91
+ u = u.clone()
92
+ v = v.clone()
93
+
94
+ sigma = torch.dot(u, torch.mv(weight_mat, v))
95
+ weight = weight / sigma
96
+ return weight
97
+
98
+ def remove(self, module):
99
+ with torch.no_grad():
100
+ weight = self.compute_weight(module, do_power_iteration=False)
101
+ delattr(module, self.name)
102
+ delattr(module, self.name + '_u')
103
+ delattr(module, self.name + '_v')
104
+ delattr(module, self.name + '_orig')
105
+ module.register_parameter(self.name,
106
+ torch.nn.Parameter(weight.detach()))
107
+
108
+ def __call__(self, module, inputs):
109
+ setattr(
110
+ module, self.name,
111
+ self.compute_weight(module, do_power_iteration=module.training))
112
+
113
+ def _solve_v_and_rescale(self, weight_mat, u, target_sigma):
114
+ # Tries to return a vector `v` s.t. `u = normalize(W @ v)`
115
+ # (the invariant at top of this class) and `u @ W @ v = sigma`.
116
+ # This uses pinverse in case W^T W is not invertible.
117
+ v = torch.chain_matmul(weight_mat.t().mm(weight_mat).pinverse(),
118
+ weight_mat.t(), u.unsqueeze(1)).squeeze(1)
119
+ return v.mul_(target_sigma / torch.dot(u, torch.mv(weight_mat, v)))
120
+
121
+ @staticmethod
122
+ def apply(module, name, n_power_iterations, dim, eps):
123
+ for k, hook in module._forward_pre_hooks.items():
124
+ if isinstance(hook, SpectralNorm) and hook.name == name:
125
+ raise RuntimeError(
126
+ "Cannot register two spectral_norm hooks on "
127
+ "the same parameter {}".format(name))
128
+
129
+ fn = SpectralNorm(name, n_power_iterations, dim, eps)
130
+ weight = module._parameters[name]
131
+
132
+ with torch.no_grad():
133
+ weight_mat = fn.reshape_weight_to_matrix(weight)
134
+
135
+ h, w = weight_mat.size()
136
+ # randomly initialize `u` and `v`
137
+ u = normalize(weight.new_empty(h).normal_(0, 1), dim=0, eps=fn.eps)
138
+ v = normalize(weight.new_empty(w).normal_(0, 1), dim=0, eps=fn.eps)
139
+
140
+ delattr(module, fn.name)
141
+ module.register_parameter(fn.name + "_orig", weight)
142
+ # We still need to assign weight back as fn.name because all sorts of
143
+ # things may assume that it exists, e.g., when initializing weights.
144
+ # However, we can't directly assign as it could be an nn.Parameter and
145
+ # gets added as a parameter. Instead, we register weight.data as a plain
146
+ # attribute.
147
+ setattr(module, fn.name, weight.data)
148
+ module.register_buffer(fn.name + "_u", u)
149
+ module.register_buffer(fn.name + "_v", v)
150
+
151
+ module.register_forward_pre_hook(fn)
152
+
153
+ module._register_state_dict_hook(SpectralNormStateDictHook(fn))
154
+ module._register_load_state_dict_pre_hook(
155
+ SpectralNormLoadStateDictPreHook(fn))
156
+ return fn
157
+
158
+
159
+ # This is a top level class because Py2 pickle doesn't like inner class nor an
160
+ # instancemethod.
161
+ class SpectralNormLoadStateDictPreHook(object):
162
+ # See docstring of SpectralNorm._version on the changes to spectral_norm.
163
+ def __init__(self, fn):
164
+ self.fn = fn
165
+
166
+ # For state_dict with version None, (assuming that it has gone through at
167
+ # least one training forward), we have
168
+ #
169
+ # u = normalize(W_orig @ v)
170
+ # W = W_orig / sigma, where sigma = u @ W_orig @ v
171
+ #
172
+ # To compute `v`, we solve `W_orig @ x = u`, and let
173
+ # v = x / (u @ W_orig @ x) * (W / W_orig).
174
+ def __call__(self, state_dict, prefix, local_metadata, strict,
175
+ missing_keys, unexpected_keys, error_msgs):
176
+ fn = self.fn
177
+ version = local_metadata.get('spectral_norm',
178
+ {}).get(fn.name + '.version', None)
179
+ if version is None or version < 1:
180
+ with torch.no_grad():
181
+ weight_orig = state_dict[prefix + fn.name + '_orig']
182
+ # weight = state_dict.pop(prefix + fn.name)
183
+ # sigma = (weight_orig / weight).mean()
184
+ weight_mat = fn.reshape_weight_to_matrix(weight_orig)
185
+ u = state_dict[prefix + fn.name + '_u']
186
+ # v = fn._solve_v_and_rescale(weight_mat, u, sigma)
187
+ # state_dict[prefix + fn.name + '_v'] = v
188
+
189
+
190
+ # This is a top level class because Py2 pickle doesn't like inner class nor an
191
+ # instancemethod.
192
+ class SpectralNormStateDictHook(object):
193
+ # See docstring of SpectralNorm._version on the changes to spectral_norm.
194
+ def __init__(self, fn):
195
+ self.fn = fn
196
+
197
+ def __call__(self, module, state_dict, prefix, local_metadata):
198
+ if 'spectral_norm' not in local_metadata:
199
+ local_metadata['spectral_norm'] = {}
200
+ key = self.fn.name + '.version'
201
+ if key in local_metadata['spectral_norm']:
202
+ raise RuntimeError(
203
+ "Unexpected key in metadata['spectral_norm']: {}".format(key))
204
+ local_metadata['spectral_norm'][key] = self.fn._version
205
+
206
+
207
+ def spectral_norm(module,
208
+ name='weight',
209
+ n_power_iterations=1,
210
+ eps=1e-12,
211
+ dim=None):
212
+ r"""Applies spectral normalization to a parameter in the given module.
213
+
214
+ .. math::
215
+ \mathbf{W}_{SN} = \dfrac{\mathbf{W}}{\sigma(\mathbf{W})},
216
+ \sigma(\mathbf{W}) = \max_{\mathbf{h}: \mathbf{h} \ne 0} \dfrac{\|\mathbf{W} \mathbf{h}\|_2}{\|\mathbf{h}\|_2}
217
+
218
+ Spectral normalization stabilizes the training of discriminators (critics)
219
+ in Generative Adversarial Networks (GANs) by rescaling the weight tensor
220
+ with spectral norm :math:`\sigma` of the weight matrix calculated using
221
+ power iteration method. If the dimension of the weight tensor is greater
222
+ than 2, it is reshaped to 2D in power iteration method to get spectral
223
+ norm. This is implemented via a hook that calculates spectral norm and
224
+ rescales weight before every :meth:`~Module.forward` call.
225
+
226
+ See `Spectral Normalization for Generative Adversarial Networks`_ .
227
+
228
+ .. _`Spectral Normalization for Generative Adversarial Networks`: https://arxiv.org/abs/1802.05957
229
+
230
+ Args:
231
+ module (nn.Module): containing module
232
+ name (str, optional): name of weight parameter
233
+ n_power_iterations (int, optional): number of power iterations to
234
+ calculate spectral norm
235
+ eps (float, optional): epsilon for numerical stability in
236
+ calculating norms
237
+ dim (int, optional): dimension corresponding to number of outputs,
238
+ the default is ``0``, except for modules that are instances of
239
+ ConvTranspose{1,2,3}d, when it is ``1``
240
+
241
+ Returns:
242
+ The original module with the spectral norm hook
243
+
244
+ Example::
245
+
246
+ >>> m = spectral_norm(nn.Linear(20, 40))
247
+ >>> m
248
+ Linear(in_features=20, out_features=40, bias=True)
249
+ >>> m.weight_u.size()
250
+ torch.Size([40])
251
+
252
+ """
253
+ if dim is None:
254
+ if isinstance(module,
255
+ (torch.nn.ConvTranspose1d, torch.nn.ConvTranspose2d,
256
+ torch.nn.ConvTranspose3d)):
257
+ dim = 1
258
+ else:
259
+ dim = 0
260
+ SpectralNorm.apply(module, name, n_power_iterations, dim, eps)
261
+ return module
262
+
263
+
264
+ def remove_spectral_norm(module, name='weight'):
265
+ r"""Removes the spectral normalization reparameterization from a module.
266
+
267
+ Args:
268
+ module (Module): containing module
269
+ name (str, optional): name of weight parameter
270
+
271
+ Example:
272
+ >>> m = spectral_norm(nn.Linear(40, 10))
273
+ >>> remove_spectral_norm(m)
274
+ """
275
+ for k, hook in module._forward_pre_hooks.items():
276
+ if isinstance(hook, SpectralNorm) and hook.name == name:
277
+ hook.remove(module)
278
+ del module._forward_pre_hooks[k]
279
+ return module
280
+
281
+ raise ValueError("spectral_norm of '{}' not found in {}".format(
282
+ name, module))
283
+
284
+
285
+ def use_spectral_norm(module, use_sn=False):
286
+ if use_sn:
287
+ return spectral_norm(module)
288
+ return module
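The power iteration inside compute_weight approximates the largest singular value, sigma ~ u^T W v. A standalone sketch of the same idea on a random matrix (not tied to any module; purely illustrative):

import torch
from torch.nn.functional import normalize

W = torch.randn(40, 20)
u = normalize(torch.randn(40), dim=0)
for _ in range(20):
    # alternate refinement of the leading right/left singular vectors
    v = normalize(torch.mv(W.t(), u), dim=0)
    u = normalize(torch.mv(W, v), dim=0)
sigma = torch.dot(u, torch.mv(W, v))
print(float(sigma), float(torch.linalg.svdvals(W)[0]))  # the two values should be close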
basicsr/archs/vgg_arch.py ADDED
@@ -0,0 +1,161 @@
1
+ import os
2
+ import torch
3
+ from collections import OrderedDict
4
+ from torch import nn as nn
5
+ from torchvision.models import vgg as vgg
6
+
7
+ from basicsr.utils.registry import ARCH_REGISTRY
8
+
9
+ VGG_PRETRAIN_PATH = 'experiments/pretrained_models/vgg19-dcbb9e9d.pth'
10
+ NAMES = {
11
+ 'vgg11': [
12
+ 'conv1_1', 'relu1_1', 'pool1', 'conv2_1', 'relu2_1', 'pool2', 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2',
13
+ 'pool3', 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'pool4', 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2',
14
+ 'pool5'
15
+ ],
16
+ 'vgg13': [
17
+ 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
18
+ 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'pool3', 'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'pool4',
19
+ 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'pool5'
20
+ ],
21
+ 'vgg16': [
22
+ 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
23
+ 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'pool3', 'conv4_1', 'relu4_1', 'conv4_2',
24
+ 'relu4_2', 'conv4_3', 'relu4_3', 'pool4', 'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3',
25
+ 'pool5'
26
+ ],
27
+ 'vgg19': [
28
+ 'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1', 'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
29
+ 'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3', 'relu3_3', 'conv3_4', 'relu3_4', 'pool3', 'conv4_1',
30
+ 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3', 'relu4_3', 'conv4_4', 'relu4_4', 'pool4', 'conv5_1', 'relu5_1',
31
+ 'conv5_2', 'relu5_2', 'conv5_3', 'relu5_3', 'conv5_4', 'relu5_4', 'pool5'
32
+ ]
33
+ }
34
+
35
+
36
+ def insert_bn(names):
37
+ """Insert bn layer after each conv.
38
+
39
+ Args:
40
+ names (list): The list of layer names.
41
+
42
+ Returns:
43
+ list: The list of layer names with bn layers.
44
+ """
45
+ names_bn = []
46
+ for name in names:
47
+ names_bn.append(name)
48
+ if 'conv' in name:
49
+ position = name.replace('conv', '')
50
+ names_bn.append('bn' + position)
51
+ return names_bn
52
+
53
+
54
+ @ARCH_REGISTRY.register()
55
+ class VGGFeatureExtractor(nn.Module):
56
+ """VGG network for feature extraction.
57
+
58
+ In this implementation, we allow users to choose whether to use normalization
59
+ in the input feature and the type of vgg network. Note that the pretrained
60
+ path must fit the vgg type.
61
+
62
+ Args:
63
+ layer_name_list (list[str]): Forward function returns the corresponding
64
+ features according to the layer_name_list.
65
+ Example: {'relu1_1', 'relu2_1', 'relu3_1'}.
66
+ vgg_type (str): Set the type of vgg network. Default: 'vgg19'.
67
+ use_input_norm (bool): If True, normalize the input image. Importantly,
68
+ the input feature must be in the range [0, 1]. Default: True.
69
+ range_norm (bool): If True, norm images with range [-1, 1] to [0, 1].
70
+ Default: False.
71
+ requires_grad (bool): If true, the parameters of VGG network will be
72
+ optimized. Default: False.
73
+ remove_pooling (bool): If true, the max pooling operations in VGG net
74
+ will be removed. Default: False.
75
+ pooling_stride (int): The stride of max pooling operation. Default: 2.
76
+ """
77
+
78
+ def __init__(self,
79
+ layer_name_list,
80
+ vgg_type='vgg19',
81
+ use_input_norm=True,
82
+ range_norm=False,
83
+ requires_grad=False,
84
+ remove_pooling=False,
85
+ pooling_stride=2):
86
+ super(VGGFeatureExtractor, self).__init__()
87
+
88
+ self.layer_name_list = layer_name_list
89
+ self.use_input_norm = use_input_norm
90
+ self.range_norm = range_norm
91
+
92
+ self.names = NAMES[vgg_type.replace('_bn', '')]
93
+ if 'bn' in vgg_type:
94
+ self.names = insert_bn(self.names)
95
+
96
+ # only borrow layers that will be used to avoid unused params
97
+ max_idx = 0
98
+ for v in layer_name_list:
99
+ idx = self.names.index(v)
100
+ if idx > max_idx:
101
+ max_idx = idx
102
+
103
+ if os.path.exists(VGG_PRETRAIN_PATH):
104
+ vgg_net = getattr(vgg, vgg_type)(pretrained=False)
105
+ state_dict = torch.load(VGG_PRETRAIN_PATH, map_location=lambda storage, loc: storage, weights_only=True)
106
+ vgg_net.load_state_dict(state_dict)
107
+ else:
108
+ vgg_net = getattr(vgg, vgg_type)(pretrained=True)
109
+
110
+ features = vgg_net.features[:max_idx + 1]
111
+
112
+ modified_net = OrderedDict()
113
+ for k, v in zip(self.names, features):
114
+ if 'pool' in k:
115
+ # if remove_pooling is true, pooling operation will be removed
116
+ if remove_pooling:
117
+ continue
118
+ else:
119
+ # in some cases, we may want to change the default stride
120
+ modified_net[k] = nn.MaxPool2d(kernel_size=2, stride=pooling_stride)
121
+ else:
122
+ modified_net[k] = v
123
+
124
+ self.vgg_net = nn.Sequential(modified_net)
125
+
126
+ if not requires_grad:
127
+ self.vgg_net.eval()
128
+ for param in self.parameters():
129
+ param.requires_grad = False
130
+ else:
131
+ self.vgg_net.train()
132
+ for param in self.parameters():
133
+ param.requires_grad = True
134
+
135
+ if self.use_input_norm:
136
+ # the mean is for image with range [0, 1]
137
+ self.register_buffer('mean', torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1))
138
+ # the std is for image with range [0, 1]
139
+ self.register_buffer('std', torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1))
140
+
141
+ def forward(self, x):
142
+ """Forward function.
143
+
144
+ Args:
145
+ x (Tensor): Input tensor with shape (n, c, h, w).
146
+
147
+ Returns:
148
+ Tensor: Forward results.
149
+ """
150
+ if self.range_norm:
151
+ x = (x + 1) / 2
152
+ if self.use_input_norm:
153
+ x = (x - self.mean) / self.std
154
+ output = {}
155
+
156
+ for key, layer in self.vgg_net._modules.items():
157
+ x = layer(x)
158
+ if key in self.layer_name_list:
159
+ output[key] = x.clone()
160
+
161
+ return output
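A hedged usage sketch of VGGFeatureExtractor (it downloads torchvision's VGG19 weights if the local pretrained path is absent; layer names follow NAMES['vgg19'] above, and the input size is arbitrary):

import torch

extractor = VGGFeatureExtractor(layer_name_list=['relu1_1', 'relu2_1'], vgg_type='vgg19')
feats = extractor(torch.rand(1, 3, 224, 224))
print({k: tuple(v.shape) for k, v in feats.items()})
# expected: relu1_1 -> (1, 64, 224, 224), relu2_1 -> (1, 128, 112, 112)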
basicsr/archs/vqgan_arch.py ADDED
@@ -0,0 +1,597 @@
1
+ '''
2
+ VQGAN code, adapted from the original created by the Unleashing Transformers authors:
3
+ https://github.com/samb-t/unleashing-transformers/blob/master/models/vqgan.py
4
+
5
+ '''
6
+ import numpy as np
7
+ import torch
8
+ import torch.nn as nn
9
+ import torch.nn.functional as F
10
+ import copy
11
+ from basicsr.utils import get_root_logger
12
+ from basicsr.utils.registry import ARCH_REGISTRY
13
+ from basicsr.archs.spectral_norm_arch import spectral_norm as _spectral_norm
14
+
15
+
16
+ def normalize(in_channels):
17
+ return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)
18
+
19
+
20
+ @torch.jit.script
21
+ def swish(x):
22
+ return x*torch.sigmoid(x)
23
+
24
+
25
+ # Define VQVAE classes
26
+ class VectorQuantizer(nn.Module):
27
+ def __init__(self, codebook_size, emb_dim, beta):
28
+ super(VectorQuantizer, self).__init__()
29
+ self.codebook_size = codebook_size # number of embeddings
30
+ self.emb_dim = emb_dim # dimension of embedding
31
+ # commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2
32
+ self.beta = beta
33
+ self.embedding = nn.Embedding(self.codebook_size, self.emb_dim)
34
+ self.embedding.weight.data.uniform_(-1.0 /
35
+ self.codebook_size, 1.0 / self.codebook_size)
36
+
37
+ def forward(self, z):
38
+ # reshape z -> (batch, height, width, channel) and flatten
39
+ z = z.permute(0, 2, 3, 1).contiguous()
40
+ z_flattened = z.view(-1, self.emb_dim)
41
+
42
+ # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z
43
+ d = (z_flattened ** 2).sum(dim=1, keepdim=True) + (self.embedding.weight**2).sum(1) - \
44
+ 2 * torch.matmul(z_flattened, self.embedding.weight.t())
45
+
46
+ mean_distance = torch.mean(d)
47
+ # find closest encodings
48
+ min_encoding_indices = torch.argmin(d, dim=1).unsqueeze(1)
49
+ # min_encoding_scores, min_encoding_indices = torch.topk(d, 1, dim=1, largest=False)
50
+ # [0-1], higher score, higher confidence
51
+ # min_encoding_scores = torch.exp(-min_encoding_scores/10)
52
+
53
+ min_encodings = torch.zeros(
54
+ min_encoding_indices.shape[0], self.codebook_size).to(z)
55
+ min_encodings.scatter_(1, min_encoding_indices, 1)
56
+
57
+ # get quantized latent vectors
58
+ z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape)
59
+ # compute loss for embedding
60
+ loss = torch.mean((z_q.detach()-z)**2) + self.beta * \
61
+ torch.mean((z_q - z.detach()) ** 2)
62
+ # preserve gradients
63
+ z_q = z + (z_q - z).detach()
64
+
65
+ # perplexity
66
+ e_mean = torch.mean(min_encodings, dim=0)
67
+ perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10)))
68
+ # reshape back to match original input shape
69
+ z_q = z_q.permute(0, 3, 1, 2).contiguous()
70
+
71
+ return z_q, loss, {
72
+ "perplexity": perplexity,
73
+ "min_encodings": min_encodings,
74
+ "min_encoding_indices": min_encoding_indices,
75
+ "mean_distance": mean_distance
76
+ }
77
+
78
+ def get_codebook_feat(self, indices, shape):
79
+ # input indices: batch*token_num -> (batch*token_num)*1
80
+ # shape: batch, height, width, channel
81
+ indices = indices.view(-1, 1)
82
+ min_encodings = torch.zeros(
83
+ indices.shape[0], self.codebook_size).to(indices)
84
+ min_encodings.scatter_(1, indices, 1)
85
+ # get quantized latent vectors
86
+ z_q = torch.matmul(min_encodings.float(), self.embedding.weight)
87
+
88
+ if shape is not None: # reshape back to match original input shape
89
+ z_q = z_q.view(shape).permute(0, 3, 1, 2).contiguous()
90
+
91
+ return z_q
92
+
93
+
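The expanded squared distance used in VectorQuantizer.forward, ||z - e||^2 = ||z||^2 + ||e||^2 - 2 z.e, is equivalent to a direct pairwise distance. A small numerical check on random tensors (illustrative only):

import torch

z = torch.randn(5, 4)   # flattened latents
e = torch.randn(7, 4)   # codebook entries
d_expanded = (z ** 2).sum(1, keepdim=True) + (e ** 2).sum(1) - 2 * z @ e.t()
d_direct = torch.cdist(z, e) ** 2
print(torch.allclose(d_expanded, d_direct, atol=1e-4))  # True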
94
+ class GumbelQuantizer(nn.Module):
95
+ def __init__(self, codebook_size, emb_dim, num_hiddens, straight_through=False, kl_weight=5e-4, temp_init=1.0):
96
+ super().__init__()
97
+ self.codebook_size = codebook_size # number of embeddings
98
+ self.emb_dim = emb_dim # dimension of embedding
99
+ self.straight_through = straight_through
100
+ self.temperature = temp_init
101
+ self.kl_weight = kl_weight
102
+ # projects last encoder layer to quantized logits
103
+ self.proj = nn.Conv2d(num_hiddens, codebook_size, 1)
104
+ self.embed = nn.Embedding(codebook_size, emb_dim)
105
+
106
+ def forward(self, z):
107
+ hard = self.straight_through if self.training else True
108
+
109
+ logits = self.proj(z)
110
+
111
+ soft_one_hot = F.gumbel_softmax(
112
+ logits, tau=self.temperature, dim=1, hard=hard)
113
+
114
+ z_q = torch.einsum("b n h w, n d -> b d h w",
115
+ soft_one_hot, self.embed.weight)
116
+
117
+ # + kl divergence to the prior loss
118
+ qy = F.softmax(logits, dim=1)
119
+ diff = self.kl_weight * \
120
+ torch.sum(
121
+ qy * torch.log(qy * self.codebook_size + 1e-10), dim=1).mean()
122
+ min_encoding_indices = soft_one_hot.argmax(dim=1)
123
+
124
+ return z_q, diff, {
125
+ "min_encoding_indices": min_encoding_indices
126
+ }
127
+
128
+
129
+ class Downsample(nn.Module):
130
+ def __init__(self, in_channels):
131
+ super().__init__()
132
+ self.conv = torch.nn.Conv2d(
133
+ in_channels, in_channels, kernel_size=3, stride=2, padding=0)
134
+
135
+ def forward(self, x):
136
+ pad = (0, 1, 0, 1)
137
+ x = torch.nn.functional.pad(x, pad, mode="constant", value=0)
138
+ x = self.conv(x)
139
+ return x
140
+
141
+
142
+ class Upsample(nn.Module):
143
+ def __init__(self, in_channels):
144
+ super().__init__()
145
+ self.conv = nn.Conv2d(in_channels, in_channels,
146
+ kernel_size=3, stride=1, padding=1)
147
+
148
+ def forward(self, x):
149
+ x = F.interpolate(x, scale_factor=2.0, mode="nearest")
150
+ x = self.conv(x)
151
+
152
+ return x
153
+
154
+
155
+ class ResBlock(nn.Module):
156
+ def __init__(self, in_channels, out_channels=None):
157
+ super(ResBlock, self).__init__()
158
+ self.in_channels = in_channels
159
+ self.out_channels = in_channels if out_channels is None else out_channels
160
+ self.norm1 = normalize(in_channels)
161
+ self.conv1 = nn.Conv2d(in_channels, self.out_channels,
162
+ kernel_size=3, stride=1, padding=1)
163
+ self.norm2 = normalize(self.out_channels)
164
+ self.conv2 = nn.Conv2d(self.out_channels, self.out_channels,
165
+ kernel_size=3, stride=1, padding=1)
166
+ if self.in_channels != self.out_channels:
167
+ self.conv_out = nn.Conv2d(
168
+ in_channels, out_channels, kernel_size=1, stride=1, padding=0)
169
+
170
+ def forward(self, x_in):
171
+ x = x_in
172
+ x = self.norm1(x)
173
+ x = swish(x)
174
+ x = self.conv1(x)
175
+ x = self.norm2(x)
176
+ x = swish(x)
177
+ x = self.conv2(x)
178
+ if self.in_channels != self.out_channels:
179
+ x_in = self.conv_out(x_in)
180
+
181
+ return x + x_in
182
+
183
+
184
+ class AttnBlock(nn.Module):
185
+ def __init__(self, in_channels):
186
+ super().__init__()
187
+ self.in_channels = in_channels
188
+
189
+ self.norm = normalize(in_channels)
190
+ self.q = torch.nn.Conv2d(
191
+ in_channels,
192
+ in_channels,
193
+ kernel_size=1,
194
+ stride=1,
195
+ padding=0
196
+ )
197
+ self.k = torch.nn.Conv2d(
198
+ in_channels,
199
+ in_channels,
200
+ kernel_size=1,
201
+ stride=1,
202
+ padding=0
203
+ )
204
+ self.v = torch.nn.Conv2d(
205
+ in_channels,
206
+ in_channels,
207
+ kernel_size=1,
208
+ stride=1,
209
+ padding=0
210
+ )
211
+ self.proj_out = torch.nn.Conv2d(
212
+ in_channels,
213
+ in_channels,
214
+ kernel_size=1,
215
+ stride=1,
216
+ padding=0
217
+ )
218
+
219
+ def forward(self, x):
220
+ h_ = x
221
+ h_ = self.norm(h_)
222
+ q = self.q(h_)
223
+ k = self.k(h_)
224
+ v = self.v(h_)
225
+
226
+ # compute attention
227
+ b, c, h, w = q.shape
228
+ q = q.reshape(b, c, h*w)
229
+ q = q.permute(0, 2, 1)
230
+ k = k.reshape(b, c, h*w)
231
+ w_ = torch.bmm(q, k)
232
+ w_ = w_ * (int(c)**(-0.5))
233
+ w_ = F.softmax(w_, dim=2)
234
+
235
+ # attend to values
236
+ v = v.reshape(b, c, h*w)
237
+ w_ = w_.permute(0, 2, 1)
238
+ h_ = torch.bmm(v, w_)
239
+ h_ = h_.reshape(b, c, h, w)
240
+
241
+ h_ = self.proj_out(h_)
242
+
243
+ return x+h_
244
+
245
+
246
+ class Encoder(nn.Module):
247
+ def __init__(self, in_channels, nf, emb_dim, ch_mult, num_res_blocks, resolution, attn_resolutions):
248
+ super().__init__()
249
+ self.nf = nf
250
+ self.num_resolutions = len(ch_mult)
251
+ self.num_res_blocks = num_res_blocks
252
+ self.resolution = resolution
253
+ self.attn_resolutions = attn_resolutions
254
+
255
+ curr_res = self.resolution
256
+ in_ch_mult = (1,)+tuple(ch_mult)
257
+
258
+ blocks = []
259
+ # initial convolution
260
+ blocks.append(nn.Conv2d(in_channels, nf,
261
+ kernel_size=3, stride=1, padding=1))
262
+
263
+ # residual and downsampling blocks, with attention on smaller res (16x16)
264
+ for i in range(self.num_resolutions):
265
+ block_in_ch = nf * in_ch_mult[i]
266
+ block_out_ch = nf * ch_mult[i]
267
+ for _ in range(self.num_res_blocks):
268
+ blocks.append(ResBlock(block_in_ch, block_out_ch))
269
+ block_in_ch = block_out_ch
270
+ if curr_res in attn_resolutions:
271
+ blocks.append(AttnBlock(block_in_ch))
272
+
273
+ if i != self.num_resolutions - 1:
274
+ blocks.append(Downsample(block_in_ch))
275
+ curr_res = curr_res // 2
276
+
277
+ # non-local attention block
278
+ blocks.append(ResBlock(block_in_ch, block_in_ch))
279
+ blocks.append(AttnBlock(block_in_ch))
280
+ blocks.append(ResBlock(block_in_ch, block_in_ch))
281
+
282
+ # normalise and convert to latent size
283
+ blocks.append(normalize(block_in_ch))
284
+ blocks.append(nn.Conv2d(block_in_ch, emb_dim,
285
+ kernel_size=3, stride=1, padding=1))
286
+ self.blocks = nn.ModuleList(blocks)
287
+
288
+ def forward(self, x):
289
+ for block in self.blocks:
290
+ x = block(x)
291
+
292
+ return x
293
+
294
+
295
+ class Generator(nn.Module):
296
+ def __init__(self, nf, emb_dim, ch_mult, res_blocks, img_size, attn_resolutions):
297
+ super().__init__()
298
+ self.nf = nf
299
+ self.ch_mult = ch_mult
300
+ self.num_resolutions = len(self.ch_mult)
301
+ self.num_res_blocks = res_blocks
302
+ self.resolution = img_size
303
+ self.attn_resolutions = attn_resolutions
304
+ self.in_channels = emb_dim
305
+ self.out_channels = 3
306
+ block_in_ch = self.nf * self.ch_mult[-1]
307
+ curr_res = self.resolution // 2 ** (self.num_resolutions-1)
308
+
309
+ blocks = []
310
+ # initial conv
311
+ blocks.append(nn.Conv2d(self.in_channels, block_in_ch,
312
+ kernel_size=3, stride=1, padding=1))
313
+
314
+ # non-local attention block
315
+ blocks.append(ResBlock(block_in_ch, block_in_ch))
316
+ blocks.append(AttnBlock(block_in_ch))
317
+ blocks.append(ResBlock(block_in_ch, block_in_ch))
318
+
319
+ for i in reversed(range(self.num_resolutions)):
320
+ block_out_ch = self.nf * self.ch_mult[i]
321
+
322
+ for _ in range(self.num_res_blocks):
323
+ blocks.append(ResBlock(block_in_ch, block_out_ch))
324
+ block_in_ch = block_out_ch
325
+
326
+ if curr_res in self.attn_resolutions:
327
+ blocks.append(AttnBlock(block_in_ch))
328
+
329
+ if i != 0:
330
+ blocks.append(Upsample(block_in_ch))
331
+ curr_res = curr_res * 2
332
+
333
+ blocks.append(normalize(block_in_ch))
334
+ blocks.append(nn.Conv2d(block_in_ch, self.out_channels,
335
+ kernel_size=3, stride=1, padding=1))
336
+
337
+ self.blocks = nn.ModuleList(blocks)
338
+
339
+ def forward(self, x):
340
+ for block in self.blocks:
341
+ x = block(x)
342
+
343
+ return x
344
+
345
+
346
+ @ARCH_REGISTRY.register()
347
+ class VQAutoEncoder(nn.Module):
348
+ def __init__(self, img_size, nf, ch_mult, quantizer="nearest", res_blocks=2, attn_resolutions=[16], codebook_size=1024, emb_dim=256,
349
+ beta=0.25, gumbel_straight_through=False, gumbel_kl_weight=1e-8, model_path=None):
350
+ super().__init__()
351
+ logger = get_root_logger()
352
+ self.in_channels = 3
353
+ self.nf = nf
354
+ self.n_blocks = res_blocks
355
+ self.codebook_size = codebook_size
356
+ self.embed_dim = emb_dim
357
+ self.ch_mult = ch_mult
358
+ self.resolution = img_size
359
+ self.attn_resolutions = attn_resolutions
360
+ self.quantizer_type = quantizer
361
+ self.encoder = Encoder(
362
+ self.in_channels,
363
+ self.nf,
364
+ self.embed_dim,
365
+ self.ch_mult,
366
+ self.n_blocks,
367
+ self.resolution,
368
+ self.attn_resolutions
369
+ )
370
+ if self.quantizer_type == "nearest":
371
+ self.beta = beta # 0.25
372
+ self.quantize = VectorQuantizer(
373
+ self.codebook_size, self.embed_dim, self.beta)
374
+ elif self.quantizer_type == "gumbel":
375
+ self.gumbel_num_hiddens = emb_dim
376
+ self.straight_through = gumbel_straight_through
377
+ self.kl_weight = gumbel_kl_weight
378
+ self.quantize = GumbelQuantizer(
379
+ self.codebook_size,
380
+ self.embed_dim,
381
+ self.gumbel_num_hiddens,
382
+ self.straight_through,
383
+ self.kl_weight
384
+ )
385
+ self.generator = Generator(
386
+ self.nf,
387
+ self.embed_dim,
388
+ self.ch_mult,
389
+ self.n_blocks,
390
+ self.resolution,
391
+ self.attn_resolutions
392
+ )
393
+
394
+ if model_path is not None:
395
+ ckpt = torch.load(model_path, map_location='cpu', weights_only=True)
396
+ if 'params_ema' in ckpt:
397
+ self.load_state_dict(ckpt['params_ema'])
398
+ logger.info(f'vqgan is loaded from: {model_path} [params_ema]')
399
+ elif 'params' in ckpt:
400
+ self.load_state_dict(ckpt['params'])
401
+ logger.info(f'vqgan is loaded from: {model_path} [params]')
402
+ else:
403
+ raise ValueError(f'Wrong params in checkpoint: {model_path}')
404
+
405
+ def forward(self, x):
406
+ x = self.encoder(x)
407
+ quant, codebook_loss, quant_stats = self.quantize(x)
408
+ x = self.generator(quant)
409
+ return x, codebook_loss, quant_stats
410
+
411
+
412
+ # patch based discriminator
413
+ @ARCH_REGISTRY.register()
414
+ class VQGANDiscriminator(nn.Module):
415
+ def __init__(self, nc=3, ndf=64, n_layers=4, model_path=None):
416
+ super().__init__()
417
+
418
+ layers = [nn.Conv2d(nc, ndf, kernel_size=4, stride=2,
419
+ padding=1), nn.LeakyReLU(0.2, True)]
420
+ ndf_mult = 1
421
+ ndf_mult_prev = 1
422
+ for n in range(1, n_layers): # gradually increase the number of filters
423
+ ndf_mult_prev = ndf_mult
424
+ ndf_mult = min(2 ** n, 8)
425
+ layers += [
426
+ nn.Conv2d(ndf * ndf_mult_prev, ndf * ndf_mult,
427
+ kernel_size=4, stride=2, padding=1, bias=False),
428
+ nn.BatchNorm2d(ndf * ndf_mult),
429
+ nn.LeakyReLU(0.2, True)
430
+ ]
431
+
432
+ ndf_mult_prev = ndf_mult
433
+ ndf_mult = min(2 ** n_layers, 8)
434
+
435
+ layers += [
436
+ nn.Conv2d(ndf * ndf_mult_prev, ndf * ndf_mult,
437
+ kernel_size=4, stride=1, padding=1, bias=False),
438
+ nn.BatchNorm2d(ndf * ndf_mult),
439
+ nn.LeakyReLU(0.2, True)
440
+ ]
441
+
442
+ layers += [
443
+ nn.Conv2d(ndf * ndf_mult, 1, kernel_size=4, stride=1, padding=1)] # output 1 channel prediction map
444
+ self.main = nn.Sequential(*layers)
445
+
446
+ if model_path is not None:
447
+ ckpt = torch.load(model_path, map_location='cpu')
448
+ if 'params_d' in ckpt:
449
+ self.load_state_dict(ckpt['params_d'])
450
+ elif 'params' in ckpt:
451
+ self.load_state_dict(ckpt['params'])
452
+ else:
453
+ raise ValueError(f'Wrong params in checkpoint: {model_path}')
454
+
455
+ def forward(self, x):
456
+ return self.main(x)
457
+
458
+
459
+ @ARCH_REGISTRY.register()
460
+ class VQHQEncoder(nn.Module):
461
+ def __init__(self, img_size, nf, ch_mult, quantizer="nearest", res_blocks=2, attn_resolutions=[16], codebook_size=1024, emb_dim=256,
462
+ beta=0.25, gumbel_straight_through=False, gumbel_kl_weight=1e-8, model_path=None, params='params'):
463
+ super().__init__()
464
+ logger = get_root_logger()
465
+ self.in_channels = 3
466
+ self.nf = nf
467
+ self.n_blocks = res_blocks
468
+ self.codebook_size = codebook_size
469
+ self.embed_dim = emb_dim
470
+ self.ch_mult = ch_mult
471
+ self.resolution = img_size
472
+ self.attn_resolutions = attn_resolutions
473
+ self.quantizer_type = quantizer
474
+ self.encoder = Encoder(
475
+ self.in_channels,
476
+ self.nf,
477
+ self.embed_dim,
478
+ self.ch_mult,
479
+ self.n_blocks,
480
+ self.resolution,
481
+ self.attn_resolutions
482
+ )
483
+ if self.quantizer_type == "nearest":
484
+ self.beta = beta # 0.25
485
+ self.quantize = VectorQuantizer(
486
+ self.codebook_size, self.embed_dim, self.beta)
487
+ elif self.quantizer_type == "gumbel":
488
+ self.gumbel_num_hiddens = emb_dim
489
+ self.straight_through = gumbel_straight_through
490
+ self.kl_weight = gumbel_kl_weight
491
+ self.quantize = GumbelQuantizer(
492
+ self.codebook_size,
493
+ self.embed_dim,
494
+ self.gumbel_num_hiddens,
495
+ self.straight_through,
496
+ self.kl_weight
497
+ )
498
+
499
+ if model_path is not None:
500
+ self.load_state_dict(torch.load(
501
+ model_path, map_location='cpu', weights_only=True)[params], strict=False)
502
+ logger.info(
503
+ f'VQGAN for latent calculation is loaded from: {model_path} [{params}]')
504
+
505
+ def forward(self, x):
506
+ x = self.encoder(x)
507
+ quant, codebook_loss, quant_stats = self.quantize(x)
508
+ return x, codebook_loss, quant_stats
509
+
510
+
511
+ @ARCH_REGISTRY.register()
512
+ class Discriminator3D(nn.Module):
513
+ def __init__(self,
514
+ in_channels=3,
515
+ nf=32,
516
+ use_sigmoid=False,
517
+ use_spectral_norm=True,):
518
+ super().__init__()
519
+ self.use_sigmoid = use_sigmoid
520
+
521
+ self.layers = nn.Sequential(
522
+ spectral_norm(
523
+ nn.Conv3d(in_channels=in_channels,
524
+ out_channels=nf * 1,
525
+ kernel_size=(3, 5, 5),
526
+ stride=(1, 2, 2),
527
+ padding=1,
528
+ bias=not use_spectral_norm), use_spectral_norm),
529
+ # nn.InstanceNorm2d(64, track_running_stats=False),
530
+ nn.LeakyReLU(0.2, inplace=True),
531
+ spectral_norm(
532
+ nn.Conv3d(nf * 1,
533
+ nf * 2,
534
+ kernel_size=(3, 5, 5),
535
+ stride=(1, 2, 2),
536
+ padding=(1, 2, 2),
537
+ bias=not use_spectral_norm), use_spectral_norm),
538
+ # nn.InstanceNorm2d(128, track_running_stats=False),
539
+ nn.LeakyReLU(0.2, inplace=True),
540
+ spectral_norm(
541
+ nn.Conv3d(nf * 2,
542
+ nf * 4,
543
+ kernel_size=(3, 5, 5),
544
+ stride=(1, 2, 2),
545
+ padding=(1, 2, 2),
546
+ bias=not use_spectral_norm), use_spectral_norm),
547
+ # nn.InstanceNorm2d(256, track_running_stats=False),
548
+ nn.LeakyReLU(0.2, inplace=True),
549
+ spectral_norm(
550
+ nn.Conv3d(nf * 4,
551
+ nf * 4,
552
+ kernel_size=(3, 5, 5),
553
+ stride=(1, 2, 2),
554
+ padding=(1, 2, 2),
555
+ bias=not use_spectral_norm), use_spectral_norm),
556
+ # nn.InstanceNorm2d(256, track_running_stats=False),
557
+ nn.LeakyReLU(0.2, inplace=True),
558
+ spectral_norm(
559
+ nn.Conv3d(nf * 4,
560
+ nf * 4,
561
+ kernel_size=(3, 5, 5),
562
+ stride=(1, 2, 2),
563
+ padding=(1, 2, 2),
564
+ bias=not use_spectral_norm), use_spectral_norm),
565
+ # nn.InstanceNorm2d(256, track_running_stats=False),
566
+ nn.LeakyReLU(0.2, inplace=True),
567
+ nn.Conv3d(nf * 4,
568
+ nf * 4,
569
+ kernel_size=(3, 5, 5),
570
+ stride=(1, 2, 2),
571
+ padding=(1, 2, 2)))
572
+
573
+ self.apply(self._init_weights)
574
+
575
+ def _init_weights(self, module):
576
+ if isinstance(module, (nn.Linear, nn.Embedding)):
577
+ module.weight.data.normal_(mean=0.0, std=0.02)
578
+ if isinstance(module, nn.Linear) and module.bias is not None:
579
+ module.bias.data.zero_()
580
+ elif isinstance(module, nn.LayerNorm):
581
+ module.bias.data.zero_()
582
+ module.weight.data.fill_(1.0)
583
+
584
+ def forward(self, xs):
585
+ # B, T, C, H, W (new)
586
+ xs_t = torch.transpose(xs, 1, 2)
587
+ feat = self.layers(xs_t)
588
+ if self.use_sigmoid:
589
+ feat = torch.sigmoid(feat)
590
+ out = torch.transpose(feat, 1, 2) # B, T, C, H, W
591
+ return out
592
+
593
+
594
+ def spectral_norm(module, mode=True):
595
+ if mode:
596
+ return _spectral_norm(module)
597
+ return module
basicsr/data/__init__.py ADDED
@@ -0,0 +1,100 @@
1
+ import importlib
2
+ import numpy as np
3
+ import random
4
+ import torch
5
+ import torch.utils.data
6
+ from copy import deepcopy
7
+ from functools import partial
8
+ from os import path as osp
9
+
10
+ from basicsr.data.prefetch_dataloader import PrefetchDataLoader
11
+ from basicsr.utils import get_root_logger, scandir
12
+ from basicsr.utils.dist_util import get_dist_info
13
+ from basicsr.utils.registry import DATASET_REGISTRY
14
+
15
+ __all__ = ['build_dataset', 'build_dataloader']
16
+
17
+ # automatically scan and import dataset modules for registry
18
+ # scan all the files under the data folder with '_dataset' in file names
19
+ data_folder = osp.dirname(osp.abspath(__file__))
20
+ dataset_filenames = [osp.splitext(osp.basename(v))[0] for v in scandir(data_folder) if v.endswith('_dataset.py')]
21
+ # import all the dataset modules
22
+ _dataset_modules = [importlib.import_module(f'basicsr.data.{file_name}') for file_name in dataset_filenames]
23
+
24
+
25
+ def build_dataset(dataset_opt):
26
+ """Build dataset from options.
27
+
28
+ Args:
29
+ dataset_opt (dict): Configuration for dataset. It must contain:
30
+ name (str): Dataset name.
31
+ type (str): Dataset type.
32
+ """
33
+ dataset_opt = deepcopy(dataset_opt)
34
+ dataset = DATASET_REGISTRY.get(dataset_opt['type'])(dataset_opt)
35
+ logger = get_root_logger()
36
+ logger.info(f'Dataset [{dataset.__class__.__name__}] - {dataset_opt["name"]} ' 'is built.')
37
+ return dataset
38
+
39
+
40
+ def build_dataloader(dataset, dataset_opt, num_gpu=1, dist=False, sampler=None, seed=None):
41
+ """Build dataloader.
42
+
43
+ Args:
44
+ dataset (torch.utils.data.Dataset): Dataset.
45
+ dataset_opt (dict): Dataset options. It contains the following keys:
46
+ phase (str): 'train' or 'val'.
47
+ num_worker_per_gpu (int): Number of workers for each GPU.
48
+ batch_size_per_gpu (int): Training batch size for each GPU.
49
+ num_gpu (int): Number of GPUs. Used only in the train phase.
50
+ Default: 1.
51
+ dist (bool): Whether in distributed training. Used only in the train
52
+ phase. Default: False.
53
+ sampler (torch.utils.data.sampler): Data sampler. Default: None.
54
+ seed (int | None): Seed. Default: None
55
+ """
56
+ phase = dataset_opt['phase']
57
+ rank, _ = get_dist_info()
58
+ if phase == 'train':
59
+ if dist: # distributed training
60
+ batch_size = dataset_opt['batch_size_per_gpu']
61
+ num_workers = dataset_opt['num_worker_per_gpu']
62
+ else: # non-distributed training
63
+ multiplier = 1 if num_gpu == 0 else num_gpu
64
+ batch_size = dataset_opt['batch_size_per_gpu'] * multiplier
65
+ num_workers = dataset_opt['num_worker_per_gpu'] * multiplier
66
+ dataloader_args = dict(
67
+ dataset=dataset,
68
+ batch_size=batch_size,
69
+ shuffle=False,
70
+ num_workers=num_workers,
71
+ sampler=sampler,
72
+ drop_last=True)
73
+ if sampler is None:
74
+ dataloader_args['shuffle'] = True
75
+ dataloader_args['worker_init_fn'] = partial(
76
+ worker_init_fn, num_workers=num_workers, rank=rank, seed=seed) if seed is not None else None
77
+ elif phase in ['val', 'test']: # validation
78
+ dataloader_args = dict(dataset=dataset, batch_size=1, shuffle=False, num_workers=0)
79
+ else:
80
+ raise ValueError(f'Wrong dataset phase: {phase}. ' "Supported ones are 'train', 'val' and 'test'.")
81
+
82
+ dataloader_args['pin_memory'] = dataset_opt.get('pin_memory', False)
83
+
84
+ prefetch_mode = dataset_opt.get('prefetch_mode')
85
+ if prefetch_mode == 'cpu': # CPUPrefetcher
86
+ num_prefetch_queue = dataset_opt.get('num_prefetch_queue', 1)
87
+ logger = get_root_logger()
88
+ logger.info(f'Use {prefetch_mode} prefetch dataloader: ' f'num_prefetch_queue = {num_prefetch_queue}')
89
+ return PrefetchDataLoader(num_prefetch_queue=num_prefetch_queue, **dataloader_args)
90
+ else:
91
+ # prefetch_mode=None: Normal dataloader
92
+ # prefetch_mode='cuda': dataloader for CUDAPrefetcher
93
+ return torch.utils.data.DataLoader(**dataloader_args)
94
+
95
+
96
+ def worker_init_fn(worker_id, num_workers, rank, seed):
97
+ # Set the worker seed to num_workers * rank + worker_id + seed
98
+ worker_seed = num_workers * rank + worker_id + seed
99
+ np.random.seed(worker_seed)
100
+ random.seed(worker_seed)
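For context, `build_dataset` looks the requested type up in `DATASET_REGISTRY` and `build_dataloader` wraps the result according to `phase`. A hedged usage sketch; the `'PairedImageDataset'` type and all option values are illustrative placeholders, not taken from this repository's configs:

```python
# Hedged sketch: dataset type and option values are illustrative only.
from basicsr.data import build_dataset, build_dataloader

dataset_opt = {
    'name': 'demo_train',
    'type': 'PairedImageDataset',  # any class registered in DATASET_REGISTRY
    'phase': 'train',              # selects the training branch in build_dataloader
    'batch_size_per_gpu': 4,
    'num_worker_per_gpu': 2,
    'pin_memory': True,
    # ...plus whatever keys the chosen dataset class itself requires
}

dataset = build_dataset(dataset_opt)
loader = build_dataloader(dataset, dataset_opt, num_gpu=1, dist=False, sampler=None, seed=0)
for batch in loader:
    pass  # training step goes here
```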
basicsr/data/data_sampler.py ADDED
@@ -0,0 +1,48 @@
+ import math
+ import torch
+ from torch.utils.data.sampler import Sampler
+
+
+ class EnlargedSampler(Sampler):
+     """Sampler that restricts data loading to a subset of the dataset.
+
+     Modified from torch.utils.data.distributed.DistributedSampler.
+     Supports enlarging the dataset for iteration-based training, saving
+     time when restarting the dataloader after each epoch.
+
+     Args:
+         dataset (torch.utils.data.Dataset): Dataset used for sampling.
+         num_replicas (int | None): Number of processes participating in
+             the training. It is usually the world_size.
+         rank (int | None): Rank of the current process within num_replicas.
+         ratio (int): Enlarging ratio. Default: 1.
+     """
+
+     def __init__(self, dataset, num_replicas, rank, ratio=1):
+         self.dataset = dataset
+         self.num_replicas = num_replicas
+         self.rank = rank
+         self.epoch = 0
+         self.num_samples = math.ceil(len(self.dataset) * ratio / self.num_replicas)
+         self.total_size = self.num_samples * self.num_replicas
+
+     def __iter__(self):
+         # deterministically shuffle based on epoch
+         g = torch.Generator()
+         g.manual_seed(self.epoch)
+         indices = torch.randperm(self.total_size, generator=g).tolist()
+
+         dataset_size = len(self.dataset)
+         indices = [v % dataset_size for v in indices]
+
+         # subsample
+         indices = indices[self.rank:self.total_size:self.num_replicas]
+         assert len(indices) == self.num_samples
+
+         return iter(indices)
+
+     def __len__(self):
+         return self.num_samples
+
+     def set_epoch(self, epoch):
+         self.epoch = epoch
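`EnlargedSampler` repeats and shuffles dataset indices so that iteration-based training does not pay the dataloader restart cost every epoch; `set_epoch` only reseeds the deterministic shuffle. A small sketch of how it plugs into `build_dataloader` above, continuing the previous sketch (`dataset` and `dataset_opt` as before; the ratio and epoch count are example values):

```python
# Example values only: single process (num_replicas=1, rank=0) and a 10x
# enlarging ratio, so one sampler "epoch" covers the dataset 10 times.
from basicsr.data import build_dataloader
from basicsr.data.data_sampler import EnlargedSampler

sampler = EnlargedSampler(dataset, num_replicas=1, rank=0, ratio=10)
loader = build_dataloader(dataset, dataset_opt, num_gpu=1, dist=False, sampler=sampler, seed=0)

num_epochs = 10
for epoch in range(num_epochs):
    sampler.set_epoch(epoch)  # changes the shuffle seed for this epoch
    for batch in loader:
        pass
```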
basicsr/data/data_util.py ADDED
@@ -0,0 +1,392 @@
+ import cv2
+ import math
+ import numpy as np
+ import torch
+ from os import path as osp
+ from PIL import Image, ImageDraw
+ from torch.nn import functional as F
+
+ from basicsr.data.transforms import mod_crop
+ from basicsr.utils import img2tensor, scandir
+
+
+ def read_img_seq(path, require_mod_crop=False, scale=1):
+     """Read a sequence of images from a given folder path.
+
+     Args:
+         path (list[str] | str): List of image paths or image folder path.
+         require_mod_crop (bool): Require mod crop for each image.
+             Default: False.
+         scale (int): Scale factor for mod_crop. Default: 1.
+
+     Returns:
+         Tensor: size (t, c, h, w), RGB, [0, 1].
+     """
+     if isinstance(path, list):
+         img_paths = path
+     else:
+         img_paths = sorted(list(scandir(path, full_path=True)))
+     imgs = [cv2.imread(v).astype(np.float32) / 255. for v in img_paths]
+     if require_mod_crop:
+         imgs = [mod_crop(img, scale) for img in imgs]
+     imgs = img2tensor(imgs, bgr2rgb=True, float32=True)
+     imgs = torch.stack(imgs, dim=0)
+     return imgs
+
+
+ def generate_frame_indices(crt_idx, max_frame_num, num_frames, padding='reflection'):
+     """Generate an index list for reading `num_frames` frames from a sequence
+     of images.
+
+     Args:
+         crt_idx (int): Current center index.
+         max_frame_num (int): Total number of frames in the sequence (counted from 1).
+         num_frames (int): Number of frames to read.
+         padding (str): Padding mode, one of
+             'replicate' | 'reflection' | 'reflection_circle' | 'circle'
+             Examples: current_idx = 0, num_frames = 5
+             The generated frame indices under different padding modes:
+             replicate: [0, 0, 0, 1, 2]
+             reflection: [2, 1, 0, 1, 2]
+             reflection_circle: [4, 3, 0, 1, 2]
+             circle: [3, 4, 0, 1, 2]
+
+     Returns:
+         list[int]: A list of indices.
+     """
+     assert num_frames % 2 == 1, 'num_frames should be an odd number.'
+     assert padding in ('replicate', 'reflection', 'reflection_circle', 'circle'), f'Wrong padding mode: {padding}.'
+
+     max_frame_num = max_frame_num - 1  # start from 0
+     num_pad = num_frames // 2
+
+     indices = []
+     for i in range(crt_idx - num_pad, crt_idx + num_pad + 1):
+         if i < 0:
+             if padding == 'replicate':
+                 pad_idx = 0
+             elif padding == 'reflection':
+                 pad_idx = -i
+             elif padding == 'reflection_circle':
+                 pad_idx = crt_idx + num_pad - i
+             else:
+                 pad_idx = num_frames + i
+         elif i > max_frame_num:
+             if padding == 'replicate':
+                 pad_idx = max_frame_num
+             elif padding == 'reflection':
+                 pad_idx = max_frame_num * 2 - i
+             elif padding == 'reflection_circle':
+                 pad_idx = (crt_idx - num_pad) - (i - max_frame_num)
+             else:
+                 pad_idx = i - num_frames
+         else:
+             pad_idx = i
+         indices.append(pad_idx)
+     return indices
+
+
+ def paired_paths_from_lmdb(folders, keys):
+     """Generate paired paths from lmdb files.
+
+     Contents of lmdb. Taking the `lq.lmdb` for example, the file structure is:
+
+     lq.lmdb
+     ├── data.mdb
+     ├── lock.mdb
+     ├── meta_info.txt
+
+     The data.mdb and lock.mdb are standard lmdb files and you can refer to
+     https://lmdb.readthedocs.io/en/release/ for more details.
+
+     The meta_info.txt is a specified txt file to record the meta information
+     of our datasets. It will be automatically created when preparing
+     datasets by our provided dataset tools.
+     Each line in the txt file records
+     1) image name (with extension),
+     2) image shape,
+     3) compression level, separated by a white space.
+     Example: `baboon.png (120,125,3) 1`
+
+     We use the image name without extension as the lmdb key.
+     Note that we use the same key for the corresponding lq and gt images.
+
+     Args:
+         folders (list[str]): A list of folder paths. The order of the list
+             should be [input_folder, gt_folder].
+         keys (list[str]): A list of keys identifying folders. The order should
+             be consistent with folders, e.g., ['lq', 'gt'].
+             Note that this key is different from lmdb keys.
+
+     Returns:
+         list[str]: Returned path list.
+     """
+     assert len(folders) == 2, ('The len of folders should be 2 with [input_folder, gt_folder]. '
+                                f'But got {len(folders)}')
+     assert len(keys) == 2, f'The len of keys should be 2 with [input_key, gt_key]. But got {len(keys)}'
+     input_folder, gt_folder = folders
+     input_key, gt_key = keys
+
+     if not (input_folder.endswith('.lmdb') and gt_folder.endswith('.lmdb')):
+         raise ValueError(f'{input_key} folder and {gt_key} folder should both be in lmdb '
+                          f'format. But received {input_key}: {input_folder}; '
+                          f'{gt_key}: {gt_folder}')
+     # ensure that the two meta_info files are the same
+     with open(osp.join(input_folder, 'meta_info.txt')) as fin:
+         input_lmdb_keys = [line.split('.')[0] for line in fin]
+     with open(osp.join(gt_folder, 'meta_info.txt')) as fin:
+         gt_lmdb_keys = [line.split('.')[0] for line in fin]
+     if set(input_lmdb_keys) != set(gt_lmdb_keys):
+         raise ValueError(f'Keys in {input_key}_folder and {gt_key}_folder are different.')
+     else:
+         paths = []
+         for lmdb_key in sorted(input_lmdb_keys):
+             paths.append(dict([(f'{input_key}_path', lmdb_key), (f'{gt_key}_path', lmdb_key)]))
+         return paths
+
+
+ def paired_paths_from_meta_info_file(folders, keys, meta_info_file, filename_tmpl):
+     """Generate paired paths from a meta information file.
+
+     Each line in the meta information file contains the image names and
+     image shape (usually for gt), separated by a white space.
+
+     Example of a meta information file:
+     ```
+     0001_s001.png (480,480,3)
+     0001_s002.png (480,480,3)
+     ```
+
+     Args:
+         folders (list[str]): A list of folder paths. The order of the list
+             should be [input_folder, gt_folder].
+         keys (list[str]): A list of keys identifying folders. The order should
+             be consistent with folders, e.g., ['lq', 'gt'].
+         meta_info_file (str): Path to the meta information file.
+         filename_tmpl (str): Template for each filename. Note that the
+             template excludes the file extension. Usually the filename_tmpl is
+             for files in the input folder.
+
+     Returns:
+         list[str]: Returned path list.
+     """
+     assert len(folders) == 2, ('The len of folders should be 2 with [input_folder, gt_folder]. '
+                                f'But got {len(folders)}')
+     assert len(keys) == 2, f'The len of keys should be 2 with [input_key, gt_key]. But got {len(keys)}'
+     input_folder, gt_folder = folders
+     input_key, gt_key = keys
+
+     with open(meta_info_file, 'r') as fin:
+         gt_names = [line.split(' ')[0] for line in fin]
+
+     paths = []
+     for gt_name in gt_names:
+         basename, ext = osp.splitext(osp.basename(gt_name))
+         input_name = f'{filename_tmpl.format(basename)}{ext}'
+         input_path = osp.join(input_folder, input_name)
+         gt_path = osp.join(gt_folder, gt_name)
+         paths.append(dict([(f'{input_key}_path', input_path), (f'{gt_key}_path', gt_path)]))
+     return paths
+
+
+ def paired_paths_from_folder(folders, keys, filename_tmpl):
+     """Generate paired paths from folders.
+
+     Args:
+         folders (list[str]): A list of folder paths. The order of the list
+             should be [input_folder, gt_folder].
+         keys (list[str]): A list of keys identifying folders. The order should
+             be consistent with folders, e.g., ['lq', 'gt'].
+         filename_tmpl (str): Template for each filename. Note that the
+             template excludes the file extension. Usually the filename_tmpl is
+             for files in the input folder.
+
+     Returns:
+         list[str]: Returned path list.
+     """
+     assert len(folders) == 2, ('The len of folders should be 2 with [input_folder, gt_folder]. '
+                                f'But got {len(folders)}')
+     assert len(keys) == 2, f'The len of keys should be 2 with [input_key, gt_key]. But got {len(keys)}'
+     input_folder, gt_folder = folders
+     input_key, gt_key = keys
+
+     input_paths = list(scandir(input_folder))
+     gt_paths = list(scandir(gt_folder))
+     assert len(input_paths) == len(gt_paths), (f'{input_key} and {gt_key} datasets have different number of images: '
+                                                f'{len(input_paths)}, {len(gt_paths)}.')
+     paths = []
+     for gt_path in gt_paths:
+         basename, ext = osp.splitext(osp.basename(gt_path))
+         input_name = f'{filename_tmpl.format(basename)}{ext}'
+         input_path = osp.join(input_folder, input_name)
+         assert input_name in input_paths, f'{input_name} is not in {input_key}_paths.'
+         gt_path = osp.join(gt_folder, gt_path)
+         paths.append(dict([(f'{input_key}_path', input_path), (f'{gt_key}_path', gt_path)]))
+     return paths
+
+
+ def paths_from_folder(folder):
+     """Generate paths from folder.
+
+     Args:
+         folder (str): Folder path.
+
+     Returns:
+         list[str]: Returned path list.
+     """
+
+     paths = list(scandir(folder))
+     paths = [osp.join(folder, path) for path in paths]
+     return paths
+
+
+ def paths_from_lmdb(folder):
+     """Generate paths from lmdb.
+
+     Args:
+         folder (str): Folder path.
+
+     Returns:
+         list[str]: Returned path list.
+     """
+     if not folder.endswith('.lmdb'):
+         raise ValueError(f'Folder {folder} should be in lmdb format.')
+     with open(osp.join(folder, 'meta_info.txt')) as fin:
+         paths = [line.split('.')[0] for line in fin]
+     return paths
+
+
+ def generate_gaussian_kernel(kernel_size=13, sigma=1.6):
+     """Generate Gaussian kernel used in `duf_downsample`.
+
+     Args:
+         kernel_size (int): Kernel size. Default: 13.
+         sigma (float): Sigma of the Gaussian kernel. Default: 1.6.
+
+     Returns:
+         np.array: The Gaussian kernel.
+     """
+     from scipy.ndimage import filters as filters
+     kernel = np.zeros((kernel_size, kernel_size))
+     # set element at the middle to one, a dirac delta
+     kernel[kernel_size // 2, kernel_size // 2] = 1
+     # gaussian-smooth the dirac, resulting in a gaussian filter
+     return filters.gaussian_filter(kernel, sigma)
+
+
+ def duf_downsample(x, kernel_size=13, scale=4):
+     """Downsampling with the Gaussian kernel used in the DUF official code.
+
+     Args:
+         x (Tensor): Frames to be downsampled, with shape (b, t, c, h, w).
+         kernel_size (int): Kernel size. Default: 13.
+         scale (int): Downsampling factor. Supported scale: (2, 3, 4).
+             Default: 4.
+
+     Returns:
+         Tensor: DUF downsampled frames.
+     """
+     assert scale in (2, 3, 4), f'Only support scale (2, 3, 4), but got {scale}.'
+
+     squeeze_flag = False
+     if x.ndim == 4:
+         squeeze_flag = True
+         x = x.unsqueeze(0)
+     b, t, c, h, w = x.size()
+     x = x.view(-1, 1, h, w)
+     pad_w, pad_h = kernel_size // 2 + scale * 2, kernel_size // 2 + scale * 2
+     x = F.pad(x, (pad_w, pad_w, pad_h, pad_h), 'reflect')
+
+     gaussian_filter = generate_gaussian_kernel(kernel_size, 0.4 * scale)
+     gaussian_filter = torch.from_numpy(gaussian_filter).type_as(x).unsqueeze(0).unsqueeze(0)
+     x = F.conv2d(x, gaussian_filter, stride=scale)
+     x = x[:, :, 2:-2, 2:-2]
+     x = x.view(b, t, c, x.size(2), x.size(3))
+     if squeeze_flag:
+         x = x.squeeze(0)
+     return x
+
+
+ def brush_stroke_mask(img, color=(255, 255, 255)):
+     min_num_vertex = 8
+     max_num_vertex = 28
+     mean_angle = 2 * math.pi / 5
+     angle_range = 2 * math.pi / 12
+     # training large mask ratio (training setting)
+     min_width = 30
+     max_width = 70
+     # very large mask ratio (test setting and refine after 200k)
+     # min_width = 80
+     # max_width = 120
+
+     def generate_mask(H, W, img=None):
+         average_radius = math.sqrt(H * H + W * W) / 8
+         mask = Image.new('RGB', (W, H), 0)
+         if img is not None:
+             mask = img  # Image.fromarray(img)
+
+         for _ in range(np.random.randint(1, 4)):
+             num_vertex = np.random.randint(min_num_vertex, max_num_vertex)
+             angle_min = mean_angle - np.random.uniform(0, angle_range)
+             angle_max = mean_angle + np.random.uniform(0, angle_range)
+             angles = []
+             vertex = []
+             for i in range(num_vertex):
+                 if i % 2 == 0:
+                     angles.append(2 * math.pi - np.random.uniform(angle_min, angle_max))
+                 else:
+                     angles.append(np.random.uniform(angle_min, angle_max))
+
+             h, w = mask.size
+             vertex.append((int(np.random.randint(0, w)), int(np.random.randint(0, h))))
+             for i in range(num_vertex):
+                 r = np.clip(
+                     np.random.normal(loc=average_radius, scale=average_radius // 2),
+                     0, 2 * average_radius)
+                 new_x = np.clip(vertex[-1][0] + r * math.cos(angles[i]), 0, w)
+                 new_y = np.clip(vertex[-1][1] + r * math.sin(angles[i]), 0, h)
+                 vertex.append((int(new_x), int(new_y)))
+
+             draw = ImageDraw.Draw(mask)
+             width = int(np.random.uniform(min_width, max_width))
+             draw.line(vertex, fill=color, width=width)
+             for v in vertex:
+                 draw.ellipse((v[0] - width // 2,
+                               v[1] - width // 2,
+                               v[0] + width // 2,
+                               v[1] + width // 2),
+                              fill=color)
+
+         return mask
+
+     width, height = img.size
+     mask = generate_mask(height, width, img)
+     return mask
+
+
+ def random_ff_mask(shape, max_angle=10, max_len=100, max_width=70, times=10):
+     """Generate a random free-form stroke mask.
+
+     Args:
+         shape (tuple[int]): Mask shape as (height, width).
+         max_angle (int): Upper bound of the random stroke angle. Default: 10.
+         max_len (int): Upper bound of the random stroke length. Default: 100.
+         max_width (int): Upper bound of the random stroke width. Default: 70.
+         times (int): Upper bound of the number of strokes. Default: 10.
+
+     Returns:
+         np.ndarray: Mask of shape (height, width), float32, with stroke pixels set to 1.
+
+     Link:
+         https://github.com/csqiangwen/DeepFillv2_Pytorch/blob/master/train_dataset.py
+     """
+     height = shape[0]
+     width = shape[1]
+     mask = np.zeros((height, width), np.float32)
+     times = np.random.randint(times - 5, times)
+     for i in range(times):
+         start_x = np.random.randint(width)
+         start_y = np.random.randint(height)
+         for j in range(1 + np.random.randint(5)):
+             angle = 0.01 + np.random.randint(max_angle)
+             if i % 2 == 0:
+                 angle = 2 * 3.1415926 - angle
+             length = 10 + np.random.randint(max_len - 20, max_len)
+             brush_w = 5 + np.random.randint(max_width - 30, max_width)
+             end_x = (start_x + length * np.sin(angle)).astype(np.int32)
+             end_y = (start_y + length * np.cos(angle)).astype(np.int32)
+             cv2.line(mask, (start_y, start_x), (end_y, end_x), 1.0, brush_w)
+             start_x, start_y = end_x, end_y
+     return mask.astype(np.float32)
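The padding modes of `generate_frame_indices` are easiest to see at a sequence boundary. The values below follow directly from the docstring example (center index 0, 5 frames per window; the 100-frame sequence length is an arbitrary example value):

```python
from basicsr.data.data_util import generate_frame_indices

# Center index 0 of a 100-frame sequence, reading 5 frames per window.
print(generate_frame_indices(0, 100, 5, padding='replicate'))          # [0, 0, 0, 1, 2]
print(generate_frame_indices(0, 100, 5, padding='reflection'))         # [2, 1, 0, 1, 2]
print(generate_frame_indices(0, 100, 5, padding='reflection_circle'))  # [4, 3, 0, 1, 2]
print(generate_frame_indices(0, 100, 5, padding='circle'))             # [3, 4, 0, 1, 2]
```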