Upload 176 files
This view is limited to 50 files because it contains too many changes.
- app.py +178 -0
- core/abc_modules.py +50 -0
- core/aff_utils.py +178 -0
- core/arch_resnest/resnest.py +71 -0
- core/arch_resnest/resnet.py +308 -0
- core/arch_resnest/splat.py +99 -0
- core/arch_resnet/resnet.py +157 -0
- core/datasets.py +239 -0
- core/deeplab_utils.py +126 -0
- core/networks.py +355 -0
- core/puzzle_utils.py +69 -0
- core/sync_batchnorm/__init__.py +12 -0
- core/sync_batchnorm/batchnorm.py +282 -0
- core/sync_batchnorm/comm.py +129 -0
- core/sync_batchnorm/replicate.py +88 -0
- core/sync_batchnorm/unittest.py +29 -0
- data/VOC_2012.json +198 -0
- data/test.txt +1456 -0
- data/train.txt +1464 -0
- data/train_aug.txt +0 -0
- data/val.txt +1449 -0
- requirements.txt +0 -0
- res/figure_1/fig_1_a.png +0 -0
- res/figure_1/fig_1_b.png +0 -0
- res/figure_1/fig_1_c.png +0 -0
- res/figure_2.PNG +0 -0
- res/figure_2/original.png +0 -0
- res/figure_3/figure_3_a.png +0 -0
- res/figure_3/figure_3_b.png +0 -0
- res/figure_3/figure_3_c.png +0 -0
- res/figure_3/figure_3_d.png +0 -0
- res/figure_4/2007_000123.png +0 -0
- res/figure_4/2007_000123_gt.png +0 -0
- res/figure_4/2007_000123_pred.png +0 -0
- res/figure_4/2007_000175.png +0 -0
- res/figure_4/2007_000175_gt.png +0 -0
- res/figure_4/2007_000175_pred.png +0 -0
- res/figure_4/2007_000762.png +0 -0
- res/figure_4/2007_000762_gt.png +0 -0
- res/figure_4/2007_000762_pred.png +0 -0
- res/figure_4/2007_000799.png +0 -0
- res/figure_4/2007_000799_gt.png +0 -0
- res/figure_4/2007_000799_pred.png +0 -0
- res/figure_4/2007_000999.png +0 -0
- res/figure_4/2007_000999_gt.png +0 -0
- res/figure_4/2007_000999_pred.png +0 -0
- res/figure_4/2007_001239.png +0 -0
- res/figure_4/2007_001239_gt.png +0 -0
- res/figure_4/2007_001239_pred.png +0 -0
- res/figure_4/2007_001284.png +0 -0
app.py
ADDED
@@ -0,0 +1,178 @@
# Copyright (C) 2020 * Ltd. All rights reserved.
# author : Sanghyeon Jo <josanghyeokn@gmail.com>

import gradio as gr

import os
import sys
import copy
import shutil
import random
import argparse
import numpy as np

import imageio
import PIL.Image  # PIL.Image.BICUBIC is referenced in predict_image below

import torch
import torch.nn as nn
import torch.nn.functional as F

from torchvision import transforms
from torch.utils.tensorboard import SummaryWriter

from torch.utils.data import DataLoader

from core.puzzle_utils import *
from core.networks import *
from core.datasets import *

from tools.general.io_utils import *
from tools.general.time_utils import *
from tools.general.json_utils import *

from tools.ai.log_utils import *
from tools.ai.demo_utils import *
from tools.ai.optim_utils import *
from tools.ai.torch_utils import *
from tools.ai.evaluate_utils import *

from tools.ai.augment_utils import *
from tools.ai.randaugment import *

parser = argparse.ArgumentParser()

###############################################################################
# Dataset
###############################################################################
parser.add_argument('--seed', default=2606, type=int)
parser.add_argument('--num_workers', default=4, type=int)
parser.add_argument('--data_dir', default='../VOCtrainval_11-May-2012/', type=str)

###############################################################################
# Network
###############################################################################
parser.add_argument('--architecture', default='DeepLabv3+', type=str)
parser.add_argument('--backbone', default='resnet50', type=str)
parser.add_argument('--mode', default='fix', type=str)
parser.add_argument('--use_gn', default=True, type=str2bool)

###############################################################################
# Inference parameters
###############################################################################
parser.add_argument('--tag', default='', type=str)

parser.add_argument('--domain', default='val', type=str)

parser.add_argument('--scales', default='0.5,1.0,1.5,2.0', type=str)
parser.add_argument('--iteration', default=10, type=int)

if __name__ == '__main__':
    ###################################################################################
    # Arguments
    ###################################################################################
    args = parser.parse_args()

    model_dir = create_directory('./experiments/models/')
    model_path = model_dir + 'DeepLabv3+@ResNeSt-101@Fix@GN.pth'

    if 'train' in args.domain:
        args.tag += '@train'
    else:
        args.tag += '@' + args.domain

    args.tag += '@scale=%s' % args.scales
    args.tag += '@iteration=%d' % args.iteration

    set_seed(args.seed)
    log_func = lambda string='': print(string)

    ###################################################################################
    # Transform, Dataset, DataLoader
    ###################################################################################
    imagenet_mean = [0.485, 0.456, 0.406]
    imagenet_std = [0.229, 0.224, 0.225]

    normalize_fn = Normalize(imagenet_mean, imagenet_std)

    # for mIoU
    meta_dic = read_json('./data/VOC_2012.json')

    ###################################################################################
    # Network
    ###################################################################################
    if args.architecture == 'DeepLabv3+':
        model = DeepLabv3_Plus(args.backbone, num_classes=meta_dic['classes'] + 1, mode=args.mode,
                               use_group_norm=args.use_gn)
    elif args.architecture == 'Seg_Model':
        model = Seg_Model(args.backbone, num_classes=meta_dic['classes'] + 1)
    elif args.architecture == 'CSeg_Model':
        model = CSeg_Model(args.backbone, num_classes=meta_dic['classes'] + 1)

    model = model.cuda()
    model.eval()

    log_func('[i] Architecture is {}'.format(args.architecture))
    log_func('[i] Total Params: %.2fM' % (calculate_parameters(model)))
    log_func()

    load_model(model, model_path, parallel=False)

    #################################################################################################
    # Evaluation
    #################################################################################################
    eval_timer = Timer()
    scales = [float(scale) for scale in args.scales.split(',')]

    model.eval()
    eval_timer.tik()

    def inference(images, image_size):
        images = images.cuda()

        logits = model(images)
        logits = resize_for_tensors(logits, image_size)

        logits = logits[0] + logits[1].flip(-1)
        logits = get_numpy_from_tensor(logits).transpose((1, 2, 0))
        return logits

    def predict_image(ori_image):
        with torch.no_grad():
            ori_w, ori_h = ori_image.size

            cams_list = []

            for scale in scales:
                image = copy.deepcopy(ori_image)
                image = image.resize((round(ori_w * scale), round(ori_h * scale)), resample=PIL.Image.BICUBIC)

                image = normalize_fn(image)
                image = image.transpose((2, 0, 1))

                image = torch.from_numpy(image)
                flipped_image = image.flip(-1)

                images = torch.stack([image, flipped_image])

                cams = inference(images, (ori_h, ori_w))
                cams_list.append(cams)

            preds = np.sum(cams_list, axis=0)
            preds = F.softmax(torch.from_numpy(preds), dim=-1).numpy()

            if args.iteration > 0:
                preds = crf_inference(np.asarray(ori_image), preds.transpose((2, 0, 1)), t=args.iteration)
                pred_mask = np.argmax(preds, axis=0)
            else:
                pred_mask = np.argmax(preds, axis=-1)

        return pred_mask.astype(np.uint8)

    demo = gr.Interface(
        fn=predict_image,
        inputs="image",
        outputs="image"
    )
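One detail worth noting: app.py constructs the gr.Interface but, as listed, never calls launch(), so running the script directly would load the model and exit. A minimal sketch of the usual ending (server and queue options that Gradio accepts are omitted):

    demo.launch()  # start the Gradio server; blocks until interrupted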
core/abc_modules.py
ADDED
@@ -0,0 +1,50 @@
import math

import torch
import torch.nn as nn

from abc import ABC

class ABC_Model(ABC):
    def global_average_pooling_2d(self, x, keepdims=False):
        x = torch.mean(x.view(x.size(0), x.size(1), -1), -1)
        if keepdims:
            x = x.view(x.size(0), x.size(1), 1, 1)
        return x

    def initialize(self, modules):
        for m in modules:
            if isinstance(m, nn.Conv2d):
                # n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                # m.weight.data.normal_(0, math.sqrt(2. / n))
                torch.nn.init.kaiming_normal_(m.weight)

            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def get_parameter_groups(self, print_fn=print):
        groups = ([], [], [], [])

        for name, value in self.named_parameters():
            # pretrained weights
            if 'model' in name:
                if 'weight' in name:
                    # print_fn(f'pretrained weights : {name}')
                    groups[0].append(value)
                else:
                    # print_fn(f'pretrained bias : {name}')
                    groups[1].append(value)

            # scratched weights
            else:
                if 'weight' in name:
                    if print_fn is not None:
                        print_fn(f'scratched weights : {name}')
                    groups[2].append(value)
                else:
                    if print_fn is not None:
                        print_fn(f'scratched bias : {name}')
                    groups[3].append(value)
        return groups
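For context, a sketch of how get_parameter_groups is typically consumed: the four groups (pretrained weights/biases under attributes whose name contains 'model', scratch weights/biases for everything else) get separate learning rates and weight decay. TinyNet and the 1x/2x/10x/20x multipliers below are illustrative assumptions, not taken from this commit:

    import torch
    import torch.nn as nn
    from core.abc_modules import ABC_Model

    class TinyNet(nn.Module, ABC_Model):
        def __init__(self):
            super().__init__()
            self.model = nn.Conv2d(3, 8, 3)         # name contains 'model' -> "pretrained" group
            self.classifier = nn.Conv2d(8, 20, 1)   # anything else -> "scratch" group

    lr, wd = 0.007, 4e-4  # placeholder hyperparameters
    net = TinyNet()
    groups = net.get_parameter_groups(print_fn=None)
    optimizer = torch.optim.SGD([
        {'params': groups[0], 'lr': lr,      'weight_decay': wd},   # pretrained weights
        {'params': groups[1], 'lr': 2 * lr,  'weight_decay': 0.0},  # pretrained biases
        {'params': groups[2], 'lr': 10 * lr, 'weight_decay': wd},   # scratch weights
        {'params': groups[3], 'lr': 20 * lr, 'weight_decay': 0.0},  # scratch biases
    ], momentum=0.9)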
core/aff_utils.py
ADDED
@@ -0,0 +1,178 @@
import torch
import torch.nn.functional as F
import numpy as np

class PathIndex:
    def __init__(self, radius, default_size):
        self.radius = radius
        self.radius_floor = int(np.ceil(radius) - 1)

        self.search_paths, self.search_dst = self.get_search_paths_dst(self.radius)
        self.path_indices, self.src_indices, self.dst_indices = self.get_path_indices(default_size)

    def get_search_paths_dst(self, max_radius=5):
        coord_indices_by_length = [[] for _ in range(max_radius * 4)]

        search_dirs = []
        for x in range(1, max_radius):
            search_dirs.append((0, x))

        for y in range(1, max_radius):
            for x in range(-max_radius + 1, max_radius):
                if x * x + y * y < max_radius ** 2:
                    search_dirs.append((y, x))

        for dir in search_dirs:
            length_sq = dir[0] ** 2 + dir[1] ** 2
            path_coords = []

            min_y, max_y = sorted((0, dir[0]))
            min_x, max_x = sorted((0, dir[1]))

            for y in range(min_y, max_y + 1):
                for x in range(min_x, max_x + 1):
                    dist_sq = (dir[0] * x - dir[1] * y) ** 2 / length_sq

                    if dist_sq < 1:
                        path_coords.append([y, x])

            path_coords.sort(key=lambda x: -abs(x[0]) - abs(x[1]))
            path_length = len(path_coords)

            coord_indices_by_length[path_length].append(path_coords)

        path_list_by_length = [np.asarray(v) for v in coord_indices_by_length if v]
        path_destinations = np.concatenate([p[:, 0] for p in path_list_by_length], axis=0)

        return path_list_by_length, path_destinations

    def get_path_indices(self, size):
        full_indices = np.reshape(np.arange(0, size[0] * size[1], dtype=np.int64), (size[0], size[1]))

        cropped_height = size[0] - self.radius_floor
        cropped_width = size[1] - 2 * self.radius_floor

        path_indices = []
        for paths in self.search_paths:
            path_indices_list = []
            for p in paths:
                coord_indices_list = []

                for dy, dx in p:
                    coord_indices = full_indices[dy:dy + cropped_height,
                                                 self.radius_floor + dx:self.radius_floor + dx + cropped_width]
                    coord_indices = np.reshape(coord_indices, [-1])

                    coord_indices_list.append(coord_indices)

                path_indices_list.append(coord_indices_list)

            path_indices.append(np.array(path_indices_list))

        src_indices = np.reshape(full_indices[:cropped_height, self.radius_floor:self.radius_floor + cropped_width], -1)
        dst_indices = np.concatenate([p[:, 0] for p in path_indices], axis=0)

        return path_indices, src_indices, dst_indices


def edge_to_affinity(edge, paths_indices):
    aff_list = []
    edge = edge.view(edge.size(0), -1)

    for i in range(len(paths_indices)):
        if isinstance(paths_indices[i], np.ndarray):
            paths_indices[i] = torch.from_numpy(paths_indices[i])
        paths_indices[i] = paths_indices[i].cuda(non_blocking=True)

    for ind in paths_indices:
        ind_flat = ind.view(-1)
        dist = torch.index_select(edge, dim=-1, index=ind_flat)
        dist = dist.view(dist.size(0), ind.size(0), ind.size(1), ind.size(2))
        aff = torch.squeeze(1 - F.max_pool2d(dist, (dist.size(2), 1)), dim=2)
        aff_list.append(aff)
    aff_cat = torch.cat(aff_list, dim=1)

    return aff_cat


def affinity_sparse2dense(affinity_sparse, ind_from, ind_to, n_vertices):
    ind_from = torch.from_numpy(ind_from)
    ind_to = torch.from_numpy(ind_to)

    affinity_sparse = affinity_sparse.view(-1).cpu()
    ind_from = ind_from.repeat(ind_to.size(0)).view(-1)
    ind_to = ind_to.view(-1)

    indices = torch.stack([ind_from, ind_to])
    indices_tp = torch.stack([ind_to, ind_from])

    indices_id = torch.stack([torch.arange(0, n_vertices).long(), torch.arange(0, n_vertices).long()])

    affinity_dense = torch.sparse.FloatTensor(torch.cat([indices, indices_id, indices_tp], dim=1),
                                              torch.cat([affinity_sparse, torch.ones([n_vertices]), affinity_sparse])).to_dense().cuda()

    return affinity_dense


def to_transition_matrix(affinity_dense, beta, times):
    scaled_affinity = torch.pow(affinity_dense, beta)

    trans_mat = scaled_affinity / torch.sum(scaled_affinity, dim=0, keepdim=True)
    for _ in range(times):
        trans_mat = torch.matmul(trans_mat, trans_mat)

    return trans_mat

def propagate_to_edge(x, edge, radius=5, beta=10, exp_times=8):
    height, width = x.shape[-2:]

    hor_padded = width + radius * 2
    ver_padded = height + radius

    path_index = PathIndex(radius=radius, default_size=(ver_padded, hor_padded))

    edge_padded = F.pad(edge, (radius, radius, 0, radius), mode='constant', value=1.0)
    sparse_aff = edge_to_affinity(torch.unsqueeze(edge_padded, 0),
                                  path_index.path_indices)

    dense_aff = affinity_sparse2dense(sparse_aff, path_index.src_indices,
                                      path_index.dst_indices, ver_padded * hor_padded)
    dense_aff = dense_aff.view(ver_padded, hor_padded, ver_padded, hor_padded)
    dense_aff = dense_aff[:-radius, radius:-radius, :-radius, radius:-radius]
    dense_aff = dense_aff.reshape(height * width, height * width)

    trans_mat = to_transition_matrix(dense_aff, beta=beta, times=exp_times)

    x = x.view(-1, height, width) * (1 - edge)

    rw = torch.matmul(x.view(-1, height * width), trans_mat)
    rw = rw.view(rw.size(0), 1, height, width)

    return rw

class GetAffinityLabelFromIndices():
    def __init__(self, indices_from, indices_to):
        self.indices_from = indices_from
        self.indices_to = indices_to

    def __call__(self, segm_map):
        segm_map_flat = np.reshape(segm_map, -1)

        segm_label_from = np.expand_dims(segm_map_flat[self.indices_from], axis=0)
        segm_label_to = segm_map_flat[self.indices_to]

        valid_label = np.logical_and(np.less(segm_label_from, 21), np.less(segm_label_to, 21))

        equal_label = np.equal(segm_label_from, segm_label_to)

        pos_affinity_label = np.logical_and(equal_label, valid_label)

        bg_pos_affinity_label = np.logical_and(pos_affinity_label, np.equal(segm_label_from, 0)).astype(np.float32)
        fg_pos_affinity_label = np.logical_and(pos_affinity_label, np.greater(segm_label_from, 0)).astype(np.float32)

        neg_affinity_label = np.logical_and(np.logical_not(equal_label), valid_label).astype(np.float32)

        return torch.from_numpy(bg_pos_affinity_label), torch.from_numpy(fg_pos_affinity_label), torch.from_numpy(neg_affinity_label)
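A small usage sketch of the random-walk propagation above. The shapes are illustrative (21 class score maps over a 32x32 grid plus a boundary map in [0, 1]), and a CUDA device is assumed, since edge_to_affinity moves the path indices to the GPU:

    import torch
    from core.aff_utils import propagate_to_edge

    x = torch.rand(21, 32, 32).cuda()   # per-class score maps (e.g. CAMs)
    edge = torch.rand(32, 32).cuda()    # predicted boundary probability map
    rw = propagate_to_edge(x, edge, radius=5, beta=10, exp_times=8)
    print(rw.shape)                     # torch.Size([21, 1, 32, 32])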
core/arch_resnest/resnest.py
ADDED
@@ -0,0 +1,71 @@
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## Email: zhanghang0704@gmail.com
## Copyright (c) 2020
##
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
"""ResNeSt models"""

import torch
from .resnet import ResNet, Bottleneck

__all__ = ['resnest50', 'resnest101', 'resnest200', 'resnest269']

_url_format = 'https://github.com/zhanghang1989/ResNeSt/releases/download/weights_step1/{}-{}.pth'

_model_sha256 = {name: checksum for checksum, name in [
    ('528c19ca', 'resnest50'),
    ('22405ba7', 'resnest101'),
    ('75117900', 'resnest200'),
    ('0cc87c48', 'resnest269'),
]}

def short_hash(name):
    if name not in _model_sha256:
        raise ValueError('Pretrained model for {name} is not available.'.format(name=name))
    return _model_sha256[name][:8]

resnest_model_urls = {name: _url_format.format(name, short_hash(name)) for
                      name in _model_sha256.keys()
                      }

def resnest50(pretrained=False, root='~/.encoding/models', **kwargs):
    model = ResNet(Bottleneck, [3, 4, 6, 3],
                   radix=2, groups=1, bottleneck_width=64,
                   deep_stem=True, stem_width=32, avg_down=True,
                   avd=True, avd_first=False, **kwargs)
    if pretrained:
        model.load_state_dict(torch.hub.load_state_dict_from_url(
            resnest_model_urls['resnest50'], progress=True, check_hash=True))
    return model

def resnest101(pretrained=False, root='~/.encoding/models', **kwargs):
    model = ResNet(Bottleneck, [3, 4, 23, 3],
                   radix=2, groups=1, bottleneck_width=64,
                   deep_stem=True, stem_width=64, avg_down=True,
                   avd=True, avd_first=False, **kwargs)
    if pretrained:
        model.load_state_dict(torch.hub.load_state_dict_from_url(
            resnest_model_urls['resnest101'], progress=True, check_hash=True))
    return model

def resnest200(pretrained=False, root='~/.encoding/models', **kwargs):
    model = ResNet(Bottleneck, [3, 24, 36, 3],
                   radix=2, groups=1, bottleneck_width=64,
                   deep_stem=True, stem_width=64, avg_down=True,
                   avd=True, avd_first=False, **kwargs)
    if pretrained:
        model.load_state_dict(torch.hub.load_state_dict_from_url(
            resnest_model_urls['resnest200'], progress=True, check_hash=True))
    return model

def resnest269(pretrained=False, root='~/.encoding/models', **kwargs):
    model = ResNet(Bottleneck, [3, 30, 48, 8],
                   radix=2, groups=1, bottleneck_width=64,
                   deep_stem=True, stem_width=64, avg_down=True,
                   avd=True, avd_first=False, **kwargs)
    if pretrained:
        model.load_state_dict(torch.hub.load_state_dict_from_url(
            resnest_model_urls['resnest269'], progress=True, check_hash=True))
    return model
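A sketch of how these constructors are used. The extra keyword arguments are forwarded to ResNet in .resnet, so a dilated stride-8 backbone for segmentation can be requested directly; pretrained=False avoids the weight download here, and the input size is illustrative:

    import torch
    from core.arch_resnest.resnest import resnest101

    model = resnest101(pretrained=False, dilated=True, dilation=4)
    model.eval()
    with torch.no_grad():
        logits = model(torch.rand(1, 3, 224, 224))
    print(logits.shape)  # torch.Size([1, 1000]) - the classification head is still attached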
core/arch_resnest/resnet.py
ADDED
@@ -0,0 +1,308 @@
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## Email: zhanghang0704@gmail.com
## Copyright (c) 2020
##
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
"""ResNet variants"""
import math
import torch
import torch.nn as nn

from .splat import SplAtConv2d

__all__ = ['ResNet', 'Bottleneck']

class DropBlock2D(object):
    def __init__(self, *args, **kwargs):
        raise NotImplementedError

class GlobalAvgPool2d(nn.Module):
    def __init__(self):
        """Global average pooling over the input's spatial dimensions"""
        super(GlobalAvgPool2d, self).__init__()

    def forward(self, inputs):
        return nn.functional.adaptive_avg_pool2d(inputs, 1).view(inputs.size(0), -1)

class Bottleneck(nn.Module):
    """ResNet Bottleneck
    """
    # pylint: disable=unused-argument
    expansion = 4
    def __init__(self, inplanes, planes, stride=1, downsample=None,
                 radix=1, cardinality=1, bottleneck_width=64,
                 avd=False, avd_first=False, dilation=1, is_first=False,
                 rectified_conv=False, rectify_avg=False,
                 norm_layer=None, dropblock_prob=0.0, last_gamma=False):
        super(Bottleneck, self).__init__()
        group_width = int(planes * (bottleneck_width / 64.)) * cardinality
        self.conv1 = nn.Conv2d(inplanes, group_width, kernel_size=1, bias=False)
        self.bn1 = norm_layer(group_width)
        self.dropblock_prob = dropblock_prob
        self.radix = radix
        self.avd = avd and (stride > 1 or is_first)
        self.avd_first = avd_first

        if self.avd:
            self.avd_layer = nn.AvgPool2d(3, stride, padding=1)
            stride = 1

        if dropblock_prob > 0.0:
            self.dropblock1 = DropBlock2D(dropblock_prob, 3)
            if radix == 1:
                self.dropblock2 = DropBlock2D(dropblock_prob, 3)
            self.dropblock3 = DropBlock2D(dropblock_prob, 3)

        if radix >= 1:
            self.conv2 = SplAtConv2d(
                group_width, group_width, kernel_size=3,
                stride=stride, padding=dilation,
                dilation=dilation, groups=cardinality, bias=False,
                radix=radix, rectify=rectified_conv,
                rectify_avg=rectify_avg,
                norm_layer=norm_layer,
                dropblock_prob=dropblock_prob)
        elif rectified_conv:
            from rfconv import RFConv2d
            self.conv2 = RFConv2d(
                group_width, group_width, kernel_size=3, stride=stride,
                padding=dilation, dilation=dilation,
                groups=cardinality, bias=False,
                average_mode=rectify_avg)
            self.bn2 = norm_layer(group_width)
        else:
            self.conv2 = nn.Conv2d(
                group_width, group_width, kernel_size=3, stride=stride,
                padding=dilation, dilation=dilation,
                groups=cardinality, bias=False)
            self.bn2 = norm_layer(group_width)

        self.conv3 = nn.Conv2d(
            group_width, planes * 4, kernel_size=1, bias=False)
        self.bn3 = norm_layer(planes * 4)

        if last_gamma:
            from torch.nn.init import zeros_
            zeros_(self.bn3.weight)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.dilation = dilation
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        if self.dropblock_prob > 0.0:
            out = self.dropblock1(out)
        out = self.relu(out)

        if self.avd and self.avd_first:
            out = self.avd_layer(out)

        out = self.conv2(out)
        if self.radix == 0:
            out = self.bn2(out)
            if self.dropblock_prob > 0.0:
                out = self.dropblock2(out)
            out = self.relu(out)

        if self.avd and not self.avd_first:
            out = self.avd_layer(out)

        out = self.conv3(out)
        out = self.bn3(out)
        if self.dropblock_prob > 0.0:
            out = self.dropblock3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out

class ResNet(nn.Module):
    """ResNet Variants

    Parameters
    ----------
    block : Block
        Class for the residual block. Options are BasicBlockV1, BottleneckV1.
    layers : list of int
        Numbers of layers in each block
    classes : int, default 1000
        Number of classification classes.
    dilated : bool, default False
        Applying dilation strategy to pretrained ResNet yielding a stride-8 model,
        typically used in Semantic Segmentation.
    norm_layer : object
        Normalization layer used in backbone network (default: :class:`mxnet.gluon.nn.BatchNorm`;
        for Synchronized Cross-GPU BachNormalization).

    Reference:

        - He, Kaiming, et al. "Deep residual learning for image recognition." Proceedings of the IEEE conference on computer vision and pattern recognition. 2016.

        - Yu, Fisher, and Vladlen Koltun. "Multi-scale context aggregation by dilated convolutions."
    """
    # pylint: disable=unused-variable
    def __init__(self, block, layers, radix=1, groups=1, bottleneck_width=64,
                 num_classes=1000, dilated=False, dilation=1,
                 deep_stem=False, stem_width=64, avg_down=False,
                 rectified_conv=False, rectify_avg=False,
                 avd=False, avd_first=False,
                 final_drop=0.0, dropblock_prob=0,
                 last_gamma=False, norm_layer=nn.BatchNorm2d):
        self.cardinality = groups
        self.bottleneck_width = bottleneck_width
        # ResNet-D params
        self.inplanes = stem_width * 2 if deep_stem else 64
        self.avg_down = avg_down
        self.last_gamma = last_gamma
        # ResNeSt params
        self.radix = radix
        self.avd = avd
        self.avd_first = avd_first

        super(ResNet, self).__init__()
        self.rectified_conv = rectified_conv
        self.rectify_avg = rectify_avg
        if rectified_conv:
            from rfconv import RFConv2d
            conv_layer = RFConv2d
        else:
            conv_layer = nn.Conv2d
        conv_kwargs = {'average_mode': rectify_avg} if rectified_conv else {}
        if deep_stem:
            self.conv1 = nn.Sequential(
                conv_layer(3, stem_width, kernel_size=3, stride=2, padding=1, bias=False, **conv_kwargs),
                norm_layer(stem_width),
                nn.ReLU(inplace=True),
                conv_layer(stem_width, stem_width, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
                norm_layer(stem_width),
                nn.ReLU(inplace=True),
                conv_layer(stem_width, stem_width * 2, kernel_size=3, stride=1, padding=1, bias=False, **conv_kwargs),
            )
        else:
            self.conv1 = conv_layer(3, 64, kernel_size=7, stride=2, padding=3,
                                    bias=False, **conv_kwargs)
        self.bn1 = norm_layer(self.inplanes)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0], norm_layer=norm_layer, is_first=False)
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2, norm_layer=norm_layer)
        if dilated or dilation == 4:
            self.layer3 = self._make_layer(block, 256, layers[2], stride=1,
                                           dilation=2, norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)
            self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                           dilation=4, norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)
        elif dilation == 2:
            self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                           dilation=1, norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)
            self.layer4 = self._make_layer(block, 512, layers[3], stride=1,
                                           dilation=2, norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)
        else:
            self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
                                           norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)
            self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
                                           norm_layer=norm_layer,
                                           dropblock_prob=dropblock_prob)

        self.avgpool = GlobalAvgPool2d()
        self.drop = nn.Dropout(final_drop) if final_drop > 0.0 else None
        self.fc = nn.Linear(512 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, norm_layer):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1, norm_layer=None,
                    dropblock_prob=0.0, is_first=True):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            down_layers = []
            if self.avg_down:
                if dilation == 1:
                    down_layers.append(nn.AvgPool2d(kernel_size=stride, stride=stride,
                                                    ceil_mode=True, count_include_pad=False))
                else:
                    down_layers.append(nn.AvgPool2d(kernel_size=1, stride=1,
                                                    ceil_mode=True, count_include_pad=False))
                down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion,
                                             kernel_size=1, stride=1, bias=False))
            else:
                down_layers.append(nn.Conv2d(self.inplanes, planes * block.expansion,
                                             kernel_size=1, stride=stride, bias=False))
            down_layers.append(norm_layer(planes * block.expansion))
            downsample = nn.Sequential(*down_layers)

        layers = []
        if dilation == 1 or dilation == 2:
            layers.append(block(self.inplanes, planes, stride, downsample=downsample,
                                radix=self.radix, cardinality=self.cardinality,
                                bottleneck_width=self.bottleneck_width,
                                avd=self.avd, avd_first=self.avd_first,
                                dilation=1, is_first=is_first, rectified_conv=self.rectified_conv,
                                rectify_avg=self.rectify_avg,
                                norm_layer=norm_layer, dropblock_prob=dropblock_prob,
                                last_gamma=self.last_gamma))
        elif dilation == 4:
            layers.append(block(self.inplanes, planes, stride, downsample=downsample,
                                radix=self.radix, cardinality=self.cardinality,
                                bottleneck_width=self.bottleneck_width,
                                avd=self.avd, avd_first=self.avd_first,
                                dilation=2, is_first=is_first, rectified_conv=self.rectified_conv,
                                rectify_avg=self.rectify_avg,
                                norm_layer=norm_layer, dropblock_prob=dropblock_prob,
                                last_gamma=self.last_gamma))
        else:
            raise RuntimeError("=> unknown dilation size: {}".format(dilation))

        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes,
                                radix=self.radix, cardinality=self.cardinality,
                                bottleneck_width=self.bottleneck_width,
                                avd=self.avd, avd_first=self.avd_first,
                                dilation=dilation, rectified_conv=self.rectified_conv,
                                rectify_avg=self.rectify_avg,
                                norm_layer=norm_layer, dropblock_prob=dropblock_prob,
                                last_gamma=self.last_gamma))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        # print(x.size())

        x = self.avgpool(x)
        #x = x.view(x.size(0), -1)
        x = torch.flatten(x, 1)
        if self.drop:
            x = self.drop(x)
        x = self.fc(x)

        return x
core/arch_resnest/splat.py
ADDED
@@ -0,0 +1,99 @@
"""Split-Attention"""

import torch
from torch import nn
import torch.nn.functional as F
from torch.nn import Conv2d, Module, Linear, BatchNorm2d, ReLU
from torch.nn.modules.utils import _pair

__all__ = ['SplAtConv2d']

class SplAtConv2d(Module):
    """Split-Attention Conv2d
    """
    def __init__(self, in_channels, channels, kernel_size, stride=(1, 1), padding=(0, 0),
                 dilation=(1, 1), groups=1, bias=True,
                 radix=2, reduction_factor=4,
                 rectify=False, rectify_avg=False, norm_layer=None,
                 dropblock_prob=0.0, **kwargs):
        super(SplAtConv2d, self).__init__()
        padding = _pair(padding)
        self.rectify = rectify and (padding[0] > 0 or padding[1] > 0)
        self.rectify_avg = rectify_avg
        inter_channels = max(in_channels * radix // reduction_factor, 32)
        self.radix = radix
        self.cardinality = groups
        self.channels = channels
        self.dropblock_prob = dropblock_prob
        if self.rectify:
            from rfconv import RFConv2d
            self.conv = RFConv2d(in_channels, channels * radix, kernel_size, stride, padding, dilation,
                                 groups=groups * radix, bias=bias, average_mode=rectify_avg, **kwargs)
        else:
            self.conv = Conv2d(in_channels, channels * radix, kernel_size, stride, padding, dilation,
                               groups=groups * radix, bias=bias, **kwargs)
        self.use_bn = norm_layer is not None
        if self.use_bn:
            self.bn0 = norm_layer(channels * radix)
        self.relu = ReLU(inplace=True)
        self.fc1 = Conv2d(channels, inter_channels, 1, groups=self.cardinality)
        if self.use_bn:
            self.bn1 = norm_layer(inter_channels)
        self.fc2 = Conv2d(inter_channels, channels * radix, 1, groups=self.cardinality)
        if dropblock_prob > 0.0:
            # NOTE: DropBlock2D is neither defined nor imported in this module, so this
            # branch raises a NameError; it is only reached when dropblock_prob > 0
            # (the default throughout this repo is 0.0).
            self.dropblock = DropBlock2D(dropblock_prob, 3)
        self.rsoftmax = rSoftMax(radix, groups)

    def forward(self, x):
        x = self.conv(x)
        if self.use_bn:
            x = self.bn0(x)
        if self.dropblock_prob > 0.0:
            x = self.dropblock(x)
        x = self.relu(x)

        batch, rchannel = x.shape[:2]
        if self.radix > 1:
            if torch.__version__ < '1.5':
                splited = torch.split(x, int(rchannel // self.radix), dim=1)
            else:
                splited = torch.split(x, rchannel // self.radix, dim=1)
            gap = sum(splited)
        else:
            gap = x
        gap = F.adaptive_avg_pool2d(gap, 1)
        gap = self.fc1(gap)

        if self.use_bn:
            gap = self.bn1(gap)
        gap = self.relu(gap)

        atten = self.fc2(gap)
        atten = self.rsoftmax(atten).view(batch, -1, 1, 1)

        if self.radix > 1:
            if torch.__version__ < '1.5':
                attens = torch.split(atten, int(rchannel // self.radix), dim=1)
            else:
                attens = torch.split(atten, rchannel // self.radix, dim=1)
            out = sum([att * split for (att, split) in zip(attens, splited)])
        else:
            out = atten * x
        return out.contiguous()

class rSoftMax(nn.Module):
    def __init__(self, radix, cardinality):
        super().__init__()
        self.radix = radix
        self.cardinality = cardinality

    def forward(self, x):
        batch = x.size(0)
        if self.radix > 1:
            x = x.view(batch, self.cardinality, self.radix, -1).transpose(1, 2)
            x = F.softmax(x, dim=1)
            x = x.reshape(batch, -1)
        else:
            x = torch.sigmoid(x)
        return x
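A quick usage sketch of the split-attention block, configured the way Bottleneck above instantiates it (3x3 kernel, padding equal to the dilation, radix 2, a norm layer supplied). The batch and spatial sizes are illustrative:

    import torch
    import torch.nn as nn
    from core.arch_resnest.splat import SplAtConv2d

    conv = SplAtConv2d(64, 64, kernel_size=3, stride=1, padding=1,
                       dilation=1, groups=1, bias=False,
                       radix=2, norm_layer=nn.BatchNorm2d)
    out = conv(torch.rand(2, 64, 56, 56))
    print(out.shape)  # torch.Size([2, 64, 56, 56]) - spatial size preserved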
core/arch_resnet/resnet.py
ADDED
@@ -0,0 +1,157 @@
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo

urls_dic = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}

layers_dic = {
    'resnet18': [2, 2, 2, 2],
    'resnet34': [3, 4, 6, 3],
    'resnet50': [3, 4, 6, 3],
    'resnet101': [3, 4, 23, 3],
    'resnet152': [3, 8, 36, 3]
}

def conv3x3(in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1) -> nn.Conv2d:
    """3x3 convolution with padding"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=dilation, groups=groups, bias=False, dilation=dilation)

def conv1x1(in_planes: int, out_planes: int, stride: int = 1) -> nn.Conv2d:
    """1x1 convolution"""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)

class BasicBlock(nn.Module):
    expansion: int = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1, batch_norm_fn=nn.BatchNorm2d):
        super(BasicBlock, self).__init__()

        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = batch_norm_fn(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = batch_norm_fn(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        identity = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            identity = self.downsample(x)

        out += identity
        out = self.relu(out)

        return out

class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None, dilation=1, batch_norm_fn=nn.BatchNorm2d):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = batch_norm_fn(planes)

        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=dilation, bias=False, dilation=dilation)
        self.bn2 = batch_norm_fn(planes)

        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = batch_norm_fn(planes * 4)

        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
        self.dilation = dilation

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out

class ResNet(nn.Module):

    def __init__(self, block, layers, strides=(2, 2, 2, 2), dilations=(1, 1, 1, 1), batch_norm_fn=nn.BatchNorm2d):
        self.batch_norm_fn = batch_norm_fn

        self.inplanes = 64
        super(ResNet, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=strides[0], padding=3,
                               bias=False)
        self.bn1 = self.batch_norm_fn(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0], stride=1, dilation=dilations[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=strides[1], dilation=dilations[1])
        self.layer3 = self._make_layer(block, 256, layers[2], stride=strides[2], dilation=dilations[2])
        self.layer4 = self._make_layer(block, 512, layers[3], stride=strides[3], dilation=dilations[3])
        self.inplanes = 1024

        #self.avgpool = nn.AvgPool2d(7, stride=1)
        #self.fc = nn.Linear(512 * block.expansion, 1000)

    def _make_layer(self, block, planes, blocks, stride=1, dilation=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                self.batch_norm_fn(planes * block.expansion),
            )

        layers = [block(self.inplanes, planes, stride, downsample, dilation=1, batch_norm_fn=self.batch_norm_fn)]
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes, dilation=dilation, batch_norm_fn=self.batch_norm_fn))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        # NOTE: self.avgpool and self.fc are commented out in __init__ above, so
        # calling this forward directly raises an AttributeError; callers use the
        # stem and layer1-layer4 as a feature extractor instead.
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)

        return x
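Since the classification head is commented out, a sketch of the intended feature-extractor usage: build the trunk and read out stage features directly. The stride/dilation configuration below (last stage at stride 1 with dilation 2, a common output-stride-16 setup) and the input size are illustrative assumptions:

    import torch
    from core.arch_resnet.resnet import ResNet, Bottleneck

    net = ResNet(Bottleneck, [3, 4, 6, 3], strides=(2, 2, 2, 1), dilations=(1, 1, 1, 2))
    x = torch.rand(1, 3, 224, 224)
    x = net.maxpool(net.relu(net.bn1(net.conv1(x))))
    c2 = net.layer1(x)   # 1/4 resolution
    c3 = net.layer2(c2)  # 1/8
    c4 = net.layer3(c3)  # 1/16
    c5 = net.layer4(c4)  # 1/16 (stride 1, dilated)
    print(c5.shape)      # torch.Size([1, 2048, 14, 14])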
core/datasets.py
ADDED
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import cv2
|
3 |
+
import glob
|
4 |
+
import torch
|
5 |
+
|
6 |
+
import math
|
7 |
+
import imageio
|
8 |
+
import numpy as np
|
9 |
+
|
10 |
+
from PIL import Image
|
11 |
+
|
12 |
+
from core.aff_utils import *
|
13 |
+
|
14 |
+
from tools.ai.augment_utils import *
|
15 |
+
from tools.ai.torch_utils import one_hot_embedding
|
16 |
+
|
17 |
+
from tools.general.xml_utils import read_xml
|
18 |
+
from tools.general.json_utils import read_json
|
19 |
+
from tools.dataset.voc_utils import get_color_map_dic
|
20 |
+
|
21 |
+
class Iterator:
|
22 |
+
def __init__(self, loader):
|
23 |
+
self.loader = loader
|
24 |
+
self.init()
|
25 |
+
|
26 |
+
def init(self):
|
27 |
+
self.iterator = iter(self.loader)
|
28 |
+
|
29 |
+
def get(self):
|
30 |
+
try:
|
31 |
+
data = next(self.iterator)
|
32 |
+
except StopIteration:
|
33 |
+
self.init()
|
34 |
+
data = next(self.iterator)
|
35 |
+
|
36 |
+
return data
|
37 |
+
|
38 |
+
class VOC_Dataset(torch.utils.data.Dataset):
|
39 |
+
def __init__(self, root_dir, domain, with_id=False, with_tags=False, with_mask=False):
|
40 |
+
self.root_dir = root_dir
|
41 |
+
|
42 |
+
self.image_dir = self.root_dir + 'JPEGImages/'
|
43 |
+
self.xml_dir = self.root_dir + 'Annotations/'
|
44 |
+
self.mask_dir = self.root_dir + 'SegmentationClass/'
|
45 |
+
|
46 |
+
self.image_id_list = [image_id.strip() for image_id in open('./data/%s.txt'%domain).readlines()]
|
47 |
+
|
48 |
+
self.with_id = with_id
|
49 |
+
self.with_tags = with_tags
|
50 |
+
self.with_mask = with_mask
|
51 |
+
|
52 |
+
def __len__(self):
|
53 |
+
return len(self.image_id_list)
|
54 |
+
|
55 |
+
def get_image(self, image_id):
|
56 |
+
image = Image.open(self.image_dir + image_id + '.jpg').convert('RGB')
|
57 |
+
return image
|
58 |
+
|
59 |
+
def get_mask(self, image_id):
|
60 |
+
mask_path = self.mask_dir + image_id + '.png'
|
61 |
+
if os.path.isfile(mask_path):
|
62 |
+
mask = Image.open(mask_path)
|
63 |
+
else:
|
64 |
+
mask = None
|
65 |
+
return mask
|
66 |
+
|
67 |
+
def get_tags(self, image_id):
|
68 |
+
_, tags = read_xml(self.xml_dir + image_id + '.xml')
|
69 |
+
return tags
|
70 |
+
|
71 |
+
def __getitem__(self, index):
|
72 |
+
image_id = self.image_id_list[index]
|
73 |
+
|
74 |
+
data_list = [self.get_image(image_id)]
|
75 |
+
|
76 |
+
if self.with_id:
|
77 |
+
data_list.append(image_id)
|
78 |
+
|
79 |
+
if self.with_tags:
|
80 |
+
data_list.append(self.get_tags(image_id))
|
81 |
+
|
82 |
+
if self.with_mask:
|
83 |
+
data_list.append(self.get_mask(image_id))
|
84 |
+
|
85 |
+
return data_list
|
86 |
+
|
87 |
+
class VOC_Dataset_For_Classification(VOC_Dataset):
|
88 |
+
def __init__(self, root_dir, domain, transform=None):
|
89 |
+
super().__init__(root_dir, domain, with_tags=True)
|
90 |
+
self.transform = transform
|
91 |
+
|
92 |
+
data = read_json('./data/VOC_2012.json')
|
93 |
+
|
94 |
+
self.class_dic = data['class_dic']
|
95 |
+
self.classes = data['classes']
|
96 |
+
|
97 |
+
def __getitem__(self, index):
|
98 |
+
image, tags = super().__getitem__(index)
|
99 |
+
|
100 |
+
if self.transform is not None:
|
101 |
+
image = self.transform(image)
|
102 |
+
|
103 |
+
label = one_hot_embedding([self.class_dic[tag] for tag in tags], self.classes)
|
104 |
+
return image, label
|
105 |
+
|
106 |
+
class VOC_Dataset_For_Segmentation(VOC_Dataset):
|
107 |
+
def __init__(self, root_dir, domain, transform=None):
|
108 |
+
super().__init__(root_dir, domain, with_mask=True)
|
109 |
+
self.transform = transform
|
110 |
+
|
111 |
+
cmap_dic, _, class_names = get_color_map_dic()
|
112 |
+
self.colors = np.asarray([cmap_dic[class_name] for class_name in class_names])
|
113 |
+
|
114 |
+
def __getitem__(self, index):
|
115 |
+
image, mask = super().__getitem__(index)
|
116 |
+
|
117 |
+
if self.transform is not None:
|
118 |
+
input_dic = {'image':image, 'mask':mask}
|
119 |
+
output_dic = self.transform(input_dic)
|
120 |
+
|
121 |
+
image = output_dic['image']
|
122 |
+
mask = output_dic['mask']
|
123 |
+
|
124 |
+
return image, mask
|
125 |
+
|
126 |
+
class VOC_Dataset_For_Evaluation(VOC_Dataset):
|
127 |
+
def __init__(self, root_dir, domain, transform=None):
|
128 |
+
super().__init__(root_dir, domain, with_id=True, with_mask=True)
|
129 |
+
self.transform = transform
|
130 |
+
|
131 |
+
cmap_dic, _, class_names = get_color_map_dic()
|
132 |
+
self.colors = np.asarray([cmap_dic[class_name] for class_name in class_names])
|
133 |
+
|
134 |
+
def __getitem__(self, index):
|
135 |
+
image, image_id, mask = super().__getitem__(index)
|
136 |
+
|
137 |
+
if self.transform is not None:
|
138 |
+
input_dic = {'image':image, 'mask':mask}
|
139 |
+
            output_dic = self.transform(input_dic)

            image = output_dic['image']
            mask = output_dic['mask']

        return image, image_id, mask

class VOC_Dataset_For_WSSS(VOC_Dataset):
    def __init__(self, root_dir, domain, pred_dir, transform=None):
        super().__init__(root_dir, domain, with_id=True)
        self.pred_dir = pred_dir
        self.transform = transform

        cmap_dic, _, class_names = get_color_map_dic()
        self.colors = np.asarray([cmap_dic[class_name] for class_name in class_names])

    def __getitem__(self, index):
        image, image_id = super().__getitem__(index)
        mask = Image.open(self.pred_dir + image_id + '.png')

        if self.transform is not None:
            input_dic = {'image':image, 'mask':mask}
            output_dic = self.transform(input_dic)

            image = output_dic['image']
            mask = output_dic['mask']

        return image, mask

class VOC_Dataset_For_Testing_CAM(VOC_Dataset):
    def __init__(self, root_dir, domain, transform=None):
        super().__init__(root_dir, domain, with_tags=True, with_mask=True)
        self.transform = transform

        cmap_dic, _, class_names = get_color_map_dic()
        self.colors = np.asarray([cmap_dic[class_name] for class_name in class_names])

        data = read_json('./data/VOC_2012.json')

        self.class_dic = data['class_dic']
        self.classes = data['classes']

    def __getitem__(self, index):
        image, tags, mask = super().__getitem__(index)

        if self.transform is not None:
            input_dic = {'image':image, 'mask':mask}
            output_dic = self.transform(input_dic)

            image = output_dic['image']
            mask = output_dic['mask']

        label = one_hot_embedding([self.class_dic[tag] for tag in tags], self.classes)
        return image, label, mask

class VOC_Dataset_For_Making_CAM(VOC_Dataset):
    def __init__(self, root_dir, domain):
        super().__init__(root_dir, domain, with_id=True, with_tags=True, with_mask=True)

        cmap_dic, _, class_names = get_color_map_dic()
        self.colors = np.asarray([cmap_dic[class_name] for class_name in class_names])

        data = read_json('./data/VOC_2012.json')

        self.class_names = np.asarray(class_names[1:21])
        self.class_dic = data['class_dic']
        self.classes = data['classes']

    def __getitem__(self, index):
        image, image_id, tags, mask = super().__getitem__(index)

        label = one_hot_embedding([self.class_dic[tag] for tag in tags], self.classes)
        return image, image_id, label, mask

class VOC_Dataset_For_Affinity(VOC_Dataset):
    def __init__(self, root_dir, domain, path_index, label_dir, transform=None):
        super().__init__(root_dir, domain, with_id=True)

        data = read_json('./data/VOC_2012.json')

        self.class_dic = data['class_dic']
        self.classes = data['classes']

        self.transform = transform

        self.label_dir = label_dir
        self.path_index = path_index

        self.extract_aff_lab_func = GetAffinityLabelFromIndices(self.path_index.src_indices, self.path_index.dst_indices)

    def __getitem__(self, idx):
        image, image_id = super().__getitem__(idx)

        label = imageio.imread(self.label_dir + image_id + '.png')
        label = Image.fromarray(label)

        output_dic = self.transform({'image':image, 'mask':label})
        image, label = output_dic['image'], output_dic['mask']

        return image, self.extract_aff_lab_func(label)
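For orientation, a minimal sketch of how these dataset classes are typically consumed through a PyTorch DataLoader. The VOC root path, the domain string, and the pass-through collate function below are placeholders, not values taken from this repository; without a transform, samples arrive as raw PIL images of varying sizes, hence batch_size=1.

from torch.utils.data import DataLoader
from core.datasets import VOC_Dataset_For_Making_CAM

# Hypothetical VOC root; point this at your own VOCdevkit/VOC2012 directory.
dataset = VOC_Dataset_For_Making_CAM(
    root_dir='./VOCdevkit/VOC2012/',
    domain='train_aug',
)
# batch_size=1 because untransformed PIL images of different sizes cannot be stacked.
loader = DataLoader(dataset, batch_size=1, num_workers=0, collate_fn=lambda batch: batch[0])

image, image_id, label, mask = next(iter(loader))
print(image_id, label.shape)  # label is a 20-dim one-hot vector of image-level tags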
core/deeplab_utils.py
ADDED
@@ -0,0 +1,126 @@
# Copyright (C) 2021 * Ltd. All rights reserved.
# author : Sanghyeon Jo <josanghyeokn@gmail.com>

import torch
import torch.nn as nn
import torch.nn.functional as F

class ASPPModule(nn.Module):
    def __init__(self, inplanes, planes, kernel_size, padding, dilation, norm_fn=None):
        super().__init__()
        self.atrous_conv = nn.Conv2d(inplanes, planes, kernel_size=kernel_size, stride=1, padding=padding, dilation=dilation, bias=False)
        self.bn = norm_fn(planes)
        self.relu = nn.ReLU(inplace=True)

        self.initialize([self.atrous_conv, self.bn])

    def forward(self, x):
        x = self.atrous_conv(x)
        x = self.bn(x)
        return self.relu(x)

    def initialize(self, modules):
        for m in modules:
            if isinstance(m, nn.Conv2d):
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

class ASPP(nn.Module):
    def __init__(self, output_stride, norm_fn):
        super().__init__()

        inplanes = 2048

        if output_stride == 16:
            dilations = [1, 6, 12, 18]
        elif output_stride == 8:
            dilations = [1, 12, 24, 36]

        self.aspp1 = ASPPModule(inplanes, 256, 1, padding=0, dilation=dilations[0], norm_fn=norm_fn)
        self.aspp2 = ASPPModule(inplanes, 256, 3, padding=dilations[1], dilation=dilations[1], norm_fn=norm_fn)
        self.aspp3 = ASPPModule(inplanes, 256, 3, padding=dilations[2], dilation=dilations[2], norm_fn=norm_fn)
        self.aspp4 = ASPPModule(inplanes, 256, 3, padding=dilations[3], dilation=dilations[3], norm_fn=norm_fn)

        self.global_avg_pool = nn.Sequential(
            nn.AdaptiveAvgPool2d((1, 1)),
            nn.Conv2d(inplanes, 256, 1, stride=1, bias=False),
            norm_fn(256),
            nn.ReLU(inplace=True),
        )

        self.conv1 = nn.Conv2d(1280, 256, 1, bias=False)
        self.bn1 = norm_fn(256)
        self.relu = nn.ReLU(inplace=True)
        self.dropout = nn.Dropout(0.5)

        self.initialize([self.conv1, self.bn1] + list(self.global_avg_pool.modules()))

    def forward(self, x):
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)

        x5 = self.global_avg_pool(x)
        x5 = F.interpolate(x5, size=x4.size()[2:], mode='bilinear', align_corners=True)

        x = torch.cat((x1, x2, x3, x4, x5), dim=1)

        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.dropout(x)

        return x

    def initialize(self, modules):
        for m in modules:
            if isinstance(m, nn.Conv2d):
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

class Decoder(nn.Module):
    def __init__(self, num_classes, low_level_inplanes, norm_fn):
        super().__init__()

        self.conv1 = nn.Conv2d(low_level_inplanes, 48, 1, bias=False)
        self.bn1 = norm_fn(48)
        self.relu = nn.ReLU(inplace=True)

        self.classifier = nn.Sequential(
            nn.Conv2d(304, 256, kernel_size=3, stride=1, padding=1, bias=False),
            norm_fn(256),
            nn.ReLU(inplace=True),
            nn.Dropout(0.5),

            nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=False),
            norm_fn(256),
            nn.ReLU(inplace=True),
            nn.Dropout(0.1),
            nn.Conv2d(256, num_classes, kernel_size=1, stride=1)
        )

        self.initialize([self.conv1, self.bn1] + list(self.classifier.modules()))

    def forward(self, x, x_low_level):
        x_low_level = self.conv1(x_low_level)
        x_low_level = self.bn1(x_low_level)
        x_low_level = self.relu(x_low_level)

        x = F.interpolate(x, size=x_low_level.size()[2:], mode='bilinear', align_corners=True)
        x = torch.cat((x, x_low_level), dim=1)
        x = self.classifier(x)

        return x

    def initialize(self, modules):
        for m in modules:
            if isinstance(m, nn.Conv2d):
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
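A quick shape walkthrough for the two modules above, assuming a ResNet-style backbone where the high-level feature map has 2048 channels at stride 16 and the low-level map has 256 channels at stride 4; these sizes are implied by `inplanes = 2048` and the 304 = 256 + 48 concatenation in `Decoder`, not stated elsewhere in the diff. The input tensors are synthetic stand-ins.

import torch
import torch.nn as nn
from core.deeplab_utils import ASPP, Decoder

aspp = ASPP(output_stride=16, norm_fn=nn.BatchNorm2d)
decoder = Decoder(num_classes=21, low_level_inplanes=256, norm_fn=nn.BatchNorm2d)

x_high = torch.randn(2, 2048, 32, 32)   # stride-16 features of a 512x512 input
x_low = torch.randn(2, 256, 128, 128)   # stride-4 features from an early stage

y = aspp(x_high)         # -> (2, 256, 32, 32): five branches fused by the 1280->256 conv
out = decoder(y, x_low)  # -> (2, 21, 128, 128): upsample, concat (256+48), classify
print(y.shape, out.shape)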
core/networks.py
ADDED
@@ -0,0 +1,355 @@
# Copyright (C) 2021 * Ltd. All rights reserved.
# author : Sanghyeon Jo <josanghyeokn@gmail.com>

import math

import torch
import torch.nn as nn
import torch.nn.functional as F

from torchvision import models
import torch.utils.model_zoo as model_zoo

from .arch_resnet import resnet
from .arch_resnest import resnest
from .abc_modules import ABC_Model

from .deeplab_utils import ASPP, Decoder
from .aff_utils import PathIndex
from .puzzle_utils import tile_features, merge_features

from tools.ai.torch_utils import resize_for_tensors

#######################################################################
# Normalization
#######################################################################
from .sync_batchnorm.batchnorm import SynchronizedBatchNorm2d

class FixedBatchNorm(nn.BatchNorm2d):
    def forward(self, x):
        return F.batch_norm(x, self.running_mean, self.running_var, self.weight, self.bias, training=False, eps=self.eps)

def group_norm(features):
    return nn.GroupNorm(4, features)
#######################################################################

class Backbone(nn.Module, ABC_Model):
    def __init__(self, model_name, state_path=None, num_classes=20, mode='fix', segmentation=False):
        super().__init__()

        self.mode = mode

        if self.mode == 'fix':
            self.norm_fn = FixedBatchNorm
        else:
            self.norm_fn = nn.BatchNorm2d

        if 'resnet' in model_name:
            self.model = resnet.ResNet(resnet.Bottleneck, resnet.layers_dic[model_name], strides=(2, 2, 2, 1), batch_norm_fn=self.norm_fn)

            # Load a local checkpoint only when a path is given; several
            # subclasses construct the backbone without one.
            if state_path is not None:
                state_dict = torch.load(state_path)
                self.model.load_state_dict(state_dict, strict=False)
        else:
            if segmentation:
                dilation, dilated = 4, True
            else:
                dilation, dilated = 2, False

            self.model = eval("resnest." + model_name)(pretrained=True, dilated=dilated, dilation=dilation, norm_layer=self.norm_fn)

            del self.model.avgpool
            del self.model.fc

        self.stage1 = nn.Sequential(self.model.conv1,
                                    self.model.bn1,
                                    self.model.relu,
                                    self.model.maxpool)
        self.stage2 = nn.Sequential(self.model.layer1)
        self.stage3 = nn.Sequential(self.model.layer2)
        self.stage4 = nn.Sequential(self.model.layer3)
        self.stage5 = nn.Sequential(self.model.layer4)

class Classifier(Backbone):
    def __init__(self, model_name, state_path, num_classes=20, mode='fix'):
        super().__init__(model_name, state_path, num_classes, mode)

        self.classifier = nn.Conv2d(2048, num_classes, 1, bias=False)
        self.num_classes = num_classes

        self.initialize([self.classifier])

    def forward(self, x, with_cam=False):
        x = self.stage1(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.stage5(x)

        if with_cam:
            features = self.classifier(x)
            logits = self.global_average_pooling_2d(features)
            return logits, features
        else:
            x = self.global_average_pooling_2d(x, keepdims=True)
            logits = self.classifier(x).view(-1, self.num_classes)
            return logits

class Classifier_For_Positive_Pooling(Backbone):
    def __init__(self, model_name, num_classes=20, mode='fix'):
        # Keyword arguments keep num_classes/mode aligned with Backbone's
        # (model_name, state_path, num_classes, mode, segmentation) signature.
        super().__init__(model_name, num_classes=num_classes, mode=mode)

        self.classifier = nn.Conv2d(2048, num_classes, 1, bias=False)
        self.num_classes = num_classes

        self.initialize([self.classifier])

    def forward(self, x, with_cam=False):
        x = self.stage1(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.stage5(x)

        if with_cam:
            features = self.classifier(x)
            logits = self.global_average_pooling_2d(features)
            return logits, features
        else:
            x = self.global_average_pooling_2d(x, keepdims=True)
            logits = self.classifier(x).view(-1, self.num_classes)
            return logits

class Classifier_For_Puzzle(Classifier):
    def __init__(self, model_name, num_classes=20, mode='fix'):
        super().__init__(model_name, None, num_classes, mode)

    def forward(self, x, num_pieces=1, level=-1):
        batch_size = x.size()[0]

        output_dic = {}
        layers = [self.stage1, self.stage2, self.stage3, self.stage4, self.stage5, self.classifier]

        for l, layer in enumerate(layers):
            l += 1
            if level == l:
                x = tile_features(x, num_pieces)

            x = layer(x)
            output_dic['stage%d'%l] = x

        output_dic['logits'] = self.global_average_pooling_2d(output_dic['stage6'])

        for l in range(len(layers)):
            l += 1
            if l >= level:
                output_dic['stage%d'%l] = merge_features(output_dic['stage%d'%l], num_pieces, batch_size)

        if level is not None:
            output_dic['merged_logits'] = self.global_average_pooling_2d(output_dic['stage6'])

        return output_dic

class AffinityNet(Backbone):
    def __init__(self, model_name, path_index=None):
        super().__init__(model_name, mode='fix')

        if '50' in model_name:
            fc_edge1_features = 64
        else:
            fc_edge1_features = 128

        self.fc_edge1 = nn.Sequential(
            nn.Conv2d(fc_edge1_features, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.ReLU(inplace=True),
        )
        self.fc_edge2 = nn.Sequential(
            nn.Conv2d(256, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.ReLU(inplace=True),
        )
        self.fc_edge3 = nn.Sequential(
            nn.Conv2d(512, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge4 = nn.Sequential(
            nn.Conv2d(1024, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=4, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge5 = nn.Sequential(
            nn.Conv2d(2048, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=4, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge6 = nn.Conv2d(160, 1, 1, bias=True)

        self.backbone = nn.ModuleList([self.stage1, self.stage2, self.stage3, self.stage4, self.stage5])
        self.edge_layers = nn.ModuleList([self.fc_edge1, self.fc_edge2, self.fc_edge3, self.fc_edge4, self.fc_edge5, self.fc_edge6])

        if path_index is not None:
            self.path_index = path_index
            self.n_path_lengths = len(self.path_index.path_indices)
            for i, pi in enumerate(self.path_index.path_indices):
                self.register_buffer("path_indices_" + str(i), torch.from_numpy(pi))

    def train(self, mode=True):
        super().train(mode)
        self.backbone.eval()

    def forward(self, x, with_affinity=False):
        x1 = self.stage1(x).detach()
        x2 = self.stage2(x1).detach()
        x3 = self.stage3(x2).detach()
        x4 = self.stage4(x3).detach()
        x5 = self.stage5(x4).detach()

        edge1 = self.fc_edge1(x1)
        edge2 = self.fc_edge2(x2)
        edge3 = self.fc_edge3(x3)[..., :edge2.size(2), :edge2.size(3)]
        edge4 = self.fc_edge4(x4)[..., :edge2.size(2), :edge2.size(3)]
        edge5 = self.fc_edge5(x5)[..., :edge2.size(2), :edge2.size(3)]

        edge = self.fc_edge6(torch.cat([edge1, edge2, edge3, edge4, edge5], dim=1))

        if with_affinity:
            return edge, self.to_affinity(torch.sigmoid(edge))
        else:
            return edge

    def get_edge(self, x, image_size=512, stride=4):
        feat_size = (x.size(2)-1)//stride+1, (x.size(3)-1)//stride+1

        x = F.pad(x, [0, image_size-x.size(3), 0, image_size-x.size(2)])
        edge_out = self.forward(x)
        edge_out = edge_out[..., :feat_size[0], :feat_size[1]]
        edge_out = torch.sigmoid(edge_out[0]/2 + edge_out[1].flip(-1)/2)

        return edge_out

    """
    aff = self.to_affinity(torch.sigmoid(edge_out))
    pos_aff_loss = (-1) * torch.log(aff + 1e-5)
    neg_aff_loss = (-1) * torch.log(1. + 1e-5 - aff)
    """
    def to_affinity(self, edge):
        aff_list = []
        edge = edge.view(edge.size(0), -1)

        for i in range(self.n_path_lengths):
            ind = self._buffers["path_indices_" + str(i)]
            ind_flat = ind.view(-1)
            dist = torch.index_select(edge, dim=-1, index=ind_flat)
            dist = dist.view(dist.size(0), ind.size(0), ind.size(1), ind.size(2))
            aff = torch.squeeze(1 - F.max_pool2d(dist, (dist.size(2), 1)), dim=2)
            aff_list.append(aff)
        aff_cat = torch.cat(aff_list, dim=1)
        return aff_cat

class DeepLabv3_Plus(Backbone):
    def __init__(self, model_name, num_classes=21, mode='fix', use_group_norm=False):
        super().__init__(model_name, num_classes=num_classes, mode=mode, segmentation=False)

        if use_group_norm:
            norm_fn_for_extra_modules = group_norm
        else:
            norm_fn_for_extra_modules = self.norm_fn

        self.aspp = ASPP(output_stride=16, norm_fn=norm_fn_for_extra_modules)
        self.decoder = Decoder(num_classes, 256, norm_fn_for_extra_modules)

    def forward(self, x, with_cam=False):
        inputs = x

        x = self.stage1(x)
        x = self.stage2(x)
        x_low_level = x

        x = self.stage3(x)
        x = self.stage4(x)
        x = self.stage5(x)

        x = self.aspp(x)
        x = self.decoder(x, x_low_level)
        x = resize_for_tensors(x, inputs.size()[2:], align_corners=True)

        return x

class Seg_Model(Backbone):
    def __init__(self, model_name, num_classes=21):
        super().__init__(model_name, num_classes=num_classes, mode='fix', segmentation=False)

        self.classifier = nn.Conv2d(2048, num_classes, 1, bias=False)

    def forward(self, inputs):
        x = self.stage1(inputs)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.stage5(x)

        logits = self.classifier(x)
        # logits = resize_for_tensors(logits, inputs.size()[2:], align_corners=False)

        return logits

class CSeg_Model(Backbone):
    def __init__(self, model_name, num_classes=21):
        super().__init__(model_name, num_classes=num_classes, mode='fix')

        if '50' in model_name:
            fc_edge1_features = 64
        else:
            fc_edge1_features = 128

        self.fc_edge1 = nn.Sequential(
            nn.Conv2d(fc_edge1_features, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.ReLU(inplace=True),
        )
        self.fc_edge2 = nn.Sequential(
            nn.Conv2d(256, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.ReLU(inplace=True),
        )
        self.fc_edge3 = nn.Sequential(
            nn.Conv2d(512, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge4 = nn.Sequential(
            nn.Conv2d(1024, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=4, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge5 = nn.Sequential(
            nn.Conv2d(2048, 32, 1, bias=False),
            nn.GroupNorm(4, 32),
            nn.Upsample(scale_factor=4, mode='bilinear', align_corners=False),
            nn.ReLU(inplace=True),
        )
        self.fc_edge6 = nn.Conv2d(160, num_classes, 1, bias=True)

    def forward(self, x):
        x1 = self.stage1(x)
        x2 = self.stage2(x1)
        x3 = self.stage3(x2)
        x4 = self.stage4(x3)
        x5 = self.stage5(x4)

        edge1 = self.fc_edge1(x1)
        edge2 = self.fc_edge2(x2)
        edge3 = self.fc_edge3(x3)[..., :edge2.size(2), :edge2.size(3)]
        edge4 = self.fc_edge4(x4)[..., :edge2.size(2), :edge2.size(3)]
        edge5 = self.fc_edge5(x5)[..., :edge2.size(2), :edge2.size(3)]

        logits = self.fc_edge6(torch.cat([edge1, edge2, edge3, edge4, edge5], dim=1))
        # logits = resize_for_tensors(logits, x.size()[2:], align_corners=True)

        return logits
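The `with_cam=True` branch of `Classifier.forward` is what the CAM-generation scripts build on: the same 1x1 classifier that produces the logits is applied before pooling, so its per-class response maps can be read off directly. Below is a sketch of the usual post-processing (ReLU followed by per-class max normalization); this normalization scheme is the common CAM recipe and an assumption here, not something defined in this diff, and the input tensor is a synthetic stand-in for the classifier's output.

import torch
import torch.nn.functional as F

def make_cams(features, image_size):
    """features: (N, C, h, w) class response maps from Classifier(..., with_cam=True)."""
    cams = F.relu(features)
    # Per-class max normalization into [0, 1] (common CAM recipe; assumed here).
    maxima = cams.flatten(2).max(dim=2).values.clamp(min=1e-5)
    cams = cams / maxima[:, :, None, None]
    # Upsample to the input resolution for visualization or pseudo-labels.
    return F.interpolate(cams, size=image_size, mode='bilinear', align_corners=False)

features = torch.randn(1, 20, 32, 32)  # stand-in for the classifier's output
cams = make_cams(features, (512, 512))
print(cams.shape)  # (1, 20, 512, 512), values in [0, 1]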
core/puzzle_utils.py
ADDED
@@ -0,0 +1,69 @@
import math

import torch
import torch.nn.functional as F

def tile_features(features, num_pieces):
    _, _, h, w = features.size()

    num_pieces_per_line = int(math.sqrt(num_pieces))

    h_per_patch = h // num_pieces_per_line
    w_per_patch = w // num_pieces_per_line

    """
    +-----+-----+
    |  1  |  2  |
    +-----+-----+
    |  3  |  4  |
    +-----+-----+

    +-----+-----+-----+-----+
    |  1  |  2  |  3  |  4  |
    +-----+-----+-----+-----+
    """
    patches = []
    for splitted_features in torch.split(features, h_per_patch, dim=2):
        for patch in torch.split(splitted_features, w_per_patch, dim=3):
            patches.append(patch)

    return torch.cat(patches, dim=0)

def merge_features(features, num_pieces, batch_size):
    """
    +-----+-----+-----+-----+
    |  1  |  2  |  3  |  4  |
    +-----+-----+-----+-----+

    +-----+-----+
    |  1  |  2  |
    +-----+-----+
    |  3  |  4  |
    +-----+-----+
    """
    features_list = list(torch.split(features, batch_size))
    num_pieces_per_line = int(math.sqrt(num_pieces))

    index = 0
    ext_h_list = []

    for _ in range(num_pieces_per_line):

        ext_w_list = []
        for _ in range(num_pieces_per_line):
            ext_w_list.append(features_list[index])
            index += 1

        ext_h_list.append(torch.cat(ext_w_list, dim=3))

    features = torch.cat(ext_h_list, dim=2)
    return features

def puzzle_module(x, func_list, num_pieces):
    tiled_x = tile_features(x, num_pieces)

    for func in func_list:
        tiled_x = func(tiled_x)

    merged_x = merge_features(tiled_x, num_pieces, x.size()[0])
    return merged_x
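`tile_features` and `merge_features` are exact inverses whenever the spatial size divides evenly by sqrt(num_pieces), which is what lets the puzzle module reassemble per-tile predictions into a full-image map. A small round-trip check:

import torch
from core.puzzle_utils import tile_features, merge_features

x = torch.arange(2 * 3 * 8 * 8, dtype=torch.float32).view(2, 3, 8, 8)

tiles = tile_features(x, num_pieces=4)   # (8, 3, 4, 4): four tiles per image, stacked on batch
merged = merge_features(tiles, num_pieces=4, batch_size=2)

assert torch.equal(merged, x)            # tiling then merging is lossless
print(tiles.shape, merged.shape)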
core/sync_batchnorm/__init__.py
ADDED
@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
# File : __init__.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 27/01/2018
#
# This file is part of Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
# Distributed under MIT License.

from .batchnorm import SynchronizedBatchNorm1d, SynchronizedBatchNorm2d, SynchronizedBatchNorm3d
from .replicate import DataParallelWithCallback, patch_replication_callback
core/sync_batchnorm/batchnorm.py
ADDED
@@ -0,0 +1,282 @@
# -*- coding: utf-8 -*-
# File : batchnorm.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 27/01/2018
#
# This file is part of Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
# Distributed under MIT License.

import collections

import torch
import torch.nn.functional as F

from torch.nn.modules.batchnorm import _BatchNorm
from torch.nn.parallel._functions import ReduceAddCoalesced, Broadcast

from .comm import SyncMaster

__all__ = ['SynchronizedBatchNorm1d', 'SynchronizedBatchNorm2d', 'SynchronizedBatchNorm3d']


def _sum_ft(tensor):
    """sum over the first and last dimension"""
    return tensor.sum(dim=0).sum(dim=-1)


def _unsqueeze_ft(tensor):
    """add new dimensions at the front and the tail"""
    return tensor.unsqueeze(0).unsqueeze(-1)


_ChildMessage = collections.namedtuple('_ChildMessage', ['sum', 'ssum', 'sum_size'])
_MasterMessage = collections.namedtuple('_MasterMessage', ['sum', 'inv_std'])


class _SynchronizedBatchNorm(_BatchNorm):
    def __init__(self, num_features, eps=1e-5, momentum=0.1, affine=True):
        super(_SynchronizedBatchNorm, self).__init__(num_features, eps=eps, momentum=momentum, affine=affine)

        self._sync_master = SyncMaster(self._data_parallel_master)

        self._is_parallel = False
        self._parallel_id = None
        self._slave_pipe = None

    def forward(self, input):
        # If it is not parallel computation or is in evaluation mode, use PyTorch's implementation.
        if not (self._is_parallel and self.training):
            return F.batch_norm(
                input, self.running_mean, self.running_var, self.weight, self.bias,
                self.training, self.momentum, self.eps)

        # Resize the input to (B, C, -1).
        input_shape = input.size()
        input = input.view(input.size(0), self.num_features, -1)

        # Compute the sum and square-sum.
        sum_size = input.size(0) * input.size(2)
        input_sum = _sum_ft(input)
        input_ssum = _sum_ft(input ** 2)

        # Reduce-and-broadcast the statistics.
        if self._parallel_id == 0:
            mean, inv_std = self._sync_master.run_master(_ChildMessage(input_sum, input_ssum, sum_size))
        else:
            mean, inv_std = self._slave_pipe.run_slave(_ChildMessage(input_sum, input_ssum, sum_size))

        # Compute the output.
        if self.affine:
            # MJY:: Fuse the multiplication for speed.
            output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std * self.weight) + _unsqueeze_ft(self.bias)
        else:
            output = (input - _unsqueeze_ft(mean)) * _unsqueeze_ft(inv_std)

        # Reshape it.
        return output.view(input_shape)

    def __data_parallel_replicate__(self, ctx, copy_id):
        self._is_parallel = True
        self._parallel_id = copy_id

        # parallel_id == 0 means master device.
        if self._parallel_id == 0:
            ctx.sync_master = self._sync_master
        else:
            self._slave_pipe = ctx.sync_master.register_slave(copy_id)

    def _data_parallel_master(self, intermediates):
        """Reduce the sum and square-sum, compute the statistics, and broadcast it."""

        # Always using the same "device order" makes the ReduceAdd operation faster.
        # Thanks to:: Tete Xiao (http://tetexiao.com/)
        intermediates = sorted(intermediates, key=lambda i: i[1].sum.get_device())

        to_reduce = [i[1][:2] for i in intermediates]
        to_reduce = [j for i in to_reduce for j in i]  # flatten
        target_gpus = [i[1].sum.get_device() for i in intermediates]

        sum_size = sum([i[1].sum_size for i in intermediates])
        sum_, ssum = ReduceAddCoalesced.apply(target_gpus[0], 2, *to_reduce)
        mean, inv_std = self._compute_mean_std(sum_, ssum, sum_size)

        broadcasted = Broadcast.apply(target_gpus, mean, inv_std)

        outputs = []
        for i, rec in enumerate(intermediates):
            outputs.append((rec[0], _MasterMessage(*broadcasted[i * 2:i * 2 + 2])))

        return outputs

    def _compute_mean_std(self, sum_, ssum, size):
        """Compute the mean and standard-deviation with sum and square-sum. This method
        also maintains the moving average on the master device."""
        assert size > 1, 'BatchNorm computes unbiased standard-deviation, which requires size > 1.'
        mean = sum_ / size
        sumvar = ssum - sum_ * mean
        unbias_var = sumvar / (size - 1)
        bias_var = sumvar / size

        self.running_mean = (1 - self.momentum) * self.running_mean + self.momentum * mean.data
        self.running_var = (1 - self.momentum) * self.running_var + self.momentum * unbias_var.data

        return mean, bias_var.clamp(self.eps) ** -0.5


class SynchronizedBatchNorm1d(_SynchronizedBatchNorm):
    r"""Applies Synchronized Batch Normalization over a 2d or 3d input that is seen as a
    mini-batch.

    .. math::
        y = \frac{x - mean[x]}{\sqrt{Var[x] + \epsilon}} * gamma + beta

    This module differs from the built-in PyTorch BatchNorm1d in that the mean and
    standard-deviation are reduced across all devices during training.
    For example, when one uses `nn.DataParallel` to wrap the network during
    training, PyTorch's implementation normalizes the tensor on each device using
    only the statistics on that device, which accelerates the computation and
    is easy to implement, but the statistics might be inaccurate.
    Instead, in this synchronized version, the statistics will be computed
    over all training samples distributed on multiple devices.

    Note that, for the one-GPU or CPU-only case, this module behaves exactly the same
    as the built-in PyTorch implementation.
    The mean and standard-deviation are calculated per-dimension over
    the mini-batches and gamma and beta are learnable parameter vectors
    of size C (where C is the input size).
    During training, this layer keeps a running estimate of its computed mean
    and variance. The running sum is kept with a default momentum of 0.1.
    During evaluation, this running mean/variance is used for normalization.
    Because the BatchNorm is done over the `C` dimension, computing statistics
    on `(N, L)` slices, it's common terminology to call this Temporal BatchNorm.

    Args:
        num_features: num_features from an expected input of size
            `batch_size x num_features [x width]`
        eps: a value added to the denominator for numerical stability.
            Default: 1e-5
        momentum: the value used for the running_mean and running_var
            computation. Default: 0.1
        affine: a boolean value that when set to ``True``, gives the layer learnable
            affine parameters. Default: ``True``

    Shape:
        - Input: :math:`(N, C)` or :math:`(N, C, L)`
        - Output: :math:`(N, C)` or :math:`(N, C, L)` (same shape as input)

    Examples:
        >>> # With Learnable Parameters
        >>> m = SynchronizedBatchNorm1d(100)
        >>> # Without Learnable Parameters
        >>> m = SynchronizedBatchNorm1d(100, affine=False)
        >>> input = torch.autograd.Variable(torch.randn(20, 100))
        >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() != 2 and input.dim() != 3:
            raise ValueError('expected 2D or 3D input (got {}D input)'
                             .format(input.dim()))
        super(SynchronizedBatchNorm1d, self)._check_input_dim(input)


class SynchronizedBatchNorm2d(_SynchronizedBatchNorm):
    r"""Applies Batch Normalization over a 4d input that is seen as a mini-batch
    of 3d inputs.

    .. math::
        y = \frac{x - mean[x]}{\sqrt{Var[x] + \epsilon}} * gamma + beta

    This module differs from the built-in PyTorch BatchNorm2d in that the mean and
    standard-deviation are reduced across all devices during training.
    For example, when one uses `nn.DataParallel` to wrap the network during
    training, PyTorch's implementation normalizes the tensor on each device using
    only the statistics on that device, which accelerates the computation and
    is easy to implement, but the statistics might be inaccurate.
    Instead, in this synchronized version, the statistics will be computed
    over all training samples distributed on multiple devices.

    Note that, for the one-GPU or CPU-only case, this module behaves exactly the same
    as the built-in PyTorch implementation.
    The mean and standard-deviation are calculated per-dimension over
    the mini-batches and gamma and beta are learnable parameter vectors
    of size C (where C is the input size).
    During training, this layer keeps a running estimate of its computed mean
    and variance. The running sum is kept with a default momentum of 0.1.
    During evaluation, this running mean/variance is used for normalization.
    Because the BatchNorm is done over the `C` dimension, computing statistics
    on `(N, H, W)` slices, it's common terminology to call this Spatial BatchNorm.

    Args:
        num_features: num_features from an expected input of
            size batch_size x num_features x height x width
        eps: a value added to the denominator for numerical stability.
            Default: 1e-5
        momentum: the value used for the running_mean and running_var
            computation. Default: 0.1
        affine: a boolean value that when set to ``True``, gives the layer learnable
            affine parameters. Default: ``True``

    Shape:
        - Input: :math:`(N, C, H, W)`
        - Output: :math:`(N, C, H, W)` (same shape as input)

    Examples:
        >>> # With Learnable Parameters
        >>> m = SynchronizedBatchNorm2d(100)
        >>> # Without Learnable Parameters
        >>> m = SynchronizedBatchNorm2d(100, affine=False)
        >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45))
        >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() != 4:
            raise ValueError('expected 4D input (got {}D input)'
                             .format(input.dim()))
        super(SynchronizedBatchNorm2d, self)._check_input_dim(input)


class SynchronizedBatchNorm3d(_SynchronizedBatchNorm):
    r"""Applies Batch Normalization over a 5d input that is seen as a mini-batch
    of 4d inputs.

    .. math::
        y = \frac{x - mean[x]}{\sqrt{Var[x] + \epsilon}} * gamma + beta

    This module differs from the built-in PyTorch BatchNorm3d in that the mean and
    standard-deviation are reduced across all devices during training.
    For example, when one uses `nn.DataParallel` to wrap the network during
    training, PyTorch's implementation normalizes the tensor on each device using
    only the statistics on that device, which accelerates the computation and
    is easy to implement, but the statistics might be inaccurate.
    Instead, in this synchronized version, the statistics will be computed
    over all training samples distributed on multiple devices.

    Note that, for the one-GPU or CPU-only case, this module behaves exactly the same
    as the built-in PyTorch implementation.
    The mean and standard-deviation are calculated per-dimension over
    the mini-batches and gamma and beta are learnable parameter vectors
    of size C (where C is the input size).
    During training, this layer keeps a running estimate of its computed mean
    and variance. The running sum is kept with a default momentum of 0.1.
    During evaluation, this running mean/variance is used for normalization.
    Because the BatchNorm is done over the `C` dimension, computing statistics
    on `(N, D, H, W)` slices, it's common terminology to call this Volumetric BatchNorm
    or Spatio-temporal BatchNorm.

    Args:
        num_features: num_features from an expected input of
            size batch_size x num_features x depth x height x width
        eps: a value added to the denominator for numerical stability.
            Default: 1e-5
        momentum: the value used for the running_mean and running_var
            computation. Default: 0.1
        affine: a boolean value that when set to ``True``, gives the layer learnable
            affine parameters. Default: ``True``

    Shape:
        - Input: :math:`(N, C, D, H, W)`
        - Output: :math:`(N, C, D, H, W)` (same shape as input)

    Examples:
        >>> # With Learnable Parameters
        >>> m = SynchronizedBatchNorm3d(100)
        >>> # Without Learnable Parameters
        >>> m = SynchronizedBatchNorm3d(100, affine=False)
        >>> input = torch.autograd.Variable(torch.randn(20, 100, 35, 45, 10))
        >>> output = m(input)
    """

    def _check_input_dim(self, input):
        if input.dim() != 5:
            raise ValueError('expected 5D input (got {}D input)'
                             .format(input.dim()))
        super(SynchronizedBatchNorm3d, self)._check_input_dim(input)
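How this vendored module is meant to be used: swap `SynchronizedBatchNorm2d` in for `nn.BatchNorm2d` and wrap the model with the callback-aware DataParallel from `replicate.py`, so that each replica registers with the `SyncMaster` during replication. A sketch assuming a multi-GPU machine; the device ids are placeholders.

import torch.nn as nn
from core.sync_batchnorm import SynchronizedBatchNorm2d, DataParallelWithCallback

model = nn.Sequential(
    nn.Conv2d(3, 16, 3, padding=1),
    SynchronizedBatchNorm2d(16),   # batch statistics reduced across all replicas
    nn.ReLU(inplace=True),
)
# On a single GPU or CPU the layer silently falls back to plain F.batch_norm.
model = DataParallelWithCallback(model, device_ids=[0, 1])  # placeholder device ids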
core/sync_batchnorm/comm.py
ADDED
@@ -0,0 +1,129 @@
# -*- coding: utf-8 -*-
# File : comm.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 27/01/2018
#
# This file is part of Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
# Distributed under MIT License.

import queue
import collections
import threading

__all__ = ['FutureResult', 'SlavePipe', 'SyncMaster']


class FutureResult(object):
    """A thread-safe future implementation. Used only as a one-to-one pipe."""

    def __init__(self):
        self._result = None
        self._lock = threading.Lock()
        self._cond = threading.Condition(self._lock)

    def put(self, result):
        with self._lock:
            assert self._result is None, 'Previous result hasn\'t been fetched.'
            self._result = result
            self._cond.notify()

    def get(self):
        with self._lock:
            if self._result is None:
                self._cond.wait()

            res = self._result
            self._result = None
            return res


_MasterRegistry = collections.namedtuple('MasterRegistry', ['result'])
_SlavePipeBase = collections.namedtuple('_SlavePipeBase', ['identifier', 'queue', 'result'])


class SlavePipe(_SlavePipeBase):
    """Pipe for master-slave communication."""

    def run_slave(self, msg):
        self.queue.put((self.identifier, msg))
        ret = self.result.get()
        self.queue.put(True)
        return ret


class SyncMaster(object):
    """An abstract `SyncMaster` object.

    - During replication, as data parallel triggers a callback on each module, every slave device should
      call `register_slave(identifier)` and obtain a `SlavePipe` to communicate with the master.
    - During the forward pass, the master device invokes `run_master`; all messages from the slave devices
      are collected and passed to a registered callback.
    - After receiving the messages, the master device should gather the information and determine the message
      to be passed back to each slave device.
    """

    def __init__(self, master_callback):
        """
        Args:
            master_callback: a callback to be invoked after having collected messages from slave devices.
        """
        self._master_callback = master_callback
        self._queue = queue.Queue()
        self._registry = collections.OrderedDict()
        self._activated = False

    def __getstate__(self):
        return {'master_callback': self._master_callback}

    def __setstate__(self, state):
        self.__init__(state['master_callback'])

    def register_slave(self, identifier):
        """
        Register a slave device.

        Args:
            identifier: an identifier, usually the device id.

        Returns: a `SlavePipe` object which can be used to communicate with the master device.
        """
        if self._activated:
            assert self._queue.empty(), 'Queue is not clean before next initialization.'
            self._activated = False
            self._registry.clear()
        future = FutureResult()
        self._registry[identifier] = _MasterRegistry(future)
        return SlavePipe(identifier, self._queue, future)

    def run_master(self, master_msg):
        """
        Main entry for the master device in each forward pass.
        The messages are first collected from each device (including the master device), and then
        a callback is invoked to compute the message to be sent back to each device
        (including the master device).

        Args:
            master_msg: the message that the master wants to send to itself. This will be placed as the first
            message when calling `master_callback`. For detailed usage, see `_SynchronizedBatchNorm` for an example.

        Returns: the message to be sent back to the master device.
        """
        self._activated = True

        intermediates = [(0, master_msg)]
        for i in range(self.nr_slaves):
            intermediates.append(self._queue.get())

        results = self._master_callback(intermediates)
        assert results[0][0] == 0, 'The first result should belong to the master.'

        for i, res in results:
            if i == 0:
                continue
            self._registry[i].result.put(res)

        for i in range(self.nr_slaves):
            assert self._queue.get() is True

        return results[0][1]

    @property
    def nr_slaves(self):
        return len(self._registry)
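`FutureResult` is the one-to-one handshake underneath `SlavePipe`: the slave blocks in `get()` until the master `put()`s the reduced statistics. A tiny standalone demonstration of that blocking behavior:

import threading
from core.sync_batchnorm.comm import FutureResult

future = FutureResult()

def worker():
    print('worker got:', future.get())   # blocks until the main thread puts a value

t = threading.Thread(target=worker)
t.start()
future.put(42)                           # wakes the worker exactly once
t.join()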
core/sync_batchnorm/replicate.py
ADDED
@@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# File : replicate.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 27/01/2018
#
# This file is part of Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
# Distributed under MIT License.

import functools

from torch.nn.parallel.data_parallel import DataParallel

__all__ = [
    'CallbackContext',
    'execute_replication_callbacks',
    'DataParallelWithCallback',
    'patch_replication_callback'
]


class CallbackContext(object):
    pass


def execute_replication_callbacks(modules):
    """
    Execute a replication callback `__data_parallel_replicate__` on each module created by the original replication.

    The callback will be invoked with arguments `__data_parallel_replicate__(ctx, copy_id)`

    Note that, as all module copies are isomorphic, we assign each sub-module a context
    (shared among multiple copies of this module on different devices).
    Through this context, different copies can share some information.

    We guarantee that the callback on the master copy (the first copy) will be called ahead of calling the callback
    of any slave copies.
    """
    master_copy = modules[0]
    nr_modules = len(list(master_copy.modules()))
    ctxs = [CallbackContext() for _ in range(nr_modules)]

    for i, module in enumerate(modules):
        for j, m in enumerate(module.modules()):
            if hasattr(m, '__data_parallel_replicate__'):
                m.__data_parallel_replicate__(ctxs[j], i)


class DataParallelWithCallback(DataParallel):
    """
    Data Parallel with a replication callback.

    A replication callback `__data_parallel_replicate__` of each module will be invoked after being created by
    the original `replicate` function.
    The callback will be invoked with arguments `__data_parallel_replicate__(ctx, copy_id)`

    Examples:
        > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
        > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1])
        # sync_bn.__data_parallel_replicate__ will be invoked.
    """

    def replicate(self, module, device_ids):
        modules = super(DataParallelWithCallback, self).replicate(module, device_ids)
        execute_replication_callbacks(modules)
        return modules


def patch_replication_callback(data_parallel):
    """
    Monkey-patch an existing `DataParallel` object. Add the replication callback.
    Useful when you have a customized `DataParallel` implementation.

    Examples:
        > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
        > sync_bn = DataParallel(sync_bn, device_ids=[0, 1])
        > patch_replication_callback(sync_bn)
        # this is equivalent to
        > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
        > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1])
    """

    assert isinstance(data_parallel, DataParallel)

    old_replicate = data_parallel.replicate

    @functools.wraps(old_replicate)
    def new_replicate(module, device_ids):
        modules = old_replicate(module, device_ids)
        execute_replication_callbacks(modules)
        return modules

    data_parallel.replicate = new_replicate
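`patch_replication_callback` is the retrofit path: when the training code already holds a plain `nn.DataParallel`, monkey-patching its `replicate` achieves the same effect as constructing `DataParallelWithCallback` directly. Following the docstring's own example, with placeholder device ids:

import torch.nn as nn
from core.sync_batchnorm import SynchronizedBatchNorm2d, patch_replication_callback

model = nn.Sequential(nn.Conv2d(3, 8, 3), SynchronizedBatchNorm2d(8))
model = nn.DataParallel(model, device_ids=[0, 1])  # placeholder device ids
patch_replication_callback(model)  # replicas now register with the SyncMaster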
core/sync_batchnorm/unittest.py
ADDED
@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# File : unittest.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 27/01/2018
#
# This file is part of Synchronized-BatchNorm-PyTorch.
# https://github.com/vacancy/Synchronized-BatchNorm-PyTorch
# Distributed under MIT License.

import unittest

import numpy as np
from torch.autograd import Variable


def as_numpy(v):
    if isinstance(v, Variable):
        v = v.data
    return v.cpu().numpy()


class TorchTestCase(unittest.TestCase):
    def assertTensorClose(self, a, b, atol=1e-3, rtol=1e-3):
        npa, npb = as_numpy(a), as_numpy(b)
        self.assertTrue(
            np.allclose(npa, npb, atol=atol),
            'Tensor close check failed\n{}\n{}\nadiff={}, rdiff={}'.format(a, b, np.abs(npa - npb).max(), np.abs((npa - npb) / np.fmax(npa, 1e-5)).max())
        )
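`TorchTestCase.assertTensorClose` is the comparison helper the sync-BN tests use to check synchronized statistics against single-device BatchNorm. A minimal use, assuming the snippet lives in a module run with `python -m unittest`:

import torch
from core.sync_batchnorm.unittest import TorchTestCase

class DummyTest(TorchTestCase):
    def test_close(self):
        a = torch.ones(4)
        self.assertTensorClose(a, a + 1e-6)  # passes: within the 1e-3 absolute tolerance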
data/VOC_2012.json
ADDED
@@ -0,0 +1,198 @@
{
    "train": {
        "aeroplane": 837,
        "person": 9734,
        "tvmonitor": 790,
        "dog": 1418,
        "chair": 2794,
        "bird": 1128,
        "bottle": 1396,
        "boat": 940,
        "diningtable": 715,
        "train": 609,
        "motorbike": 689,
        "horse": 696,
        "cow": 633,
        "bicycle": 712,
        "car": 2235,
        "cat": 1141,
        "sofa": 732,
        "bus": 569,
        "pottedplant": 1024,
        "sheep": 931
    },
    "validation": {
        "aeroplane": 112,
        "train": 93,
        "boat": 108,
        "bicycle": 103,
        "person": 866,
        "sheep": 153,
        "tvmonitor": 98,
        "horse": 104,
        "bottle": 163,
        "sofa": 106,
        "chair": 245,
        "cow": 132,
        "car": 249,
        "pottedplant": 171,
        "bus": 116,
        "diningtable": 82,
        "dog": 150,
        "bird": 140,
        "cat": 132,
        "motorbike": 103
    },
    "classes": 20,
    "class_names": ["aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"],
    "class_dic": {
        "aeroplane": 0,
        "bicycle": 1,
        "bird": 2,
        "boat": 3,
        "bottle": 4,
        "bus": 5,
        "car": 6,
        "cat": 7,
        "chair": 8,
        "cow": 9,
        "diningtable": 10,
        "dog": 11,
        "horse": 12,
        "motorbike": 13,
        "person": 14,
        "pottedplant": 15,
        "sheep": 16,
        "sofa": 17,
        "train": 18,
        "tvmonitor": 19
    },
    "color_dict": {
        "background": [0, 0, 0],
        "aeroplane": [128, 0, 0],
        "bicycle": [0, 128, 0],
        "bird": [128, 128, 0],
        "boat": [0, 0, 128],
        "bottle": [128, 0, 128],
        "bus": [0, 128, 128],
        "car": [128, 128, 128],
        "cat": [64, 0, 0],
        "chair": [192, 0, 0],
        "cow": [64, 128, 0],
        "diningtable": [192, 128, 0],
        "dog": [64, 0, 128],
        "horse": [192, 0, 128],
        "motorbike": [64, 128, 128],
        "person": [192, 128, 128],
        "pottedplant": [0, 64, 0],
        "sheep": [128, 64, 0],
        "sofa": [0, 192, 0],
        "train": [128, 192, 0],
        "tvmonitor": [0, 64, 128]
    }
}
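The dataset classes above consume this file through `read_json`: `class_dic` maps a tag to its index, and `classes` gives the vector length for `one_hot_embedding`. The same lookup can be done with only the standard library; the path below is relative to the repository root.

import json

with open('./data/VOC_2012.json') as f:
    data = json.load(f)

tags = ['dog', 'person']  # example image-level tags
label = [0.0] * data['classes']
for tag in tags:
    label[data['class_dic'][tag]] = 1.0

print(label)  # one-hot over 20 classes: indices 11 (dog) and 14 (person) set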
data/test.txt
ADDED
@@ -0,0 +1,1456 @@
2008_000006
2008_000011
2008_000012
2008_000018
2008_000024
2008_000030
2008_000031
2008_000046
2008_000047
2008_000048
2008_000057
2008_000058
2008_000068
2008_000072
2008_000079
2008_000081
2008_000083
2008_000088
2008_000094
2008_000101
2008_000104
2008_000106
2008_000108
2008_000110
2008_000111
2008_000126
2008_000127
2008_000129
2008_000130
2008_000135
2008_000150
2008_000152
2008_000156
2008_000159
2008_000160
2008_000161
2008_000166
2008_000167
2008_000168
2008_000169
2008_000171
2008_000175
2008_000178
2008_000186
2008_000198
2008_000206
2008_000208
2008_000209
2008_000211
2008_000220
2008_000224
2008_000230
2008_000240
2008_000248
2008_000249
2008_000250
2008_000256
2008_000279
2008_000282
2008_000285
2008_000286
2008_000296
2008_000300
2008_000322
2008_000324
2008_000337
2008_000366
2008_000369
2008_000377
2008_000384
2008_000390
2008_000404
2008_000411
2008_000434
2008_000440
2008_000460
2008_000467
2008_000478
2008_000485
2008_000487
2008_000490
2008_000503
2008_000504
2008_000507
2008_000513
2008_000523
2008_000529
2008_000556
2008_000565
2008_000580
2008_000590
2008_000596
2008_000597
2008_000600
2008_000603
2008_000604
2008_000612
2008_000617
2008_000621
2008_000627
2008_000633
2008_000643
2008_000644
2008_000649
2008_000651
2008_000664
2008_000665
2008_000680
2008_000681
2008_000684
2008_000685
2008_000688
2008_000693
2008_000698
2008_000707
2008_000709
2008_000712
2008_000747
2008_000751
2008_000754
2008_000762
2008_000767
2008_000768
2008_000773
2008_000774
2008_000779
2008_000797
2008_000813
2008_000816
2008_000846
2008_000866
2008_000871
2008_000872
2008_000891
2008_000892
2008_000894
2008_000896
2008_000898
2008_000909
2008_000913
2008_000920
2008_000933
2008_000935
2008_000937
2008_000938
2008_000954
2008_000958
2008_000963
2008_000967
2008_000974
2008_000986
2008_000994
2008_000995
2008_001008
2008_001010
2008_001014
2008_001016
2008_001025
2008_001029
2008_001037
2008_001059
2008_001061
2008_001072
2008_001124
2008_001126
2008_001131
2008_001138
2008_001144
2008_001151
2008_001156
2008_001179
2008_001181
2008_001184
2008_001186
2008_001197
2008_001207
2008_001212
2008_001233
2008_001234
2008_001258
2008_001268
2008_001279
2008_001281
2008_001288
2008_001291
2008_001298
2008_001309
2008_001315
2008_001316
2008_001319
2008_001327
2008_001328
2008_001332
2008_001341
2008_001347
2008_001355
2008_001378
2008_001386
2008_001400
2008_001409
2008_001411
2008_001416
2008_001418
2008_001435
2008_001459
2008_001469
2008_001474
2008_001477
2008_001483
2008_001484
2008_001485
2008_001496
2008_001507
2008_001511
2008_001519
2008_001557
2008_001567
2008_001570
2008_001571
2008_001572
2008_001579
2008_001587
2008_001608
2008_001611
2008_001614
2008_001621
2008_001639
2008_001658
2008_001678
2008_001700
2008_001713
2008_001720
2008_001755
2008_001779
2008_001785
2008_001793
2008_001794
2008_001803
2008_001818
2008_001848
2008_001855
2008_001857
2008_001861
2008_001875
2008_001878
2008_001886
2008_001897
2008_001916
2008_001925
2008_001949
2008_001953
2008_001972
2008_001999
2008_002027
2008_002040
2008_002057
2008_002070
2008_002075
2008_002095
2008_002104
2008_002105
2008_002106
2008_002136
2008_002137
2008_002147
2008_002149
2008_002163
2008_002173
2008_002174
2008_002184
2008_002186
2008_002188
2008_002190
2008_002203
2008_002211
2008_002217
2008_002228
2008_002233
2008_002246
2008_002257
2008_002261
2008_002285
2008_002287
2008_002295
2008_002303
2008_002306
2008_002309
2008_002310
2008_002318
2008_002320
2008_002332
2008_002337
2008_002345
2008_002348
2008_002352
2008_002360
2008_002381
2008_002387
2008_002388
2008_002393
2008_002406
2008_002440
2008_002455
2008_002460
2008_002462
2008_002480
2008_002518
2008_002525
2008_002535
2008_002544
2008_002553
2008_002569
2008_002572
2008_002587
2008_002635
2008_002655
2008_002695
2008_002702
2008_002706
2008_002707
2008_002722
2008_002745
2008_002757
2008_002779
2008_002805
2008_002871
2008_002895
2008_002905
2008_002923
2008_002927
2008_002939
2008_002941
2008_002962
2008_002975
2008_003000
2008_003031
2008_003038
2008_003042
2008_003069
2008_003070
2008_003115
2008_003116
2008_003130
2008_003137
2008_003138
2008_003139
2008_003165
2008_003171
2008_003176
2008_003192
2008_003194
2008_003195
2008_003198
2008_003227
2008_003247
2008_003262
2008_003298
2008_003299
2008_003307
2008_003337
2008_003353
2008_003355
2008_003363
2008_003383
2008_003389
2008_003392
2008_003399
2008_003436
2008_003457
2008_003465
2008_003481
2008_003539
2008_003548
2008_003550
2008_003567
2008_003568
2008_003606
2008_003615
2008_003654
2008_003670
2008_003700
2008_003705
2008_003727
2008_003731
2008_003734
2008_003760
2008_003804
2008_003807
2008_003810
2008_003822
2008_003833
2008_003877
2008_003879
2008_003895
2008_003901
2008_003903
2008_003911
2008_003919
2008_003927
2008_003937
2008_003946
2008_003950
2008_003955
2008_003981
2008_003991
2008_004009
2008_004039
2008_004052
2008_004063
2008_004070
2008_004078
2008_004104
2008_004139
2008_004177
2008_004181
2008_004200
2008_004219
2008_004236
2008_004250
2008_004266
2008_004299
2008_004320
2008_004334
2008_004343
2008_004349
2008_004366
2008_004386
2008_004401
2008_004423
2008_004448
2008_004481
2008_004516
2008_004536
2008_004582
2008_004609
2008_004638
2008_004642
2008_004644
2008_004669
2008_004673
2008_004691
2008_004693
2008_004709
2008_004715
2008_004757
2008_004775
2008_004782
2008_004785
2008_004798
2008_004848
2008_004861
2008_004870
2008_004877
2008_004884
2008_004891
2008_004901
2008_004919
2008_005058
2008_005069
2008_005086
2008_005087
2008_005112
2008_005113
2008_005118
2008_005128
2008_005129
2008_005153
2008_005161
2008_005162
2008_005165
2008_005187
2008_005227
2008_005308
2008_005318
2008_005320
2008_005351
2008_005372
2008_005383
2008_005391
2008_005407
2008_005420
2008_005440
2008_005487
2008_005493
2008_005520
2008_005551
2008_005556
2008_005576
2008_005578
2008_005594
2008_005619
2008_005629
2008_005644
2008_005645
2008_005651
2008_005661
2008_005662
2008_005667
2008_005694
2008_005697
2008_005709
2008_005710
2008_005733
2008_005749
2008_005753
2008_005771
2008_005781
2008_005793
2008_005802
2008_005833
2008_005844
2008_005908
2008_005931
2008_005952
2008_006016
2008_006030
2008_006033
2008_006054
2008_006073
2008_006091
2008_006142
2008_006150
2008_006206
2008_006217
2008_006264
2008_006283
2008_006308
2008_006313
2008_006333
2008_006343
2008_006381
2008_006391
2008_006423
2008_006428
2008_006440
2008_006444
2008_006473
2008_006505
2008_006531
2008_006560
2008_006571
2008_006582
2008_006594
2008_006601
2008_006633
2008_006653
2008_006678
2008_006755
2008_006772
2008_006788
2008_006799
2008_006809
2008_006838
2008_006845
2008_006852
2008_006894
2008_006905
2008_006947
2008_006983
2008_007049
2008_007065
2008_007068
2008_007111
2008_007148
2008_007159
2008_007193
2008_007228
2008_007235
2008_007249
2008_007255
2008_007268
2008_007275
2008_007292
2008_007299
2008_007306
2008_007316
2008_007400
2008_007401
2008_007419
2008_007437
2008_007483
2008_007487
2008_007520
2008_007551
2008_007603
2008_007616
2008_007654
2008_007663
2008_007708
2008_007795
2008_007801
2008_007859
2008_007903
2008_007920
2008_007926
2008_008014
2008_008017
2008_008060
2008_008077
2008_008107
2008_008108
2008_008119
2008_008126
2008_008133
2008_008144
2008_008216
2008_008244
2008_008248
2008_008250
2008_008260
2008_008277
2008_008280
2008_008290
2008_008304
2008_008340
2008_008371
2008_008390
2008_008397
2008_008409
2008_008412
2008_008419
2008_008454
2008_008491
2008_008498
2008_008565
2008_008599
2008_008603
2008_008631
2008_008634
2008_008640
2008_008646
2008_008660
2008_008663
2008_008664
2008_008709
2008_008720
2008_008747
2008_008768
2009_000004
2009_000019
2009_000024
2009_000025
2009_000053
2009_000076
2009_000107
2009_000110
2009_000115
2009_000117
2009_000175
2009_000220
2009_000259
2009_000275
2009_000314
2009_000368
2009_000373
2009_000384
2009_000388
2009_000423
2009_000433
2009_000434
2009_000458
2009_000475
2009_000481
2009_000495
2009_000514
2009_000555
2009_000556
2009_000561
2009_000571
2009_000581
2009_000605
2009_000609
2009_000644
2009_000654
2009_000671
2009_000733
2009_000740
2009_000766
2009_000775
2009_000776
2009_000795
2009_000850
2009_000881
2009_000900
2009_000914
2009_000941
2009_000977
2009_000984
2009_000986
2009_001005
2009_001015
2009_001058
2009_001072
2009_001087
2009_001092
2009_001109
2009_001114
2009_001115
2009_001141
2009_001174
2009_001175
2009_001182
2009_001222
2009_001228
2009_001246
2009_001262
2009_001274
2009_001284
2009_001297
2009_001331
2009_001336
2009_001337
2009_001379
2009_001392
2009_001451
2009_001485
2009_001488
2009_001497
2009_001504
2009_001506
2009_001573
2009_001576
2009_001603
2009_001613
2009_001652
2009_001661
2009_001668
2009_001680
2009_001688
2009_001697
2009_001729
2009_001771
2009_001785
2009_001793
2009_001814
2009_001866
2009_001872
2009_001880
2009_001883
2009_001891
2009_001913
2009_001938
2009_001946
2009_001953
2009_001969
2009_001978
2009_001995
2009_002007
2009_002036
2009_002041
2009_002049
2009_002051
2009_002062
2009_002063
2009_002067
2009_002085
2009_002092
2009_002114
2009_002115
2009_002142
2009_002148
2009_002157
2009_002181
2009_002220
2009_002284
2009_002287
2009_002300
2009_002310
2009_002315
2009_002334
2009_002337
2009_002354
2009_002357
2009_002411
2009_002426
2009_002458
2009_002459
2009_002461
2009_002466
2009_002481
2009_002483
2009_002503
2009_002581
2009_002583
2009_002589
2009_002600
2009_002601
2009_002602
2009_002641
2009_002646
2009_002656
2009_002666
2009_002720
2009_002767
2009_002768
2009_002794
2009_002821
2009_002825
2009_002839
2009_002840
2009_002859
2009_002860
2009_002881
2009_002889
2009_002892
2009_002895
2009_002896
2009_002900
2009_002924
2009_002966
2009_002973
2009_002981
2009_003004
2009_003021
2009_003028
2009_003037
2009_003038
2009_003055
2009_003085
2009_003100
2009_003106
2009_003117
2009_003139
2009_003170
2009_003179
2009_003184
2009_003186
2009_003190
2009_003221
2009_003236
2009_003242
2009_003244
2009_003260
2009_003264
2009_003274
2009_003283
2009_003296
2009_003332
2009_003341
2009_003354
2009_003370
2009_003371
2009_003374
2009_003391
2009_003393
2009_003404
2009_003405
2009_003414
2009_003428
2009_003470
2009_003474
2009_003532
2009_003536
2009_003578
2009_003580
2009_003620
2009_003621
2009_003680
2009_003699
2009_003727
2009_003737
2009_003780
2009_003811
2009_003824
2009_003831
2009_003844
2009_003850
2009_003851
2009_003864
2009_003868
2009_003869
2009_003893
2009_003909
2009_003924
2009_003925
2009_003960
2009_003979
2009_003990
2009_003997
2009_004006
2009_004010
2009_004066
2009_004077
2009_004081
2009_004097
2009_004098
2009_004136
2009_004216
2009_004220
2009_004266
2009_004269
2009_004286
2009_004296
2009_004321
2009_004342
2009_004343
2009_004344
2009_004385
2009_004408
2009_004420
2009_004441
2009_004447
2009_004461
2009_004467
2009_004485
2009_004488
2009_004516
2009_004521
2009_004544
2009_004596
2009_004613
2009_004615
2009_004618
2009_004621
2009_004646
2009_004659
2009_004663
2009_004666
2009_004691
2009_004715
2009_004726
2009_004753
2009_004776
2009_004811
2009_004814
2009_004818
2009_004835
2009_004863
2009_004894
2009_004909
2009_004928
2009_004937
2009_004954
2009_004966
2009_004970
2009_004976
2009_005004
2009_005011
2009_005053
2009_005072
2009_005115
2009_005146
2009_005151
2009_005164
2009_005179
2009_005224
2009_005243
2009_005249
2009_005252
2009_005254
2009_005258
2009_005264
2009_005266
2009_005276
2009_005290
2009_005295
2010_000004
2010_000005
2010_000006
2010_000032
2010_000062
2010_000093
2010_000094
2010_000161
2010_000176
2010_000223
2010_000226
2010_000236
2010_000239
2010_000287
2010_000300
2010_000301
2010_000328
2010_000378
2010_000405
2010_000407
2010_000472
2010_000479
2010_000491
2010_000533
2010_000535
2010_000542
2010_000554
2010_000580
2010_000594
2010_000596
2010_000599
2010_000606
2010_000615
2010_000654
2010_000659
2010_000693
2010_000698
2010_000730
2010_000734
2010_000741
2010_000755
2010_000768
2010_000794
2010_000813
2010_000817
2010_000834
2010_000839
2010_000848
2010_000881
2010_000888
2010_000900
2010_000903
2010_000924
2010_000946
2010_000953
2010_000957
2010_000967
2010_000992
2010_000998
2010_001053
2010_001067
2010_001114
2010_001132
2010_001138
2010_001169
2010_001171
2010_001228
2010_001260
2010_001268
2010_001280
2010_001298
2010_001302
2010_001308
2010_001324
2010_001332
2010_001335
2010_001345
2010_001346
2010_001349
2010_001373
2010_001381
2010_001392
2010_001396
2010_001420
2010_001500
2010_001506
2010_001521
2010_001532
2010_001558
2010_001598
2010_001611
2010_001631
2010_001639
2010_001651
2010_001663
2010_001664
2010_001728
2010_001778
2010_001861
2010_001874
2010_001900
2010_001905
2010_001969
2010_002008
2010_002014
2010_002049
2010_002052
2010_002091
2010_002115
2010_002119
2010_002134
2010_002156
2010_002160
2010_002186
2010_002210
2010_002241
2010_002252
2010_002258
2010_002262
2010_002273
2010_002290
2010_002292
2010_002347
2010_002358
2010_002360
2010_002367
2010_002416
2010_002451
2010_002481
2010_002490
2010_002495
2010_002588
2010_002607
2010_002609
2010_002610
2010_002641
2010_002685
2010_002699
2010_002719
2010_002735
2010_002751
2010_002804
2010_002835
2010_002852
2010_002885
2010_002889
2010_002904
2010_002908
2010_002916
2010_002974
2010_002977
2010_003005
2010_003021
2010_003030
2010_003038
2010_003046
2010_003052
2010_003089
2010_003110
2010_003118
2010_003171
2010_003217
2010_003221
2010_003228
2010_003243
2010_003271
2010_003295
2010_003306
2010_003324
2010_003363
2010_003382
2010_003388
2010_003389
2010_003392
2010_003430
2010_003442
2010_003459
2010_003485
2010_003486
2010_003500
2010_003523
2010_003542
2010_003552
2010_003570
2010_003572
2010_003586
2010_003615
2010_003623
2010_003657
2010_003666
2010_003705
2010_003710
2010_003720
2010_003733
2010_003750
2010_003767
2010_003802
2010_003809
2010_003830
2010_003832
2010_003836
2010_003838
2010_003850
2010_003867
2010_003882
2010_003909
2010_003922
2010_003923
2010_003978
2010_003989
2010_003990
2010_004000
2010_004003
2010_004068
2010_004076
2010_004117
2010_004136
2010_004142
2010_004195
2010_004200
2010_004202
2010_004232
2010_004261
2010_004266
2010_004273
2010_004305
2010_004403
2010_004433
2010_004434
2010_004435
2010_004438
2010_004442
2010_004473
2010_004482
2010_004487
2010_004489
2010_004512
2010_004525
2010_004527
2010_004532
2010_004566
2010_004568
2010_004579
2010_004611
2010_004641
2010_004688
2010_004699
2010_004702
2010_004716
2010_004754
2010_004767
2010_004776
2010_004811
2010_004837
2010_004839
2010_004845
2010_004860
2010_004867
2010_004881
2010_004939
2010_005001
2010_005047
2010_005051
2010_005091
2010_005095
2010_005125
2010_005140
2010_005177
2010_005178
2010_005194
2010_005197
2010_005200
2010_005205
2010_005212
2010_005248
2010_005294
2010_005298
2010_005313
2010_005324
2010_005328
2010_005329
2010_005380
2010_005404
2010_005407
2010_005411
2010_005423
2010_005499
2010_005509
2010_005510
2010_005544
2010_005549
2010_005590
2010_005639
2010_005699
2010_005704
2010_005707
2010_005711
2010_005726
2010_005741
2010_005765
2010_005790
2010_005792
2010_005797
2010_005812
2010_005850
2010_005861
2010_005869
2010_005908
2010_005915
2010_005946
2010_005965
2010_006044
2010_006047
2010_006052
2010_006081
2011_000001
2011_000013
2011_000014
2011_000020
2011_000032
2011_000042
2011_000063
2011_000115
2011_000120
2011_000240
2011_000244
2011_000254
2011_000261
2011_000262
2011_000271
2011_000274
2011_000306
2011_000311
2011_000316
2011_000328
2011_000351
2011_000352
2011_000406
2011_000414
2011_000448
2011_000451
2011_000470
2011_000473
2011_000515
2011_000537
2011_000576
2011_000603
2011_000616
2011_000636
2011_000639
2011_000654
2011_000660
2011_000664
2011_000667
2011_000670
2011_000676
2011_000721
2011_000723
2011_000762
2011_000766
2011_000786
2011_000802
2011_000810
2011_000821
2011_000841
2011_000844
2011_000846
2011_000869
2011_000890
2011_000915
2011_000924
2011_000937
2011_000939
2011_000952
2011_000968
2011_000974
2011_001037
2011_001072
2011_001085
2011_001089
2011_001090
2011_001099
2011_001104
2011_001112
2011_001120
2011_001132
2011_001151
2011_001194
2011_001258
2011_001274
2011_001314
2011_001317
2011_001321
2011_001379
2011_001425
2011_001431
2011_001443
2011_001446
2011_001452
2011_001454
2011_001477
2011_001509
2011_001512
2011_001515
2011_001528
2011_001554
2011_001561
2011_001580
2011_001587
2011_001623
2011_001648
2011_001651
2011_001654
2011_001684
2011_001696
2011_001697
2011_001760
2011_001761
2011_001798
2011_001807
2011_001851
2011_001852
2011_001853
2011_001888
2011_001940
2011_002014
2011_002028
2011_002056
2011_002061
2011_002068
2011_002076
2011_002090
2011_002095
2011_002104
2011_002136
2011_002138
2011_002151
2011_002153
2011_002155
2011_002197
2011_002198
2011_002243
2011_002250
2011_002257
2011_002262
2011_002264
2011_002296
2011_002314
2011_002331
2011_002333
2011_002411
2011_002417
2011_002425
2011_002437
2011_002444
2011_002445
2011_002449
2011_002468
2011_002469
2011_002473
2011_002508
2011_002523
2011_002534
2011_002557
2011_002564
2011_002572
2011_002597
2011_002622
2011_002632
2011_002635
2011_002643
2011_002653
2011_002667
2011_002681
2011_002707
2011_002736
2011_002759
2011_002783
2011_002792
2011_002799
2011_002824
2011_002835
2011_002866
2011_002876
2011_002888
2011_002894
2011_002903
2011_002905
2011_002986
2011_003045
2011_003064
2011_003070
2011_003083
2011_003093
2011_003096
2011_003102
2011_003156
2011_003170
2011_003178
2011_003231
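data/test.txt above, like the train/val split files in this upload, holds one PASCAL VOC image identifier per line. A minimal sketch of how such a split file is typically turned into image paths: the VOCdevkit/VOC2012 directory layout used below is the usual convention and an assumption here, since the images themselves are not part of this commit.

```python
# Minimal sketch: read a split file (one VOC image id per line) and build
# image paths. The VOCdevkit/VOC2012 layout is the conventional location,
# assumed here; it is not included in this repository.
def read_split(path):
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

test_ids = read_split('data/test.txt')
image_paths = [f'VOCdevkit/VOC2012/JPEGImages/{i}.jpg' for i in test_ids]
print(len(test_ids), image_paths[0])
```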
data/train.txt
ADDED
@@ -0,0 +1,1464 @@
2007_000032
2007_000039
2007_000063
2007_000068
2007_000121
2007_000170
2007_000241
2007_000243
2007_000250
2007_000256
2007_000333
2007_000363
2007_000364
2007_000392
2007_000480
2007_000504
2007_000515
2007_000528
2007_000549
2007_000584
2007_000645
2007_000648
2007_000713
2007_000720
2007_000733
2007_000738
2007_000768
2007_000793
2007_000822
2007_000836
2007_000876
2007_000904
2007_001027
2007_001073
2007_001149
2007_001185
2007_001225
2007_001340
2007_001397
2007_001416
2007_001420
2007_001439
2007_001487
2007_001595
2007_001602
2007_001609
2007_001698
2007_001704
2007_001709
2007_001724
2007_001764
2007_001825
2007_001834
2007_001857
2007_001872
2007_001901
2007_001917
2007_001960
2007_002024
2007_002055
2007_002088
2007_002099
2007_002105
2007_002107
2007_002120
2007_002142
2007_002198
2007_002212
2007_002216
2007_002227
2007_002234
2007_002273
2007_002281
2007_002293
2007_002361
2007_002368
2007_002370
2007_002403
2007_002462
2007_002488
2007_002545
2007_002611
2007_002639
2007_002668
2007_002669
2007_002760
2007_002789
2007_002845
2007_002895
2007_002896
2007_002914
2007_002953
2007_002954
2007_002967
2007_003000
2007_003118
2007_003178
2007_003189
2007_003190
2007_003191
2007_003205
2007_003207
2007_003251
2007_003267
2007_003286
2007_003330
2007_003431
2007_003451
2007_003525
2007_003529
2007_003541
2007_003565
2007_003580
2007_003593
2007_003604
2007_003668
2007_003715
2007_003778
2007_003788
2007_003815
2007_003876
2007_003889
2007_003910
2007_004003
2007_004009
2007_004065
2007_004081
2007_004166
2007_004289
2007_004291
2007_004328
2007_004423
2007_004459
2007_004476
2007_004481
2007_004500
2007_004537
2007_004627
2007_004663
2007_004705
2007_004707
2007_004768
2007_004769
2007_004810
2007_004830
2007_004841
2007_004948
2007_004951
2007_004988
2007_004998
2007_005043
2007_005064
2007_005086
2007_005124
2007_005130
2007_005144
2007_005210
2007_005212
2007_005227
2007_005248
2007_005262
2007_005264
2007_005266
2007_005273
2007_005314
2007_005360
2007_005368
2007_005430
2007_005647
2007_005688
2007_005702
2007_005790
2007_005797
2007_005859
2007_005878
2007_005902
2007_005951
2007_005988
2007_005989
2007_006004
2007_006066
2007_006134
2007_006136
2007_006151
2007_006212
2007_006232
2007_006254
2007_006281
2007_006303
2007_006317
2007_006400
2007_006409
2007_006445
2007_006477
2007_006483
2007_006490
2007_006530
2007_006581
2007_006585
2007_006605
2007_006615
2007_006641
2007_006660
2007_006661
2007_006673
2007_006699
2007_006704
2007_006803
2007_006832
2007_006865
2007_006899
2007_006900
2007_006944
2007_007003
2007_007021
2007_007048
2007_007098
2007_007154
2007_007230
2007_007250
2007_007355
2007_007387
2007_007398
2007_007415
2007_007432
2007_007447
2007_007480
2007_007481
2007_007523
2007_007530
2007_007585
2007_007591
2007_007621
2007_007649
2007_007698
2007_007726
2007_007772
2007_007773
2007_007783
2007_007878
2007_007890
2007_007891
2007_007902
2007_007908
2007_007930
2007_007947
2007_007948
2007_008043
2007_008072
2007_008085
2007_008140
2007_008142
2007_008203
2007_008218
2007_008219
2007_008307
2007_008403
2007_008407
2007_008468
2007_008526
2007_008571
2007_008575
2007_008714
2007_008764
2007_008778
2007_008801
2007_008821
2007_008927
2007_008932
2007_008945
2007_008948
2007_008994
2007_009030
2007_009052
2007_009082
2007_009139
2007_009209
2007_009216
2007_009295
2007_009322
2007_009327
2007_009348
2007_009422
2007_009435
2007_009436
2007_009464
2007_009527
2007_009533
2007_009550
2007_009554
2007_009580
2007_009594
2007_009597
2007_009605
2007_009607
2007_009618
2007_009630
2007_009649
2007_009665
2007_009709
2007_009724
2007_009759
2007_009779
2007_009788
2007_009807
2007_009832
2007_009889
2007_009899
2007_009901
2007_009947
2007_009950
2008_000015
2008_000019
2008_000028
2008_000033
2008_000074
2008_000089
2008_000103
2008_000105
2008_000131
2008_000144
2008_000162
2008_000187
2008_000188
2008_000197
2008_000207
2008_000217
2008_000226
2008_000235
2008_000238
2008_000259
2008_000273
2008_000284
2008_000287
2008_000289
2008_000290
2008_000309
2008_000316
2008_000336
2008_000348
2008_000361
2008_000365
2008_000399
2008_000400
2008_000415
2008_000422
2008_000436
2008_000470
2008_000491
2008_000495
2008_000505
2008_000515
2008_000540
2008_000544
2008_000567
2008_000578
2008_000584
2008_000588
2008_000595
2008_000626
2008_000645
2008_000676
2008_000696
2008_000711
2008_000716
2008_000733
2008_000760
2008_000764
2008_000778
2008_000785
2008_000832
2008_000841
2008_000860
2008_000861
2008_000870
2008_000923
2008_001030
2008_001056
2008_001106
2008_001112
2008_001118
2008_001119
2008_001137
2008_001159
2008_001169
2008_001188
2008_001203
2008_001208
2008_001215
2008_001235
2008_001245
2008_001263
2008_001274
2008_001358
2008_001375
2008_001387
2008_001399
2008_001402
2008_001408
2008_001413
2008_001462
2008_001467
2008_001479
2008_001498
2008_001510
2008_001523
2008_001566
2008_001592
2008_001601
2008_001610
2008_001632
2008_001643
2008_001691
2008_001716
2008_001719
2008_001741
2008_001761
2008_001787
2008_001829
2008_001876
2008_001882
2008_001896
2008_001926
2008_001997
2008_002032
2008_002064
2008_002066
2008_002067
2008_002073
2008_002079
2008_002080
2008_002123
2008_002160
2008_002175
2008_002177
2008_002182
2008_002200
2008_002210
2008_002215
2008_002218
2008_002221
2008_002247
2008_002248
2008_002255
2008_002258
2008_002288
2008_002338
2008_002411
2008_002425
2008_002471
2008_002473
2008_002551
2008_002641
2008_002650
2008_002697
2008_002704
2008_002710
2008_002719
2008_002749
2008_002762
2008_002772
2008_002834
2008_002868
2008_002885
2008_002894
2008_002960
2008_002970
2008_002972
2008_002993
2008_003060
2008_003065
2008_003068
2008_003083
2008_003087
2008_003094
2008_003101
2008_003168
2008_003180
2008_003196
2008_003200
2008_003208
2008_003252
2008_003329
2008_003362
2008_003373
2008_003381
2008_003415
2008_003429
2008_003480
2008_003500
2008_003523
2008_003562
2008_003585
2008_003665
2008_003691
2008_003701
2008_003703
2008_003729
2008_003769
2008_003774
2008_003779
2008_003814
2008_003913
2008_003939
2008_003947
2008_003986
2008_003998
2008_004014
2008_004026
2008_004055
2008_004080
2008_004097
2008_004112
2008_004259
2008_004321
2008_004358
2008_004365
2008_004416
2008_004430
2008_004441
2008_004547
2008_004551
2008_004583
2008_004588
2008_004607
2008_004663
2008_004750
2008_004776
2008_004822
2008_004838
2008_004841
2008_004869
2008_004892
2008_004911
2008_004914
2008_004946
2008_004983
2008_005006
2008_005074
2008_005196
2008_005214
2008_005231
2008_005266
2008_005294
2008_005300
2008_005321
2008_005342
2008_005345
2008_005367
2008_005375
2008_005512
2008_005541
2008_005600
2008_005650
2008_005668
2008_005678
2008_005679
2008_005698
2008_005706
2008_005713
2008_005714
2008_005716
2008_005747
2008_005770
2008_005839
2008_005843
2008_005845
2008_005874
2008_005926
2008_005938
2008_005945
2008_005953
2008_006032
2008_006065
2008_006070
2008_006140
2008_006182
2008_006213
2008_006215
2008_006221
2008_006289
2008_006339
2008_006345
2008_006349
2008_006353
2008_006389
2008_006434
2008_006481
2008_006482
2008_006490
2008_006509
2008_006558
2008_006655
2008_006748
2008_006751
2008_006843
2008_006873
2008_006877
2008_006908
2008_006920
2008_007011
2008_007012
2008_007090
2008_007142
2008_007165
2008_007201
2008_007239
2008_007242
2008_007245
2008_007313
2008_007355
2008_007357
2008_007375
2008_007428
2008_007433
2008_007472
2008_007581
2008_007691
2008_007759
2008_007858
2008_007998
2008_008106
2008_008193
2008_008263
2008_008323
2008_008324
2008_008343
2008_008462
2008_008476
2008_008511
2008_008521
2008_008525
2008_008541
2008_008545
2008_008550
2008_008770
2008_008773
2009_000006
2009_000015
2009_000028
2009_000029
2009_000073
2009_000100
2009_000103
2009_000133
2009_000161
2009_000176
2009_000177
2009_000250
2009_000285
2009_000347
2009_000385
2009_000400
2009_000405
2009_000408
2009_000409
2009_000420
2009_000444
2009_000454
2009_000503
2009_000505
2009_000532
2009_000535
2009_000544
2009_000553
2009_000562
2009_000603
2009_000626
2009_000635
2009_000655
2009_000662
2009_000684
2009_000690
2009_000709
2009_000720
2009_000744
2009_000746
2009_000774
2009_000801
2009_000887
2009_000894
2009_000895
2009_000906
2009_000938
2009_000987
2009_000996
2009_001002
2009_001019
2009_001027
2009_001036
2009_001070
2009_001085
2009_001095
2009_001096
2009_001100
2009_001104
2009_001117
2009_001124
2009_001137
2009_001140
2009_001145
2009_001146
2009_001163
2009_001177
2009_001197
2009_001203
2009_001205
2009_001251
2009_001253
2009_001264
2009_001268
2009_001270
2009_001283
2009_001306
2009_001311
2009_001339
2009_001359
2009_001385
2009_001388
2009_001390
2009_001403
2009_001422
2009_001443
2009_001444
2009_001481
2009_001502
2009_001514
2009_001516
2009_001544
2009_001615
2009_001625
2009_001636
2009_001640
2009_001651
2009_001664
2009_001690
2009_001693
2009_001724
2009_001735
2009_001744
2009_001755
2009_001782
2009_001783
2009_001802
2009_001828
2009_001868
2009_001871
2009_001885
2009_001888
2009_001894
2009_001898
2009_001922
2009_001937
2009_001961
2009_001964
2009_001972
2009_002010
2009_002019
2009_002052
2009_002060
2009_002072
2009_002083
2009_002117
2009_002153
2009_002204
2009_002216
2009_002229
2009_002245
2009_002262
2009_002264
2009_002281
2009_002285
2009_002314
2009_002343
2009_002362
2009_002387
2009_002409
2009_002416
2009_002419
2009_002422
2009_002423
2009_002425
2009_002448
2009_002460
2009_002472
2009_002519
2009_002530
2009_002543
2009_002567
2009_002586
2009_002588
2009_002599
2009_002613
2009_002626
2009_002628
2009_002662
2009_002674
2009_002713
2009_002715
2009_002734
2009_002763
2009_002789
2009_002820
2009_002844
2009_002845
2009_002849
2009_002862
2009_002872
2009_002885
2009_002897
2009_002912
2009_002914
2009_002917
2009_002932
2009_002972
2009_002984
2009_002988
2009_002993
2009_003006
2009_003007
2009_003012
2009_003034
2009_003035
2009_003039
2009_003053
2009_003054
2009_003075
2009_003087
2009_003088
2009_003090
2009_003142
2009_003146
2009_003147
2009_003164
2009_003172
2009_003200
2009_003249
2009_003317
2009_003340
2009_003345
2009_003353
2009_003361
2009_003369
2009_003455
2009_003461
2009_003468
2009_003497
2009_003519
2009_003522
2009_003539
2009_003555
2009_003613
2009_003636
2009_003646
2009_003660
2009_003690
2009_003697
2009_003711
2009_003734
2009_003736
2009_003757
2009_003768
2009_003783
2009_003799
2009_003815
2009_003820
2009_003825
2009_003860
2009_003865
2009_003921
2009_003922
2009_003933
2009_003961
2009_003975
2009_004091
2009_004095
2009_004105
2009_004117
2009_004171
2009_004178
|
881 |
+
2009_004180
|
882 |
+
2009_004186
|
883 |
+
2009_004191
|
884 |
+
2009_004212
|
885 |
+
2009_004213
|
886 |
+
2009_004228
|
887 |
+
2009_004249
|
888 |
+
2009_004264
|
889 |
+
2009_004278
|
890 |
+
2009_004301
|
891 |
+
2009_004316
|
892 |
+
2009_004317
|
893 |
+
2009_004327
|
894 |
+
2009_004328
|
895 |
+
2009_004334
|
896 |
+
2009_004336
|
897 |
+
2009_004368
|
898 |
+
2009_004374
|
899 |
+
2009_004409
|
900 |
+
2009_004417
|
901 |
+
2009_004425
|
902 |
+
2009_004426
|
903 |
+
2009_004434
|
904 |
+
2009_004446
|
905 |
+
2009_004464
|
906 |
+
2009_004479
|
907 |
+
2009_004519
|
908 |
+
2009_004539
|
909 |
+
2009_004561
|
910 |
+
2009_004620
|
911 |
+
2009_004626
|
912 |
+
2009_004643
|
913 |
+
2009_004656
|
914 |
+
2009_004661
|
915 |
+
2009_004674
|
916 |
+
2009_004705
|
917 |
+
2009_004790
|
918 |
+
2009_004805
|
919 |
+
2009_004829
|
920 |
+
2009_004887
|
921 |
+
2009_004888
|
922 |
+
2009_004890
|
923 |
+
2009_004901
|
924 |
+
2009_004904
|
925 |
+
2009_004919
|
926 |
+
2009_004939
|
927 |
+
2009_004980
|
928 |
+
2009_004990
|
929 |
+
2009_005000
|
930 |
+
2009_005016
|
931 |
+
2009_005031
|
932 |
+
2009_005037
|
933 |
+
2009_005055
|
934 |
+
2009_005056
|
935 |
+
2009_005069
|
936 |
+
2009_005084
|
937 |
+
2009_005085
|
938 |
+
2009_005107
|
939 |
+
2009_005118
|
940 |
+
2009_005120
|
941 |
+
2009_005128
|
942 |
+
2009_005130
|
943 |
+
2009_005141
|
944 |
+
2009_005145
|
945 |
+
2009_005160
|
946 |
+
2009_005177
|
947 |
+
2009_005194
|
948 |
+
2009_005234
|
949 |
+
2009_005236
|
950 |
+
2009_005247
|
951 |
+
2009_005269
|
952 |
+
2009_005287
|
953 |
+
2010_000002
|
954 |
+
2010_000043
|
955 |
+
2010_000063
|
956 |
+
2010_000075
|
957 |
+
2010_000076
|
958 |
+
2010_000114
|
959 |
+
2010_000117
|
960 |
+
2010_000131
|
961 |
+
2010_000132
|
962 |
+
2010_000148
|
963 |
+
2010_000187
|
964 |
+
2010_000189
|
965 |
+
2010_000195
|
966 |
+
2010_000269
|
967 |
+
2010_000285
|
968 |
+
2010_000371
|
969 |
+
2010_000392
|
970 |
+
2010_000404
|
971 |
+
2010_000436
|
972 |
+
2010_000437
|
973 |
+
2010_000466
|
974 |
+
2010_000469
|
975 |
+
2010_000492
|
976 |
+
2010_000498
|
977 |
+
2010_000503
|
978 |
+
2010_000519
|
979 |
+
2010_000567
|
980 |
+
2010_000588
|
981 |
+
2010_000632
|
982 |
+
2010_000661
|
983 |
+
2010_000675
|
984 |
+
2010_000685
|
985 |
+
2010_000746
|
986 |
+
2010_000748
|
987 |
+
2010_000772
|
988 |
+
2010_000787
|
989 |
+
2010_000810
|
990 |
+
2010_000815
|
991 |
+
2010_000847
|
992 |
+
2010_000855
|
993 |
+
2010_000885
|
994 |
+
2010_000887
|
995 |
+
2010_000978
|
996 |
+
2010_000986
|
997 |
+
2010_001043
|
998 |
+
2010_001120
|
999 |
+
2010_001131
|
1000 |
+
2010_001154
|
1001 |
+
2010_001160
|
1002 |
+
2010_001177
|
1003 |
+
2010_001183
|
1004 |
+
2010_001184
|
1005 |
+
2010_001195
|
1006 |
+
2010_001245
|
1007 |
+
2010_001247
|
1008 |
+
2010_001261
|
1009 |
+
2010_001273
|
1010 |
+
2010_001279
|
1011 |
+
2010_001282
|
1012 |
+
2010_001329
|
1013 |
+
2010_001347
|
1014 |
+
2010_001374
|
1015 |
+
2010_001386
|
1016 |
+
2010_001399
|
1017 |
+
2010_001413
|
1018 |
+
2010_001418
|
1019 |
+
2010_001422
|
1020 |
+
2010_001457
|
1021 |
+
2010_001514
|
1022 |
+
2010_001515
|
1023 |
+
2010_001561
|
1024 |
+
2010_001562
|
1025 |
+
2010_001576
|
1026 |
+
2010_001590
|
1027 |
+
2010_001595
|
1028 |
+
2010_001618
|
1029 |
+
2010_001619
|
1030 |
+
2010_001630
|
1031 |
+
2010_001660
|
1032 |
+
2010_001676
|
1033 |
+
2010_001706
|
1034 |
+
2010_001732
|
1035 |
+
2010_001748
|
1036 |
+
2010_001807
|
1037 |
+
2010_001842
|
1038 |
+
2010_001849
|
1039 |
+
2010_001850
|
1040 |
+
2010_001852
|
1041 |
+
2010_001860
|
1042 |
+
2010_001922
|
1043 |
+
2010_001923
|
1044 |
+
2010_001933
|
1045 |
+
2010_001939
|
1046 |
+
2010_001944
|
1047 |
+
2010_002018
|
1048 |
+
2010_002020
|
1049 |
+
2010_002032
|
1050 |
+
2010_002039
|
1051 |
+
2010_002047
|
1052 |
+
2010_002054
|
1053 |
+
2010_002055
|
1054 |
+
2010_002070
|
1055 |
+
2010_002097
|
1056 |
+
2010_002107
|
1057 |
+
2010_002139
|
1058 |
+
2010_002154
|
1059 |
+
2010_002166
|
1060 |
+
2010_002203
|
1061 |
+
2010_002218
|
1062 |
+
2010_002236
|
1063 |
+
2010_002254
|
1064 |
+
2010_002286
|
1065 |
+
2010_002338
|
1066 |
+
2010_002363
|
1067 |
+
2010_002379
|
1068 |
+
2010_002382
|
1069 |
+
2010_002387
|
1070 |
+
2010_002413
|
1071 |
+
2010_002418
|
1072 |
+
2010_002440
|
1073 |
+
2010_002455
|
1074 |
+
2010_002457
|
1075 |
+
2010_002499
|
1076 |
+
2010_002527
|
1077 |
+
2010_002532
|
1078 |
+
2010_002551
|
1079 |
+
2010_002556
|
1080 |
+
2010_002570
|
1081 |
+
2010_002573
|
1082 |
+
2010_002625
|
1083 |
+
2010_002659
|
1084 |
+
2010_002697
|
1085 |
+
2010_002720
|
1086 |
+
2010_002733
|
1087 |
+
2010_002750
|
1088 |
+
2010_002778
|
1089 |
+
2010_002786
|
1090 |
+
2010_002794
|
1091 |
+
2010_002811
|
1092 |
+
2010_002815
|
1093 |
+
2010_002838
|
1094 |
+
2010_002856
|
1095 |
+
2010_002870
|
1096 |
+
2010_002892
|
1097 |
+
2010_002907
|
1098 |
+
2010_002935
|
1099 |
+
2010_002937
|
1100 |
+
2010_002938
|
1101 |
+
2010_002962
|
1102 |
+
2010_002973
|
1103 |
+
2010_003010
|
1104 |
+
2010_003017
|
1105 |
+
2010_003062
|
1106 |
+
2010_003088
|
1107 |
+
2010_003093
|
1108 |
+
2010_003097
|
1109 |
+
2010_003114
|
1110 |
+
2010_003119
|
1111 |
+
2010_003153
|
1112 |
+
2010_003157
|
1113 |
+
2010_003170
|
1114 |
+
2010_003174
|
1115 |
+
2010_003203
|
1116 |
+
2010_003230
|
1117 |
+
2010_003250
|
1118 |
+
2010_003252
|
1119 |
+
2010_003269
|
1120 |
+
2010_003274
|
1121 |
+
2010_003342
|
1122 |
+
2010_003345
|
1123 |
+
2010_003380
|
1124 |
+
2010_003383
|
1125 |
+
2010_003384
|
1126 |
+
2010_003529
|
1127 |
+
2010_003534
|
1128 |
+
2010_003599
|
1129 |
+
2010_003634
|
1130 |
+
2010_003651
|
1131 |
+
2010_003665
|
1132 |
+
2010_003670
|
1133 |
+
2010_003680
|
1134 |
+
2010_003696
|
1135 |
+
2010_003717
|
1136 |
+
2010_003737
|
1137 |
+
2010_003798
|
1138 |
+
2010_003799
|
1139 |
+
2010_003884
|
1140 |
+
2010_003887
|
1141 |
+
2010_003894
|
1142 |
+
2010_003899
|
1143 |
+
2010_003911
|
1144 |
+
2010_003925
|
1145 |
+
2010_003950
|
1146 |
+
2010_003954
|
1147 |
+
2010_003958
|
1148 |
+
2010_003974
|
1149 |
+
2010_004005
|
1150 |
+
2010_004025
|
1151 |
+
2010_004060
|
1152 |
+
2010_004069
|
1153 |
+
2010_004071
|
1154 |
+
2010_004072
|
1155 |
+
2010_004074
|
1156 |
+
2010_004109
|
1157 |
+
2010_004119
|
1158 |
+
2010_004144
|
1159 |
+
2010_004154
|
1160 |
+
2010_004171
|
1161 |
+
2010_004180
|
1162 |
+
2010_004186
|
1163 |
+
2010_004210
|
1164 |
+
2010_004222
|
1165 |
+
2010_004258
|
1166 |
+
2010_004283
|
1167 |
+
2010_004288
|
1168 |
+
2010_004289
|
1169 |
+
2010_004306
|
1170 |
+
2010_004361
|
1171 |
+
2010_004363
|
1172 |
+
2010_004365
|
1173 |
+
2010_004370
|
1174 |
+
2010_004429
|
1175 |
+
2010_004450
|
1176 |
+
2010_004478
|
1177 |
+
2010_004481
|
1178 |
+
2010_004493
|
1179 |
+
2010_004499
|
1180 |
+
2010_004540
|
1181 |
+
2010_004560
|
1182 |
+
2010_004577
|
1183 |
+
2010_004598
|
1184 |
+
2010_004616
|
1185 |
+
2010_004620
|
1186 |
+
2010_004625
|
1187 |
+
2010_004669
|
1188 |
+
2010_004683
|
1189 |
+
2010_004694
|
1190 |
+
2010_004704
|
1191 |
+
2010_004721
|
1192 |
+
2010_004760
|
1193 |
+
2010_004766
|
1194 |
+
2010_004773
|
1195 |
+
2010_004805
|
1196 |
+
2010_004808
|
1197 |
+
2010_004900
|
1198 |
+
2010_004916
|
1199 |
+
2010_004933
|
1200 |
+
2010_004938
|
1201 |
+
2010_004948
|
1202 |
+
2010_004960
|
1203 |
+
2010_004963
|
1204 |
+
2010_005016
|
1205 |
+
2010_005028
|
1206 |
+
2010_005055
|
1207 |
+
2010_005064
|
1208 |
+
2010_005098
|
1209 |
+
2010_005106
|
1210 |
+
2010_005111
|
1211 |
+
2010_005119
|
1212 |
+
2010_005128
|
1213 |
+
2010_005129
|
1214 |
+
2010_005198
|
1215 |
+
2010_005202
|
1216 |
+
2010_005217
|
1217 |
+
2010_005223
|
1218 |
+
2010_005232
|
1219 |
+
2010_005277
|
1220 |
+
2010_005317
|
1221 |
+
2010_005318
|
1222 |
+
2010_005419
|
1223 |
+
2010_005429
|
1224 |
+
2010_005450
|
1225 |
+
2010_005457
|
1226 |
+
2010_005468
|
1227 |
+
2010_005471
|
1228 |
+
2010_005494
|
1229 |
+
2010_005500
|
1230 |
+
2010_005505
|
1231 |
+
2010_005506
|
1232 |
+
2010_005513
|
1233 |
+
2010_005519
|
1234 |
+
2010_005522
|
1235 |
+
2010_005596
|
1236 |
+
2010_005627
|
1237 |
+
2010_005643
|
1238 |
+
2010_005652
|
1239 |
+
2010_005663
|
1240 |
+
2010_005669
|
1241 |
+
2010_005678
|
1242 |
+
2010_005700
|
1243 |
+
2010_005721
|
1244 |
+
2010_005723
|
1245 |
+
2010_005725
|
1246 |
+
2010_005734
|
1247 |
+
2010_005744
|
1248 |
+
2010_005746
|
1249 |
+
2010_005755
|
1250 |
+
2010_005758
|
1251 |
+
2010_005775
|
1252 |
+
2010_005791
|
1253 |
+
2010_005796
|
1254 |
+
2010_005800
|
1255 |
+
2010_005805
|
1256 |
+
2010_005810
|
1257 |
+
2010_005820
|
1258 |
+
2010_005830
|
1259 |
+
2010_005835
|
1260 |
+
2010_005836
|
1261 |
+
2010_005876
|
1262 |
+
2010_005891
|
1263 |
+
2010_005898
|
1264 |
+
2010_005919
|
1265 |
+
2010_005927
|
1266 |
+
2010_005932
|
1267 |
+
2010_005951
|
1268 |
+
2010_005952
|
1269 |
+
2010_005978
|
1270 |
+
2010_005982
|
1271 |
+
2010_006009
|
1272 |
+
2011_000003
|
1273 |
+
2011_000006
|
1274 |
+
2011_000025
|
1275 |
+
2011_000027
|
1276 |
+
2011_000068
|
1277 |
+
2011_000069
|
1278 |
+
2011_000105
|
1279 |
+
2011_000108
|
1280 |
+
2011_000116
|
1281 |
+
2011_000122
|
1282 |
+
2011_000145
|
1283 |
+
2011_000149
|
1284 |
+
2011_000152
|
1285 |
+
2011_000182
|
1286 |
+
2011_000197
|
1287 |
+
2011_000208
|
1288 |
+
2011_000216
|
1289 |
+
2011_000219
|
1290 |
+
2011_000221
|
1291 |
+
2011_000222
|
1292 |
+
2011_000228
|
1293 |
+
2011_000243
|
1294 |
+
2011_000252
|
1295 |
+
2011_000258
|
1296 |
+
2011_000268
|
1297 |
+
2011_000277
|
1298 |
+
2011_000278
|
1299 |
+
2011_000293
|
1300 |
+
2011_000345
|
1301 |
+
2011_000359
|
1302 |
+
2011_000379
|
1303 |
+
2011_000382
|
1304 |
+
2011_000400
|
1305 |
+
2011_000428
|
1306 |
+
2011_000449
|
1307 |
+
2011_000453
|
1308 |
+
2011_000457
|
1309 |
+
2011_000468
|
1310 |
+
2011_000469
|
1311 |
+
2011_000513
|
1312 |
+
2011_000542
|
1313 |
+
2011_000550
|
1314 |
+
2011_000551
|
1315 |
+
2011_000556
|
1316 |
+
2011_000573
|
1317 |
+
2011_000577
|
1318 |
+
2011_000589
|
1319 |
+
2011_000594
|
1320 |
+
2011_000637
|
1321 |
+
2011_000641
|
1322 |
+
2011_000642
|
1323 |
+
2011_000646
|
1324 |
+
2011_000651
|
1325 |
+
2011_000652
|
1326 |
+
2011_000713
|
1327 |
+
2011_000758
|
1328 |
+
2011_000768
|
1329 |
+
2011_000771
|
1330 |
+
2011_000790
|
1331 |
+
2011_000793
|
1332 |
+
2011_000834
|
1333 |
+
2011_000840
|
1334 |
+
2011_000882
|
1335 |
+
2011_000893
|
1336 |
+
2011_000895
|
1337 |
+
2011_000920
|
1338 |
+
2011_000934
|
1339 |
+
2011_000944
|
1340 |
+
2011_000973
|
1341 |
+
2011_000982
|
1342 |
+
2011_000997
|
1343 |
+
2011_000999
|
1344 |
+
2011_001004
|
1345 |
+
2011_001015
|
1346 |
+
2011_001027
|
1347 |
+
2011_001133
|
1348 |
+
2011_001135
|
1349 |
+
2011_001139
|
1350 |
+
2011_001166
|
1351 |
+
2011_001175
|
1352 |
+
2011_001198
|
1353 |
+
2011_001211
|
1354 |
+
2011_001259
|
1355 |
+
2011_001270
|
1356 |
+
2011_001336
|
1357 |
+
2011_001400
|
1358 |
+
2011_001402
|
1359 |
+
2011_001411
|
1360 |
+
2011_001412
|
1361 |
+
2011_001432
|
1362 |
+
2011_001463
|
1363 |
+
2011_001475
|
1364 |
+
2011_001479
|
1365 |
+
2011_001519
|
1366 |
+
2011_001536
|
1367 |
+
2011_001542
|
1368 |
+
2011_001571
|
1369 |
+
2011_001621
|
1370 |
+
2011_001622
|
1371 |
+
2011_001632
|
1372 |
+
2011_001652
|
1373 |
+
2011_001653
|
1374 |
+
2011_001695
|
1375 |
+
2011_001710
|
1376 |
+
2011_001730
|
1377 |
+
2011_001753
|
1378 |
+
2011_001754
|
1379 |
+
2011_001764
|
1380 |
+
2011_001765
|
1381 |
+
2011_001790
|
1382 |
+
2011_001810
|
1383 |
+
2011_001855
|
1384 |
+
2011_001866
|
1385 |
+
2011_001875
|
1386 |
+
2011_001895
|
1387 |
+
2011_001902
|
1388 |
+
2011_001904
|
1389 |
+
2011_001922
|
1390 |
+
2011_001924
|
1391 |
+
2011_001928
|
1392 |
+
2011_001959
|
1393 |
+
2011_001967
|
1394 |
+
2011_001972
|
1395 |
+
2011_001974
|
1396 |
+
2011_001991
|
1397 |
+
2011_002027
|
1398 |
+
2011_002050
|
1399 |
+
2011_002107
|
1400 |
+
2011_002111
|
1401 |
+
2011_002114
|
1402 |
+
2011_002119
|
1403 |
+
2011_002134
|
1404 |
+
2011_002135
|
1405 |
+
2011_002149
|
1406 |
+
2011_002222
|
1407 |
+
2011_002224
|
1408 |
+
2011_002227
|
1409 |
+
2011_002246
|
1410 |
+
2011_002291
|
1411 |
+
2011_002300
|
1412 |
+
2011_002303
|
1413 |
+
2011_002335
|
1414 |
+
2011_002341
|
1415 |
+
2011_002350
|
1416 |
+
2011_002381
|
1417 |
+
2011_002385
|
1418 |
+
2011_002389
|
1419 |
+
2011_002398
|
1420 |
+
2011_002410
|
1421 |
+
2011_002447
|
1422 |
+
2011_002457
|
1423 |
+
2011_002464
|
1424 |
+
2011_002488
|
1425 |
+
2011_002503
|
1426 |
+
2011_002504
|
1427 |
+
2011_002511
|
1428 |
+
2011_002528
|
1429 |
+
2011_002553
|
1430 |
+
2011_002559
|
1431 |
+
2011_002561
|
1432 |
+
2011_002585
|
1433 |
+
2011_002590
|
1434 |
+
2011_002652
|
1435 |
+
2011_002656
|
1436 |
+
2011_002709
|
1437 |
+
2011_002715
|
1438 |
+
2011_002717
|
1439 |
+
2011_002752
|
1440 |
+
2011_002767
|
1441 |
+
2011_002770
|
1442 |
+
2011_002834
|
1443 |
+
2011_002851
|
1444 |
+
2011_002872
|
1445 |
+
2011_002873
|
1446 |
+
2011_002920
|
1447 |
+
2011_002932
|
1448 |
+
2011_002935
|
1449 |
+
2011_002947
|
1450 |
+
2011_002953
|
1451 |
+
2011_002956
|
1452 |
+
2011_003025
|
1453 |
+
2011_003038
|
1454 |
+
2011_003057
|
1455 |
+
2011_003066
|
1456 |
+
2011_003078
|
1457 |
+
2011_003121
|
1458 |
+
2011_003141
|
1459 |
+
2011_003151
|
1460 |
+
2011_003184
|
1461 |
+
2011_003216
|
1462 |
+
2011_003238
|
1463 |
+
2011_003246
|
1464 |
+
2011_003255
|
data/train_aug.txt
ADDED
The diff for this file is too large to render.
See raw diff
data/val.txt
ADDED
@@ -0,0 +1,1449 @@
2007_000033 … 2011_003271
(data/val.txt, entries 1–1449: one PASCAL VOC image ID per line. The full list is too long to render inline; see the raw diff.)
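Each of these split files (data/train.txt, data/train_aug.txt, data/val.txt, data/test.txt) holds one PASCAL VOC 2012 image ID per line. Below is a minimal sketch of how such a file might be consumed under the standard VOCdevkit layout; the load_split helper and VOC_ROOT path are illustrative assumptions, not the repository's actual API (see core/datasets.py for the real loader):

import os

def load_split(txt_path):
    # Hypothetical helper (not the repository's API): read one VOC image ID
    # per line, skipping any blank lines.
    with open(txt_path) as f:
        return [line.strip() for line in f if line.strip()]

# Assumed PASCAL VOC 2012 layout; point VOC_ROOT at a local copy of the dataset.
VOC_ROOT = 'VOCdevkit/VOC2012'

val_ids = load_split('data/val.txt')
for image_id in val_ids[:3]:
    image_path = os.path.join(VOC_ROOT, 'JPEGImages', image_id + '.jpg')
    mask_path = os.path.join(VOC_ROOT, 'SegmentationClass', image_id + '.png')
    print(image_id, image_path, mask_path)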
requirements.txt
ADDED
Binary file (300 Bytes).
res/figure_1/fig_1_a.png
ADDED
res/figure_1/fig_1_b.png
ADDED
res/figure_1/fig_1_c.png
ADDED
res/figure_2.PNG
ADDED
res/figure_2/original.png
ADDED
res/figure_3/figure_3_a.png
ADDED
res/figure_3/figure_3_b.png
ADDED
res/figure_3/figure_3_c.png
ADDED
res/figure_3/figure_3_d.png
ADDED
res/figure_4/2007_000123.png
ADDED
res/figure_4/2007_000123_gt.png
ADDED
res/figure_4/2007_000123_pred.png
ADDED
res/figure_4/2007_000175.png
ADDED
res/figure_4/2007_000175_gt.png
ADDED
res/figure_4/2007_000175_pred.png
ADDED
res/figure_4/2007_000762.png
ADDED
res/figure_4/2007_000762_gt.png
ADDED
res/figure_4/2007_000762_pred.png
ADDED
res/figure_4/2007_000799.png
ADDED
res/figure_4/2007_000799_gt.png
ADDED
res/figure_4/2007_000799_pred.png
ADDED
res/figure_4/2007_000999.png
ADDED
res/figure_4/2007_000999_gt.png
ADDED
res/figure_4/2007_000999_pred.png
ADDED
res/figure_4/2007_001239.png
ADDED
res/figure_4/2007_001239_gt.png
ADDED
res/figure_4/2007_001239_pred.png
ADDED
res/figure_4/2007_001284.png
ADDED